Import L2 (activity-pub) as l2/
This commit is contained in:
20
l2/.env.example
Normal file
20
l2/.env.example
Normal file
@@ -0,0 +1,20 @@
|
||||
# L2 Server Configuration
|
||||
|
||||
# PostgreSQL password (REQUIRED - no default)
|
||||
POSTGRES_PASSWORD=changeme-generate-with-openssl-rand-hex-16
|
||||
|
||||
# Domain for this ActivityPub server
|
||||
ARTDAG_DOMAIN=artdag.rose-ash.com
|
||||
|
||||
# JWT secret for token signing (generate with: openssl rand -hex 32)
|
||||
JWT_SECRET=your-secret-here-generate-with-openssl-rand-hex-32
|
||||
|
||||
# L1 server URL for fetching content (images/videos)
|
||||
L1_PUBLIC_URL=https://celery-artdag.rose-ash.com
|
||||
|
||||
# Effects repository URL for linking to effect source code
|
||||
EFFECTS_REPO_URL=https://git.rose-ash.com/art-dag/effects
|
||||
|
||||
# Notes:
|
||||
# - ARTDAG_USER removed - now multi-actor, each registered user is their own actor
|
||||
# - L1 URL can also come from provenance data per-asset
|
||||
62
l2/.gitea/workflows/ci.yml
Normal file
62
l2/.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,62 @@
|
||||
name: Build and Deploy
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
env:
|
||||
REGISTRY: registry.rose-ash.com:5000
|
||||
IMAGE: l2-server
|
||||
|
||||
jobs:
|
||||
build-and-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install tools
|
||||
run: |
|
||||
apt-get update && apt-get install -y --no-install-recommends openssh-client
|
||||
|
||||
- name: Set up SSH
|
||||
env:
|
||||
SSH_KEY: ${{ secrets.DEPLOY_SSH_KEY }}
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
mkdir -p ~/.ssh
|
||||
echo "$SSH_KEY" > ~/.ssh/id_rsa
|
||||
chmod 600 ~/.ssh/id_rsa
|
||||
ssh-keyscan -H "$DEPLOY_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true
|
||||
|
||||
- name: Pull latest code on server
|
||||
env:
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
ssh "root@$DEPLOY_HOST" "
|
||||
cd /root/art-dag/activity-pub
|
||||
git fetch origin main
|
||||
git reset --hard origin/main
|
||||
"
|
||||
|
||||
- name: Build and push image
|
||||
env:
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
ssh "root@$DEPLOY_HOST" "
|
||||
cd /root/art-dag/activity-pub
|
||||
docker build --build-arg CACHEBUST=\$(date +%s) -t ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest -t ${{ env.REGISTRY }}/${{ env.IMAGE }}:${{ github.sha }} .
|
||||
docker push ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest
|
||||
docker push ${{ env.REGISTRY }}/${{ env.IMAGE }}:${{ github.sha }}
|
||||
"
|
||||
|
||||
- name: Deploy stack
|
||||
env:
|
||||
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
|
||||
run: |
|
||||
ssh "root@$DEPLOY_HOST" "
|
||||
cd /root/art-dag/activity-pub
|
||||
docker stack deploy -c docker-compose.yml activitypub
|
||||
echo 'Waiting for services to update...'
|
||||
sleep 10
|
||||
docker stack services activitypub
|
||||
"
|
||||
11
l2/.gitignore
vendored
Normal file
11
l2/.gitignore
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# Private keys - NEVER commit these
|
||||
*.pem
|
||||
keys/
|
||||
|
||||
# Secrets
|
||||
.env
|
||||
23
l2/Dockerfile
Normal file
23
l2/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
||||
FROM python:3.11-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install git for pip to clone dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install dependencies
|
||||
COPY requirements.txt .
|
||||
ARG CACHEBUST=1
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application
|
||||
COPY . .
|
||||
|
||||
# Create data directory
|
||||
RUN mkdir -p /data/l2
|
||||
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV ARTDAG_DATA=/data/l2
|
||||
|
||||
# Default command runs the server
|
||||
CMD ["python", "server.py"]
|
||||
389
l2/README.md
Normal file
389
l2/README.md
Normal file
@@ -0,0 +1,389 @@
|
||||
# Art DAG L2 Server - ActivityPub
|
||||
|
||||
Ownership registry and ActivityPub federation for Art DAG. Manages asset provenance, cryptographic anchoring, and distributed identity.
|
||||
|
||||
## Features
|
||||
|
||||
- **Asset Registry**: Content-addressed assets with provenance tracking
|
||||
- **ActivityPub Federation**: Standard protocol for distributed social networking
|
||||
- **OpenTimestamps Anchoring**: Cryptographic proof of existence on Bitcoin blockchain
|
||||
- **L1 Integration**: Record and verify L1 rendering runs
|
||||
- **Storage Providers**: S3, IPFS, and local storage backends
|
||||
- **Scoped Authentication**: Secure token-based auth for federated L1 servers
|
||||
|
||||
## Dependencies
|
||||
|
||||
- **PostgreSQL**: Primary data storage
|
||||
- **artdag-common**: Shared templates and middleware
|
||||
- **cryptography**: RSA key generation and signing
|
||||
- **httpx**: Async HTTP client for federation
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Configure
|
||||
export ARTDAG_DOMAIN=artdag.example.com
|
||||
# ARTDAG_USER is no longer required — the server is multi-actor; each registered user is an actor
|
||||
export DATABASE_URL=postgresql://artdag:$POSTGRES_PASSWORD@localhost:5432/artdag
|
||||
export L1_SERVERS=https://celery-artdag.example.com
|
||||
|
||||
# Generate signing keys (required for federation)
|
||||
python setup_keys.py
|
||||
|
||||
# Start server
|
||||
python server.py
|
||||
```
|
||||
|
||||
## Docker Deployment
|
||||
|
||||
```bash
|
||||
docker stack deploy -c docker-compose.yml artdag-l2
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `ARTDAG_DOMAIN` | `artdag.rose-ash.com` | Domain for ActivityPub actors |
|
||||
| `ARTDAG_USER` | *(removed)* | Deprecated — multi-actor: each registered user is their own actor |
|
||||
| `ARTDAG_DATA` | `~/.artdag/l2` | Data directory |
|
||||
| `DATABASE_URL` | **(required)** | PostgreSQL connection |
|
||||
| `L1_SERVERS` | - | Comma-separated list of L1 server URLs |
|
||||
| `JWT_SECRET` | (generated) | JWT signing secret |
|
||||
| `HOST` | `0.0.0.0` | Server bind address |
|
||||
| `PORT` | `8200` | Server port |
|
||||
|
||||
### JWT Secret
|
||||
|
||||
The JWT secret signs authentication tokens. Without a persistent secret, tokens are invalidated on restart.
|
||||
|
||||
```bash
|
||||
# Generate a secret
|
||||
openssl rand -hex 32
|
||||
|
||||
# Set in environment
|
||||
export JWT_SECRET="your-generated-secret"
|
||||
|
||||
# Or use Docker secrets (recommended for production)
|
||||
echo "your-secret" | docker secret create jwt_secret -
|
||||
```
|
||||
|
||||
### RSA Keys
|
||||
|
||||
ActivityPub requires RSA keys for signing activities:
|
||||
|
||||
```bash
|
||||
# Generate keys
|
||||
python setup_keys.py
|
||||
|
||||
# Or with custom paths
|
||||
python setup_keys.py --data-dir /data/l2 --user giles
|
||||
```
|
||||
|
||||
Keys stored in `$ARTDAG_DATA/keys/`:
|
||||
- `{username}.pem` - Private key (chmod 600)
|
||||
- `{username}.pub` - Public key (in actor profile)
|
||||
|
||||
## Web UI
|
||||
|
||||
| Path | Description |
|
||||
|------|-------------|
|
||||
| `/` | Home page with stats |
|
||||
| `/login` | Login form |
|
||||
| `/register` | Registration form |
|
||||
| `/logout` | Log out |
|
||||
| `/assets` | Browse registered assets |
|
||||
| `/asset/{name}` | Asset detail page |
|
||||
| `/activities` | Published activities |
|
||||
| `/activity/{id}` | Activity detail |
|
||||
| `/users` | Registered users |
|
||||
| `/renderers` | L1 renderer connections |
|
||||
| `/anchors/ui` | OpenTimestamps management |
|
||||
| `/storage` | Storage provider config |
|
||||
| `/download/client` | Download CLI client |
|
||||
|
||||
## API Reference
|
||||
|
||||
Interactive docs: http://localhost:8200/docs
|
||||
|
||||
### Authentication
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| POST | `/auth/register` | Register new user |
|
||||
| POST | `/auth/login` | Login, get JWT token |
|
||||
| GET | `/auth/me` | Get current user info |
|
||||
| POST | `/auth/verify` | Verify token (for L1 servers) |
|
||||
|
||||
### Assets
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| GET | `/assets` | List all assets |
|
||||
| GET | `/assets/{name}` | Get asset by name |
|
||||
| POST | `/assets` | Register new asset |
|
||||
| PATCH | `/assets/{name}` | Update asset metadata |
|
||||
| POST | `/assets/record-run` | Record L1 run as asset |
|
||||
| POST | `/assets/publish-cache` | Publish L1 cache item |
|
||||
| GET | `/assets/by-run-id/{run_id}` | Find asset by L1 run ID |
|
||||
|
||||
### ActivityPub
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| GET | `/.well-known/webfinger` | Actor discovery |
|
||||
| GET | `/users/{username}` | Actor profile |
|
||||
| GET | `/users/{username}/outbox` | Published activities |
|
||||
| POST | `/users/{username}/inbox` | Receive activities |
|
||||
| GET | `/users/{username}/followers` | Followers list |
|
||||
| GET | `/objects/{hash}` | Get object by content hash |
|
||||
| GET | `/activities` | List activities (paginated) |
|
||||
| GET | `/activities/{ref}` | Get activity by reference |
|
||||
| GET | `/activity/{index}` | Get activity by index |
|
||||
|
||||
### OpenTimestamps Anchoring
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| POST | `/anchors/create` | Create timestamp anchor |
|
||||
| GET | `/anchors` | List all anchors |
|
||||
| GET | `/anchors/{merkle_root}` | Get anchor details |
|
||||
| GET | `/anchors/{merkle_root}/tree` | Get merkle tree |
|
||||
| GET | `/anchors/verify/{activity_id}` | Verify activity timestamp |
|
||||
| POST | `/anchors/{merkle_root}/upgrade` | Upgrade pending timestamp |
|
||||
| GET | `/anchors/ui` | Anchor management UI |
|
||||
| POST | `/anchors/test-ots` | Test OTS functionality |
|
||||
|
||||
### Renderers (L1 Connections)
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| GET | `/renderers` | List attached L1 servers |
|
||||
| GET | `/renderers/attach` | Initiate L1 attachment |
|
||||
| POST | `/renderers/detach` | Detach from L1 server |
|
||||
|
||||
### Storage Providers
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| GET | `/storage` | List storage providers |
|
||||
| POST | `/storage` | Add provider (form) |
|
||||
| POST | `/storage/add` | Add provider (JSON) |
|
||||
| GET | `/storage/{id}` | Get provider details |
|
||||
| PATCH | `/storage/{id}` | Update provider |
|
||||
| DELETE | `/storage/{id}` | Delete provider |
|
||||
| POST | `/storage/{id}/test` | Test connection |
|
||||
| GET | `/storage/type/{type}` | Get form for provider type |
|
||||
|
||||
## L1 Renderer Integration
|
||||
|
||||
L2 coordinates with L1 rendering servers for distributed processing.
|
||||
|
||||
### Configuration
|
||||
|
||||
```bash
|
||||
# Single L1 server
|
||||
export L1_SERVERS=https://celery-artdag.rose-ash.com
|
||||
|
||||
# Multiple L1 servers
|
||||
export L1_SERVERS=https://server1.example.com,https://server2.example.com
|
||||
```
|
||||
|
||||
### Attachment Flow
|
||||
|
||||
1. User visits `/renderers` and clicks "Attach"
|
||||
2. L2 creates a **scoped token** bound to the specific L1
|
||||
3. User redirected to L1's `/auth?auth_token=...`
|
||||
4. L1 calls L2's `/auth/verify` to validate
|
||||
5. L2 checks token scope matches requesting L1
|
||||
6. L1 sets local cookie, attachment recorded in `user_renderers`
|
||||
|
||||
### Security
|
||||
|
||||
- **Scoped tokens**: Tokens bound to specific L1; can't be used elsewhere
|
||||
- **No shared secrets**: L1 verifies via L2's `/auth/verify` endpoint
|
||||
- **Federated logout**: L2 revokes tokens on all attached L1s
|
||||
|
||||
## OpenTimestamps Anchoring
|
||||
|
||||
Cryptographic proof of existence using Bitcoin blockchain.
|
||||
|
||||
### How It Works
|
||||
|
||||
1. Activities are collected into merkle trees
|
||||
2. Merkle root submitted to Bitcoin via OpenTimestamps
|
||||
3. Pending proofs upgraded when Bitcoin confirms
|
||||
4. Final proof verifiable without trusted third parties
|
||||
|
||||
### Verification
|
||||
|
||||
```bash
|
||||
# Verify an activity's timestamp
|
||||
curl https://artdag.example.com/anchors/verify/123
|
||||
|
||||
# Returns:
|
||||
{
|
||||
"activity_id": 123,
|
||||
"merkle_root": "abc123...",
|
||||
"status": "confirmed",
|
||||
"bitcoin_block": 800000,
|
||||
"verified_at": "2026-01-01T..."
|
||||
}
|
||||
```
|
||||
|
||||
## Data Model
|
||||
|
||||
### PostgreSQL Tables
|
||||
|
||||
| Table | Description |
|
||||
|-------|-------------|
|
||||
| `users` | Registered users with hashed passwords |
|
||||
| `assets` | Asset registry with content hashes |
|
||||
| `activities` | Signed ActivityPub activities |
|
||||
| `followers` | Follower relationships |
|
||||
| `anchors` | OpenTimestamps anchor records |
|
||||
| `anchor_activities` | Activity-to-anchor mappings |
|
||||
| `user_renderers` | L1 attachment records |
|
||||
| `revoked_tokens` | Token revocation list |
|
||||
| `storage_providers` | Storage configurations |
|
||||
|
||||
### Asset Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "my-video",
|
||||
"content_hash": "sha3-256:abc123...",
|
||||
"asset_type": "video",
|
||||
"owner": "@giles@artdag.rose-ash.com",
|
||||
"created_at": "2026-01-01T...",
|
||||
"provenance": {
|
||||
"inputs": [...],
|
||||
"recipe": "beat-sync",
|
||||
"l1_server": "https://celery-artdag.rose-ash.com",
|
||||
"run_id": "..."
|
||||
},
|
||||
"tags": ["art", "generated"]
|
||||
}
|
||||
```
|
||||
|
||||
### Activity Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": "Create",
|
||||
"actor": "https://artdag.rose-ash.com/users/giles",
|
||||
"object": {
|
||||
"type": "Document",
|
||||
"name": "my-video",
|
||||
"content": "sha3-256:abc123...",
|
||||
"attributedTo": "https://artdag.rose-ash.com/users/giles"
|
||||
},
|
||||
"published": "2026-01-01T..."
|
||||
}
|
||||
```
|
||||
|
||||
## CLI Commands
|
||||
|
||||
### Register Asset
|
||||
|
||||
```bash
|
||||
curl -X POST https://artdag.example.com/assets \
|
||||
-H "Authorization: Bearer <token>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"name": "my-video",
|
||||
"content_hash": "abc123...",
|
||||
"asset_type": "video",
|
||||
"tags": ["art", "generated"]
|
||||
}'
|
||||
```
|
||||
|
||||
### Record L1 Run
|
||||
|
||||
```bash
|
||||
curl -X POST https://artdag.example.com/assets/record-run \
|
||||
-H "Authorization: Bearer <token>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"run_id": "uuid-from-l1",
|
||||
"l1_server": "https://celery-artdag.rose-ash.com",
|
||||
"output_name": "my-rendered-video"
|
||||
}'
|
||||
```
|
||||
|
||||
### Publish L1 Cache Item
|
||||
|
||||
```bash
|
||||
curl -X POST https://artdag.example.com/assets/publish-cache \
|
||||
-H "Authorization: Bearer <token>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"content_hash": "abc123...",
|
||||
"l1_server": "https://celery-artdag.rose-ash.com",
|
||||
"name": "my-asset",
|
||||
"asset_type": "video"
|
||||
}'
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
L2 Server (FastAPI)
|
||||
│
|
||||
├── Web UI (Jinja2 + HTMX + Tailwind)
|
||||
│
|
||||
├── /assets → Asset Registry
|
||||
│ │
|
||||
│ └── PostgreSQL (assets table)
|
||||
│
|
||||
├── /users/{user}/outbox → ActivityPub
|
||||
│ │
|
||||
│ ├── Sign activities (RSA)
|
||||
│ └── PostgreSQL (activities table)
|
||||
│
|
||||
├── /anchors → OpenTimestamps
|
||||
│ │
|
||||
│ ├── Merkle tree construction
|
||||
│ └── Bitcoin anchoring
|
||||
│
|
||||
├── /auth/verify → L1 Token Verification
|
||||
│ │
|
||||
│ └── Scoped token validation
|
||||
│
|
||||
└── /storage → Storage Providers
|
||||
│
|
||||
├── S3 (boto3)
|
||||
├── IPFS (ipfs_client)
|
||||
└── Local filesystem
|
||||
```
|
||||
|
||||
## Federation
|
||||
|
||||
L2 implements ActivityPub for federated asset sharing.
|
||||
|
||||
### Discovery
|
||||
|
||||
```bash
|
||||
# Webfinger lookup
|
||||
curl "https://artdag.example.com/.well-known/webfinger?resource=acct:giles@artdag.example.com"
|
||||
```
|
||||
|
||||
### Actor Profile
|
||||
|
||||
```bash
|
||||
curl -H "Accept: application/activity+json" \
|
||||
https://artdag.example.com/users/giles
|
||||
```
|
||||
|
||||
### Outbox
|
||||
|
||||
```bash
|
||||
curl -H "Accept: application/activity+json" \
|
||||
https://artdag.example.com/users/giles/outbox
|
||||
```
|
||||
334
l2/anchoring.py
Normal file
334
l2/anchoring.py
Normal file
@@ -0,0 +1,334 @@
|
||||
# art-activity-pub/anchoring.py
|
||||
"""
|
||||
Merkle tree anchoring to Bitcoin via OpenTimestamps.
|
||||
|
||||
Provides provable timestamps for ActivityPub activities without running
|
||||
our own blockchain. Activities are hashed into a merkle tree, the root
|
||||
is submitted to OpenTimestamps (free), and the proof is stored on IPFS.
|
||||
|
||||
The merkle tree + OTS proof provides cryptographic evidence that
|
||||
activities existed at a specific time, anchored to Bitcoin.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Backup file location (should be on persistent volume)
|
||||
ANCHOR_BACKUP_DIR = Path(os.getenv("ANCHOR_BACKUP_DIR", "/data/anchors"))
|
||||
ANCHOR_BACKUP_FILE = ANCHOR_BACKUP_DIR / "anchors.jsonl"
|
||||
|
||||
# OpenTimestamps calendar servers
|
||||
OTS_SERVERS = [
|
||||
"https://a.pool.opentimestamps.org",
|
||||
"https://b.pool.opentimestamps.org",
|
||||
"https://a.pool.eternitywall.com",
|
||||
]
|
||||
|
||||
|
||||
def _ensure_backup_dir():
    """Ensure the anchor backup directory exists (creates parents; no-op if present)."""
    ANCHOR_BACKUP_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def build_merkle_tree(items: List[str]) -> Optional[dict]:
    """
    Construct a deterministic merkle tree over a list of activity IDs.

    Items are sorted first so the same set of IDs always yields the same
    root. When a level has an odd node count, the last node is paired
    with a copy of itself.

    Args:
        items: Activity IDs to include as leaves.

    Returns:
        Dict with ``root``, ``tree`` (all levels, leaves first),
        ``items`` (sorted), ``item_count`` and ``created_at``,
        or None when *items* is empty.
    """
    if not items:
        return None

    # Deterministic ordering: identical sets always produce identical roots.
    ordered = sorted(items)

    # Leaf level: SHA-256 of each item.
    level = [hashlib.sha256(entry.encode()).hexdigest() for entry in ordered]
    levels = [level]

    # Combine pairs upward until a single root remains.
    while len(level) > 1:
        parents = []
        for idx in range(0, len(level), 2):
            lhs = level[idx]
            # Odd node count: duplicate the last node as its own sibling.
            rhs = level[idx + 1] if idx + 1 < len(level) else lhs
            parents.append(hashlib.sha256((lhs + rhs).encode()).hexdigest())
        levels.append(parents)
        level = parents

    return {
        "root": level[0],
        "tree": levels,
        "items": ordered,
        "item_count": len(ordered),
        "created_at": datetime.now(timezone.utc).isoformat()
    }
|
||||
|
||||
|
||||
def get_merkle_proof(tree: dict, item: str) -> Optional[List[dict]]:
    """
    Build the merkle inclusion proof for *item*.

    Each proof step records a sibling hash and the side ("left"/"right")
    it occupies when recombining toward the root.

    Bug fix: when a level has an odd node count, build_merkle_tree pairs
    the last node with a copy of itself. The proof must therefore supply
    the node's own hash as the right-hand sibling; previously that step
    was skipped entirely, so proofs for such items failed verification.

    Args:
        tree: Merkle tree dict from build_merkle_tree
        item: The item to prove membership for

    Returns:
        List of {"hash", "position"} proof steps, or None if item not in tree
    """
    items = tree["items"]
    if item not in items:
        return None

    # Leaves were built from the sorted item list.
    sorted_items = sorted(items)
    current_index = sorted_items.index(item)

    proof = []
    for level in tree["tree"][:-1]:  # the root level has no sibling
        sibling_index = current_index ^ 1  # XOR flips the pair bit
        if sibling_index < len(level):
            proof.append({
                "hash": level[sibling_index],
                "position": "right" if current_index % 2 == 0 else "left"
            })
        else:
            # Odd node: it was hashed with a copy of itself on the right.
            proof.append({
                "hash": level[current_index],
                "position": "right"
            })
        current_index //= 2

    return proof
|
||||
|
||||
|
||||
def verify_merkle_proof(item: str, proof: List[dict], root: str) -> bool:
    """
    Check a merkle inclusion proof against an expected root.

    Starting from the leaf hash of *item*, fold in each sibling hash on
    the side recorded in its proof step, then compare the final digest
    to *root*.

    Args:
        item: The item to verify
        proof: Proof steps from get_merkle_proof
        root: Expected merkle root

    Returns:
        True if proof is valid
    """
    digest = hashlib.sha256(item.encode()).hexdigest()

    for step in proof:
        if step["position"] == "right":
            pair = digest + step["hash"]
        else:
            pair = step["hash"] + digest
        digest = hashlib.sha256(pair.encode()).hexdigest()

    return digest == root
|
||||
|
||||
|
||||
def submit_to_opentimestamps(hash_hex: str) -> Optional[bytes]:
    """
    Ask an OpenTimestamps calendar server to timestamp a hash.

    Calendars in OTS_SERVERS are tried in order; the first 200 response
    wins. The returned proof is *incomplete* — it becomes complete once
    Bitcoin confirms (usually 1-2 hours); use upgrade_ots_proof to fetch
    the completed proof later.

    Args:
        hash_hex: Hex-encoded SHA256 hash to timestamp

    Returns:
        Incomplete .ots proof bytes, or None if every server failed
    """
    digest = bytes.fromhex(hash_hex)

    for calendar in OTS_SERVERS:
        try:
            response = requests.post(
                f"{calendar}/digest",
                data=digest,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=10
            )
        except Exception as e:
            logger.warning(f"OTS server {calendar} failed: {e}")
            continue
        if response.status_code == 200:
            logger.info(f"Submitted to OpenTimestamps via {calendar}")
            return response.content

    logger.error("All OpenTimestamps servers failed")
    return None
|
||||
|
||||
|
||||
def upgrade_ots_proof(ots_proof: bytes) -> Optional[bytes]:
    """
    Try to exchange an incomplete .ots proof for a completed one.

    Should be called periodically (e.g. hourly) until it succeeds;
    Bitcoin confirmation takes roughly 1-2 hours.

    Args:
        ots_proof: Incomplete .ots proof bytes

    Returns:
        Complete .ots proof bytes, or None if not yet confirmed
    """
    for calendar in OTS_SERVERS:
        try:
            response = requests.post(
                f"{calendar}/upgrade",
                data=ots_proof,
                headers={"Content-Type": "application/octet-stream"},
                timeout=10
            )
        except Exception as e:
            logger.warning(f"OTS upgrade via {calendar} failed: {e}")
            continue
        # A completed proof is strictly longer than the pending one.
        if response.status_code == 200 and len(response.content) > len(ots_proof):
            logger.info(f"OTS proof upgraded via {calendar}")
            return response.content

    return None
|
||||
|
||||
|
||||
def append_to_backup(anchor_record: dict):
    """
    Persist one anchor record to the JSONL backup file.

    Records serialize with sorted keys so identical records always
    produce identical lines.

    Args:
        anchor_record: Dict with anchor metadata
    """
    _ensure_backup_dir()

    line = json.dumps(anchor_record, sort_keys=True)
    with open(ANCHOR_BACKUP_FILE, "a") as backup:
        backup.write(line + "\n")

    logger.info(f"Anchor backed up to {ANCHOR_BACKUP_FILE}")
|
||||
|
||||
|
||||
def load_backup_anchors() -> List[dict]:
    """
    Read every anchor record from the JSONL backup file.

    Malformed lines are logged and skipped rather than aborting the load.

    Returns:
        List of anchor records (empty when no backup file exists)
    """
    if not ANCHOR_BACKUP_FILE.exists():
        return []

    records = []
    with open(ANCHOR_BACKUP_FILE, "r") as backup:
        for raw in backup:
            raw = raw.strip()
            if not raw:
                continue
            try:
                records.append(json.loads(raw))
            except json.JSONDecodeError:
                logger.warning(f"Invalid JSON in backup: {raw[:50]}...")

    return records
|
||||
|
||||
|
||||
def get_latest_anchor_from_backup() -> Optional[dict]:
    """Return the newest anchor record in the backup file, or None if empty."""
    history = load_backup_anchors()
    if not history:
        return None
    return history[-1]
|
||||
|
||||
|
||||
async def create_anchor(
    activity_ids: List[str],
    db_module,
    ipfs_module
) -> Optional[dict]:
    """
    Create a new anchor for a batch of activities.

    Pipeline: build a merkle tree over the activity IDs, publish the tree
    (and later the OTS proof) to IPFS, submit the root to OpenTimestamps,
    persist the anchor to the database, and append it to the JSONL backup
    file. Each stage is best-effort — a failure is logged and the later
    stages still run, so the backup record is always written.

    Args:
        activity_ids: List of activity UUIDs to anchor. Caller order is
            preserved in the first/last fields; the tree itself sorts them.
        db_module: Database module with anchor functions, or None to skip
            database persistence.
        ipfs_module: IPFS client module, or None to skip IPFS storage.

    Returns:
        Anchor record dict, or None when there was nothing to anchor
    """
    if not activity_ids:
        logger.info("No activities to anchor")
        return None

    # Build merkle tree
    tree = build_merkle_tree(activity_ids)
    if not tree:
        return None

    root = tree["root"]
    logger.info(f"Built merkle tree: {len(activity_ids)} activities, root={root[:16]}...")

    # Store tree on IPFS. Guard on ipfs_module so a missing client is
    # skipped cleanly instead of surfacing as a bogus "Failed to store
    # tree on IPFS" error (the OTS-proof branch below already guarded).
    tree_cid = None
    if ipfs_module:
        try:
            tree_cid = ipfs_module.add_json(tree)
            logger.info(f"Merkle tree stored on IPFS: {tree_cid}")
        except Exception as e:
            logger.error(f"Failed to store tree on IPFS: {e}")

    # Submit to OpenTimestamps (returns an incomplete proof, or None).
    ots_proof = submit_to_opentimestamps(root)

    # Store OTS proof on IPFS too
    ots_cid = None
    if ots_proof and ipfs_module:
        try:
            ots_cid = ipfs_module.add_bytes(ots_proof)
            logger.info(f"OTS proof stored on IPFS: {ots_cid}")
        except Exception as e:
            logger.warning(f"Failed to store OTS proof on IPFS: {e}")

    # Create anchor record; confirmation fields are filled in later once
    # the timestamp is upgraded.
    anchor_record = {
        "merkle_root": root,
        "tree_ipfs_cid": tree_cid,
        "ots_proof_cid": ots_cid,
        "activity_count": len(activity_ids),
        "first_activity_id": activity_ids[0],
        "last_activity_id": activity_ids[-1],
        "created_at": datetime.now(timezone.utc).isoformat(),
        "confirmed_at": None,
        "bitcoin_txid": None
    }

    # Save to database (best-effort)
    if db_module:
        try:
            await db_module.create_anchor(anchor_record)
            await db_module.mark_activities_anchored(activity_ids, root)
        except Exception as e:
            logger.error(f"Failed to save anchor to database: {e}")

    # Append to backup file (persistent) — always runs, even if earlier
    # stages failed, so the record survives a database outage.
    append_to_backup(anchor_record)

    return anchor_record
|
||||
116
l2/app/__init__.py
Normal file
116
l2/app/__init__.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Art-DAG L2 Server Application Factory.
|
||||
|
||||
Creates and configures the FastAPI application with all routers and middleware.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse, HTMLResponse
|
||||
|
||||
from artdag_common import create_jinja_env
|
||||
from artdag_common.middleware.auth import get_user_from_cookie
|
||||
|
||||
from .config import settings
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage database connection pool lifecycle.

    Opens the pool before the app starts serving and closes it on
    shutdown. NOTE(review): no try/finally — if serving raises, the
    pool is not closed on the way out; confirm this is acceptable.
    """
    # Imported lazily so importing this module does not require the db
    # module (and whatever it configures) to be ready.
    import db
    await db.init_pool()
    yield
    await db.close_pool()
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """
    Create and configure the L2 FastAPI application.

    Wires up the coop-fragment middleware, Jinja2 templates, a custom
    404 handler, and all feature routers (auth, users, assets,
    activities, anchors, storage, renderers, federation).

    Returns:
        Configured FastAPI instance
    """
    app = FastAPI(
        title="Art-DAG L2 Server",
        description="ActivityPub server for Art-DAG ownership and federation",
        version="1.0.0",
        lifespan=lifespan,  # opens/closes the DB pool around the app's lifetime
    )

    # Coop fragment pre-fetch — inject nav-tree, auth-menu, cart-mini
    # Path prefixes that never get fragments (auth, federation, static, health).
    _FRAG_SKIP = ("/auth/", "/.well-known/", "/health",
                  "/internal/", "/static/", "/inbox")

    @app.middleware("http")
    async def coop_fragments_middleware(request: Request, call_next):
        # Only full-page GET renders receive fragments; non-GET requests,
        # skipped paths, and HTMX partials (hx-request header) get empty
        # strings so templates can interpolate unconditionally.
        path = request.url.path
        if (
            request.method != "GET"
            or any(path.startswith(p) for p in _FRAG_SKIP)
            or request.headers.get("hx-request")
        ):
            request.state.nav_tree_html = ""
            request.state.auth_menu_html = ""
            request.state.cart_mini_html = ""
            return await call_next(request)

        # Lazy import — presumably keeps fragment support optional at
        # startup / avoids an import cycle; confirm.
        from artdag_common.fragments import fetch_fragments as _fetch_frags

        user = get_user_from_cookie(request)
        auth_params = {"email": user.email} if user and user.email else {}
        nav_params = {"app_name": "artdag", "path": path}

        try:
            nav_tree_html, auth_menu_html, cart_mini_html = await _fetch_frags([
                ("blog", "nav-tree", nav_params),
                ("account", "auth-menu", auth_params or None),
                ("cart", "cart-mini", None),
            ])
        except Exception:
            # Fragments are cosmetic; any fetch failure degrades to empty HTML.
            nav_tree_html = auth_menu_html = cart_mini_html = ""

        request.state.nav_tree_html = nav_tree_html
        request.state.auth_menu_html = auth_menu_html
        request.state.cart_mini_html = cart_mini_html

        return await call_next(request)

    # Initialize Jinja2 templates
    template_dir = Path(__file__).parent / "templates"
    app.state.templates = create_jinja_env(template_dir)

    # Custom 404 handler: HTML page for browsers, JSON for API clients.
    @app.exception_handler(404)
    async def not_found_handler(request: Request, exc):
        from artdag_common.middleware import wants_html
        if wants_html(request):
            from artdag_common import render
            return render(app.state.templates, "404.html", request,
                user=None,
            )
        return JSONResponse({"detail": "Not found"}, status_code=404)

    # Include routers (imported inside the factory, not at module top level)
    from .routers import auth, assets, activities, anchors, storage, users, renderers

    # Root routes
    app.include_router(auth.router, prefix="/auth", tags=["auth"])
    app.include_router(users.router, tags=["users"])

    # Feature routers
    app.include_router(assets.router, prefix="/assets", tags=["assets"])
    app.include_router(activities.router, prefix="/activities", tags=["activities"])
    app.include_router(anchors.router, prefix="/anchors", tags=["anchors"])
    app.include_router(storage.router, prefix="/storage", tags=["storage"])
    app.include_router(renderers.router, prefix="/renderers", tags=["renderers"])

    # WebFinger and ActivityPub discovery
    from .routers import federation
    app.include_router(federation.router, tags=["federation"])

    return app


# Create the default app instance
app = create_app()
|
||||
56
l2/app/config.py
Normal file
56
l2/app/config.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
L2 Server Configuration.
|
||||
|
||||
Environment-based settings for the ActivityPub server.
|
||||
"""
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@dataclass
class Settings:
    """L2 Server configuration.

    Field defaults that call ``os.environ.get`` are evaluated once, when this
    class is defined (module import time) — changing the environment later
    does not affect an existing process. Fields defaulted to ``None`` are
    filled in by ``__post_init__``.
    """

    # Domain and URLs
    domain: str = os.environ.get("ARTDAG_DOMAIN", "artdag.rose-ash.com")
    l1_public_url: str = os.environ.get("L1_PUBLIC_URL", "https://celery-artdag.rose-ash.com")
    effects_repo_url: str = os.environ.get("EFFECTS_REPO_URL", "https://git.rose-ash.com/art-dag/effects")
    ipfs_gateway_url: str = os.environ.get("IPFS_GATEWAY_URL", "")

    # L1 servers (parsed from the comma-separated L1_SERVERS env var in __post_init__)
    l1_servers: list = None

    # Cookie domain for cross-subdomain auth (derived from `domain` unless
    # COOKIE_DOMAIN is set; stays None for single-label domains)
    cookie_domain: str = None

    # Data directory (created on disk in __post_init__)
    data_dir: Path = None

    # JWT settings
    # NOTE(review): jwt_secret defaults to "" — nothing here rejects an empty
    # secret; confirm the auth module fails closed when it is unset.
    jwt_secret: str = os.environ.get("JWT_SECRET", "")
    jwt_algorithm: str = "HS256"
    access_token_expire_minutes: int = 60 * 24 * 30  # 30 days

    def __post_init__(self):
        # Parse L1 servers: comma-separated list, blanks stripped and dropped.
        l1_str = os.environ.get("L1_SERVERS", "https://celery-artdag.rose-ash.com")
        self.l1_servers = [s.strip() for s in l1_str.split(",") if s.strip()]

        # Cookie domain: explicit override wins; otherwise use the registrable
        # suffix (last two labels) with a leading dot so subdomains share auth.
        env_cookie = os.environ.get("COOKIE_DOMAIN")
        if env_cookie:
            self.cookie_domain = env_cookie
        else:
            parts = self.domain.split(".")
            if len(parts) >= 2:
                self.cookie_domain = "." + ".".join(parts[-2:])

        # Data directory: created eagerly (filesystem side effect at import
        # time, since the module-level `settings` below instantiates this).
        self.data_dir = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2")))
        self.data_dir.mkdir(parents=True, exist_ok=True)
        (self.data_dir / "assets").mkdir(exist_ok=True)


# Singleton settings instance used throughout the app.
settings = Settings()
|
||||
80
l2/app/dependencies.py
Normal file
80
l2/app/dependencies.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""
|
||||
L2 Server Dependency Injection.
|
||||
|
||||
Provides common dependencies for routes.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Request, HTTPException, Depends
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
|
||||
from .config import settings
|
||||
|
||||
security = HTTPBearer(auto_error=False)
|
||||
|
||||
|
||||
def get_templates(request: Request):
    """Return the Jinja2 template environment stored on the app state."""
    templates = request.app.state.templates
    return templates
|
||||
|
||||
|
||||
async def get_current_user(request: Request) -> Optional[dict]:
    """
    Resolve the authenticated user for this request, if any.

    Looks for a JWT in the ``auth_token`` cookie first, then in a
    ``Bearer`` Authorization header. Returns a user dict (username,
    actor_id, token, plus all token claims) or None when the request
    carries no valid, decodable token.
    """
    from auth import verify_token, get_token_claims

    # Cookie wins; fall back to the Authorization header.
    header = request.headers.get("Authorization", "")
    token = request.cookies.get("auth_token") or (
        header[7:] if header.startswith("Bearer ") else None
    )
    if not token:
        return None

    username = verify_token(token)
    if not username:
        return None

    claims = get_token_claims(token)
    if not claims:
        return None

    # Claims are merged last so token claims take precedence on key clash.
    user = {
        "username": username,
        "actor_id": f"https://{settings.domain}/users/{username}",
        "token": token,
    }
    user.update(claims)
    return user
|
||||
|
||||
|
||||
async def require_auth(request: Request) -> dict:
    """
    Resolve the current user or abort the request.

    Raises HTTPException 401 when no valid credentials are presented.
    """
    current = await get_current_user(request)
    if not current:
        raise HTTPException(401, "Authentication required")
    return current
|
||||
|
||||
|
||||
def get_user_from_cookie(request: Request) -> Optional[str]:
    """Return the username from the auth cookie, or None (for HTML pages)."""
    from auth import verify_token

    cookie_token = request.cookies.get("auth_token")
    return verify_token(cookie_token) if cookie_token else None
|
||||
25
l2/app/routers/__init__.py
Normal file
25
l2/app/routers/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""
|
||||
L2 Server Routers.
|
||||
|
||||
Each router handles a specific domain of functionality.
|
||||
"""
|
||||
|
||||
from . import auth
|
||||
from . import assets
|
||||
from . import activities
|
||||
from . import anchors
|
||||
from . import storage
|
||||
from . import users
|
||||
from . import renderers
|
||||
from . import federation
|
||||
|
||||
__all__ = [
|
||||
"auth",
|
||||
"assets",
|
||||
"activities",
|
||||
"anchors",
|
||||
"storage",
|
||||
"users",
|
||||
"renderers",
|
||||
"federation",
|
||||
]
|
||||
99
l2/app/routers/activities.py
Normal file
99
l2/app/routers/activities.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
Activity routes for L2 server.
|
||||
|
||||
Handles ActivityPub activities and outbox.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, require_auth, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_activities(
|
||||
request: Request,
|
||||
offset: int = 0,
|
||||
limit: int = 20,
|
||||
):
|
||||
"""List recent activities."""
|
||||
import db
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
|
||||
activities, total = await db.get_activities_paginated(limit=limit, offset=offset)
|
||||
has_more = offset + len(activities) < total
|
||||
|
||||
if wants_json(request):
|
||||
return {"activities": activities, "offset": offset, "limit": limit}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "activities/list.html", request,
|
||||
activities=activities,
|
||||
user={"username": username} if username else None,
|
||||
offset=offset,
|
||||
limit=limit,
|
||||
has_more=has_more,
|
||||
active_tab="activities",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{activity_id}")
|
||||
async def get_activity(
|
||||
activity_id: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Get activity details."""
|
||||
import db
|
||||
|
||||
activity = await db.get_activity(activity_id)
|
||||
if not activity:
|
||||
raise HTTPException(404, "Activity not found")
|
||||
|
||||
# ActivityPub response
|
||||
if "application/activity+json" in request.headers.get("accept", ""):
|
||||
return JSONResponse(
|
||||
content=activity.get("activity_json", activity),
|
||||
media_type="application/activity+json",
|
||||
)
|
||||
|
||||
if wants_json(request):
|
||||
return activity
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
templates = get_templates(request)
|
||||
return render(templates, "activities/detail.html", request,
|
||||
activity=activity,
|
||||
user={"username": username} if username else None,
|
||||
active_tab="activities",
|
||||
)
|
||||
|
||||
|
||||
@router.post("")
|
||||
async def create_activity(
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Create a new activity (internal use)."""
|
||||
import db
|
||||
import json
|
||||
|
||||
body = await request.json()
|
||||
|
||||
activity_id = await db.create_activity(
|
||||
actor=user["actor_id"],
|
||||
activity_type=body.get("type", "Create"),
|
||||
object_data=body.get("object"),
|
||||
)
|
||||
|
||||
return {"activity_id": activity_id, "created": True}
|
||||
203
l2/app/routers/anchors.py
Normal file
203
l2/app/routers/anchors.py
Normal file
@@ -0,0 +1,203 @@
|
||||
"""
|
||||
Anchor routes for L2 server.
|
||||
|
||||
Handles OpenTimestamps anchoring and verification.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException
|
||||
from fastapi.responses import HTMLResponse, FileResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, require_auth, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_anchors(
|
||||
request: Request,
|
||||
offset: int = 0,
|
||||
limit: int = 20,
|
||||
):
|
||||
"""List user's anchors."""
|
||||
import db
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
if not username:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/login", status_code=302)
|
||||
|
||||
anchors = await db.get_anchors_paginated(offset=offset, limit=limit)
|
||||
has_more = len(anchors) >= limit
|
||||
|
||||
if wants_json(request):
|
||||
return {"anchors": anchors, "offset": offset, "limit": limit}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "anchors/list.html", request,
|
||||
anchors=anchors,
|
||||
user={"username": username},
|
||||
offset=offset,
|
||||
limit=limit,
|
||||
has_more=has_more,
|
||||
active_tab="anchors",
|
||||
)
|
||||
|
||||
|
||||
@router.post("")
|
||||
async def create_anchor(
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Create a new timestamp anchor."""
|
||||
import db
|
||||
import anchoring
|
||||
|
||||
body = await request.json()
|
||||
content_hash = body.get("content_hash")
|
||||
|
||||
if not content_hash:
|
||||
raise HTTPException(400, "content_hash required")
|
||||
|
||||
# Create OTS timestamp
|
||||
try:
|
||||
ots_data = await anchoring.create_timestamp(content_hash)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create timestamp: {e}")
|
||||
raise HTTPException(500, f"Timestamping failed: {e}")
|
||||
|
||||
# Save anchor
|
||||
anchor_id = await db.create_anchor(
|
||||
username=user["username"],
|
||||
content_hash=content_hash,
|
||||
ots_data=ots_data,
|
||||
)
|
||||
|
||||
return {
|
||||
"anchor_id": anchor_id,
|
||||
"content_hash": content_hash,
|
||||
"status": "pending",
|
||||
"message": "Anchor created, pending Bitcoin confirmation",
|
||||
}
|
||||
|
||||
|
||||
@router.get("/{anchor_id}")
|
||||
async def get_anchor(
|
||||
anchor_id: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Get anchor details."""
|
||||
import db
|
||||
|
||||
anchor = await db.get_anchor(anchor_id)
|
||||
if not anchor:
|
||||
raise HTTPException(404, "Anchor not found")
|
||||
|
||||
if wants_json(request):
|
||||
return anchor
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
templates = get_templates(request)
|
||||
return render(templates, "anchors/detail.html", request,
|
||||
anchor=anchor,
|
||||
user={"username": username} if username else None,
|
||||
active_tab="anchors",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{anchor_id}/ots")
|
||||
async def download_ots(anchor_id: str):
|
||||
"""Download OTS proof file."""
|
||||
import db
|
||||
|
||||
anchor = await db.get_anchor(anchor_id)
|
||||
if not anchor:
|
||||
raise HTTPException(404, "Anchor not found")
|
||||
|
||||
ots_data = anchor.get("ots_data")
|
||||
if not ots_data:
|
||||
raise HTTPException(404, "OTS data not available")
|
||||
|
||||
# Return as file download
|
||||
from fastapi.responses import Response
|
||||
return Response(
|
||||
content=ots_data,
|
||||
media_type="application/octet-stream",
|
||||
headers={
|
||||
"Content-Disposition": f"attachment; filename={anchor['content_hash']}.ots"
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{anchor_id}/verify")
|
||||
async def verify_anchor(
|
||||
anchor_id: str,
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Verify anchor status (check Bitcoin confirmation)."""
|
||||
import db
|
||||
import anchoring
|
||||
|
||||
anchor = await db.get_anchor(anchor_id)
|
||||
if not anchor:
|
||||
raise HTTPException(404, "Anchor not found")
|
||||
|
||||
try:
|
||||
result = await anchoring.verify_timestamp(
|
||||
anchor["content_hash"],
|
||||
anchor["ots_data"],
|
||||
)
|
||||
|
||||
# Update anchor status
|
||||
if result.get("confirmed"):
|
||||
await db.update_anchor(
|
||||
anchor_id,
|
||||
status="confirmed",
|
||||
bitcoin_block=result.get("block_height"),
|
||||
confirmed_at=result.get("confirmed_at"),
|
||||
)
|
||||
|
||||
if wants_html(request):
|
||||
if result.get("confirmed"):
|
||||
return HTMLResponse(
|
||||
f'<span class="text-green-400">Confirmed in block {result["block_height"]}</span>'
|
||||
)
|
||||
return HTMLResponse('<span class="text-yellow-400">Pending confirmation</span>')
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Verification failed: {e}")
|
||||
raise HTTPException(500, f"Verification failed: {e}")
|
||||
|
||||
|
||||
@router.delete("/{anchor_id}")
|
||||
async def delete_anchor(
|
||||
anchor_id: str,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Delete an anchor."""
|
||||
import db
|
||||
|
||||
anchor = await db.get_anchor(anchor_id)
|
||||
if not anchor:
|
||||
raise HTTPException(404, "Anchor not found")
|
||||
|
||||
if anchor.get("username") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
success = await db.delete_anchor(anchor_id)
|
||||
if not success:
|
||||
raise HTTPException(400, "Failed to delete anchor")
|
||||
|
||||
return {"deleted": True}
|
||||
244
l2/app/routers/assets.py
Normal file
244
l2/app/routers/assets.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""
|
||||
Asset management routes for L2 server.
|
||||
|
||||
Handles asset registration, listing, and publishing.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional, List
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, Form
|
||||
from fastapi.responses import HTMLResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, require_auth, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AssetCreate(BaseModel):
    """Request body for registering a new asset."""

    name: str  # human-readable asset name; also used as the asset ID by create_asset
    content_hash: str  # content-address of the asset bytes
    ipfs_cid: Optional[str] = None  # set when already pinned to IPFS
    asset_type: str  # image, video, effect, recipe
    tags: List[str] = []  # pydantic deep-copies mutable defaults per instance
    metadata: dict = {}
    provenance: Optional[dict] = None  # optional provenance document
|
||||
|
||||
|
||||
class RecordRunRequest(BaseModel):
    """Request body for recording a completed render run (see record_run)."""

    run_id: str  # unique run identifier; first 8 chars appear in the asset name
    recipe: str  # recipe that produced the output
    inputs: List[str]  # content hashes / IDs consumed by the run
    output_hash: str  # content-address of the run's output
    ipfs_cid: Optional[str] = None  # set when the output is already on IPFS
    provenance: Optional[dict] = None
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_assets(
|
||||
request: Request,
|
||||
offset: int = 0,
|
||||
limit: int = 20,
|
||||
asset_type: Optional[str] = None,
|
||||
):
|
||||
"""List user's assets."""
|
||||
import db
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
if not username:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/login", status_code=302)
|
||||
|
||||
assets = await db.get_user_assets(username, offset=offset, limit=limit, asset_type=asset_type)
|
||||
has_more = len(assets) >= limit
|
||||
|
||||
if wants_json(request):
|
||||
return {"assets": assets, "offset": offset, "limit": limit, "has_more": has_more}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "assets/list.html", request,
|
||||
assets=assets,
|
||||
user={"username": username},
|
||||
offset=offset,
|
||||
limit=limit,
|
||||
has_more=has_more,
|
||||
active_tab="assets",
|
||||
)
|
||||
|
||||
|
||||
@router.post("")
|
||||
async def create_asset(
|
||||
req: AssetCreate,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Register a new asset."""
|
||||
import db
|
||||
|
||||
asset = await db.create_asset({
|
||||
"owner": user["username"],
|
||||
"name": req.name,
|
||||
"content_hash": req.content_hash,
|
||||
"ipfs_cid": req.ipfs_cid,
|
||||
"asset_type": req.asset_type,
|
||||
"tags": req.tags or [],
|
||||
"metadata": req.metadata or {},
|
||||
"provenance": req.provenance,
|
||||
})
|
||||
|
||||
if not asset:
|
||||
raise HTTPException(400, "Failed to create asset")
|
||||
|
||||
return {"asset_id": asset.get("name"), "message": "Asset registered"}
|
||||
|
||||
|
||||
@router.get("/{asset_id}")
|
||||
async def get_asset(
|
||||
asset_id: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Get asset details."""
|
||||
import db
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
|
||||
asset = await db.get_asset(asset_id)
|
||||
if not asset:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
|
||||
if wants_json(request):
|
||||
return asset
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "assets/detail.html", request,
|
||||
asset=asset,
|
||||
user={"username": username} if username else None,
|
||||
active_tab="assets",
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/{asset_id}")
|
||||
async def delete_asset(
|
||||
asset_id: str,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Delete an asset."""
|
||||
import db
|
||||
|
||||
asset = await db.get_asset(asset_id)
|
||||
if not asset:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
|
||||
if asset.get("owner") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
success = await db.delete_asset(asset_id)
|
||||
if not success:
|
||||
raise HTTPException(400, "Failed to delete asset")
|
||||
|
||||
return {"deleted": True}
|
||||
|
||||
|
||||
@router.post("/record-run")
|
||||
async def record_run(
|
||||
req: RecordRunRequest,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Record a run completion and register output as asset."""
|
||||
import db
|
||||
|
||||
# Create asset for output
|
||||
asset = await db.create_asset({
|
||||
"owner": user["username"],
|
||||
"name": f"{req.recipe}-{req.run_id[:8]}",
|
||||
"content_hash": req.output_hash,
|
||||
"ipfs_cid": req.ipfs_cid,
|
||||
"asset_type": "render",
|
||||
"metadata": {
|
||||
"run_id": req.run_id,
|
||||
"recipe": req.recipe,
|
||||
"inputs": req.inputs,
|
||||
},
|
||||
"provenance": req.provenance,
|
||||
})
|
||||
asset_id = asset.get("name") if asset else None
|
||||
|
||||
# Record run
|
||||
await db.record_run(
|
||||
run_id=req.run_id,
|
||||
username=user["username"],
|
||||
recipe=req.recipe,
|
||||
inputs=req.inputs or [],
|
||||
output_hash=req.output_hash,
|
||||
ipfs_cid=req.ipfs_cid,
|
||||
asset_id=asset_id,
|
||||
)
|
||||
|
||||
return {
|
||||
"run_id": req.run_id,
|
||||
"asset_id": asset_id,
|
||||
"recorded": True,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/by-run-id/{run_id}")
|
||||
async def get_asset_by_run_id(run_id: str):
|
||||
"""Get asset by run ID (for L1 cache lookup)."""
|
||||
import db
|
||||
|
||||
run = await db.get_run(run_id)
|
||||
if not run:
|
||||
raise HTTPException(404, "Run not found")
|
||||
|
||||
return {
|
||||
"run_id": run_id,
|
||||
"output_hash": run.get("output_hash"),
|
||||
"ipfs_cid": run.get("ipfs_cid"),
|
||||
"provenance_cid": run.get("provenance_cid"),
|
||||
}
|
||||
|
||||
|
||||
@router.post("/{asset_id}/publish")
|
||||
async def publish_asset(
|
||||
asset_id: str,
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Publish asset to IPFS."""
|
||||
import db
|
||||
import ipfs_client
|
||||
|
||||
asset = await db.get_asset(asset_id)
|
||||
if not asset:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
|
||||
if asset.get("owner") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
# Already published?
|
||||
if asset.get("ipfs_cid"):
|
||||
return {"ipfs_cid": asset["ipfs_cid"], "already_published": True}
|
||||
|
||||
# Get content from L1
|
||||
content_hash = asset.get("content_hash")
|
||||
for l1_url in settings.l1_servers:
|
||||
try:
|
||||
import requests
|
||||
resp = requests.get(f"{l1_url}/cache/{content_hash}/raw", timeout=30)
|
||||
if resp.status_code == 200:
|
||||
# Pin to IPFS
|
||||
cid = await ipfs_client.add_bytes(resp.content)
|
||||
if cid:
|
||||
await db.update_asset(asset_id, {"ipfs_cid": cid})
|
||||
return {"ipfs_cid": cid, "published": True}
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to fetch from {l1_url}: {e}")
|
||||
|
||||
raise HTTPException(400, "Failed to publish - content not found on any L1")
|
||||
223
l2/app/routers/auth.py
Normal file
223
l2/app/routers/auth.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""
|
||||
Authentication routes for L2 server.
|
||||
|
||||
Handles login, registration, logout, and token verification.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Request, Form, HTTPException, Depends
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
security = HTTPBearer(auto_error=False)
|
||||
|
||||
|
||||
@router.get("/login", response_class=HTMLResponse)
|
||||
async def login_page(request: Request, return_to: str = None):
|
||||
"""Login page."""
|
||||
username = get_user_from_cookie(request)
|
||||
|
||||
if username:
|
||||
templates = get_templates(request)
|
||||
return render(templates, "auth/already_logged_in.html", request,
|
||||
user={"username": username},
|
||||
)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "auth/login.html", request,
|
||||
return_to=return_to,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/login", response_class=HTMLResponse)
|
||||
async def login_submit(
|
||||
request: Request,
|
||||
username: str = Form(...),
|
||||
password: str = Form(...),
|
||||
return_to: str = Form(None),
|
||||
):
|
||||
"""Handle login form submission."""
|
||||
from auth import authenticate_user, create_access_token
|
||||
|
||||
if not username or not password:
|
||||
return HTMLResponse(
|
||||
'<div class="text-red-400">Username and password are required</div>'
|
||||
)
|
||||
|
||||
user = await authenticate_user(settings.data_dir, username.strip(), password)
|
||||
if not user:
|
||||
return HTMLResponse(
|
||||
'<div class="text-red-400">Invalid username or password</div>'
|
||||
)
|
||||
|
||||
token = create_access_token(user.username, l2_server=f"https://{settings.domain}")
|
||||
|
||||
# Handle return_to redirect
|
||||
if return_to and return_to.startswith("http"):
|
||||
separator = "&" if "?" in return_to else "?"
|
||||
redirect_url = f"{return_to}{separator}auth_token={token.access_token}"
|
||||
response = HTMLResponse(f'''
|
||||
<div class="text-green-400">Login successful! Redirecting...</div>
|
||||
<script>window.location.href = "{redirect_url}";</script>
|
||||
''')
|
||||
else:
|
||||
response = HTMLResponse('''
|
||||
<div class="text-green-400">Login successful! Redirecting...</div>
|
||||
<script>window.location.href = "/";</script>
|
||||
''')
|
||||
|
||||
response.set_cookie(
|
||||
key="auth_token",
|
||||
value=token.access_token,
|
||||
httponly=True,
|
||||
max_age=60 * 60 * 24 * 30,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/register", response_class=HTMLResponse)
|
||||
async def register_page(request: Request):
|
||||
"""Registration page."""
|
||||
username = get_user_from_cookie(request)
|
||||
|
||||
if username:
|
||||
templates = get_templates(request)
|
||||
return render(templates, "auth/already_logged_in.html", request,
|
||||
user={"username": username},
|
||||
)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "auth/register.html", request)
|
||||
|
||||
|
||||
@router.post("/register", response_class=HTMLResponse)
|
||||
async def register_submit(
|
||||
request: Request,
|
||||
username: str = Form(...),
|
||||
password: str = Form(...),
|
||||
password2: str = Form(...),
|
||||
email: str = Form(None),
|
||||
):
|
||||
"""Handle registration form submission."""
|
||||
from auth import create_user, create_access_token
|
||||
|
||||
if not username or not password:
|
||||
return HTMLResponse('<div class="text-red-400">Username and password are required</div>')
|
||||
|
||||
if password != password2:
|
||||
return HTMLResponse('<div class="text-red-400">Passwords do not match</div>')
|
||||
|
||||
if len(password) < 6:
|
||||
return HTMLResponse('<div class="text-red-400">Password must be at least 6 characters</div>')
|
||||
|
||||
try:
|
||||
user = await create_user(settings.data_dir, username.strip(), password, email)
|
||||
except ValueError as e:
|
||||
return HTMLResponse(f'<div class="text-red-400">{str(e)}</div>')
|
||||
|
||||
token = create_access_token(user.username, l2_server=f"https://{settings.domain}")
|
||||
|
||||
response = HTMLResponse('''
|
||||
<div class="text-green-400">Registration successful! Redirecting...</div>
|
||||
<script>window.location.href = "/";</script>
|
||||
''')
|
||||
response.set_cookie(
|
||||
key="auth_token",
|
||||
value=token.access_token,
|
||||
httponly=True,
|
||||
max_age=60 * 60 * 24 * 30,
|
||||
samesite="lax",
|
||||
secure=True,
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/logout")
|
||||
async def logout(request: Request):
|
||||
"""Handle logout."""
|
||||
import db
|
||||
import requests
|
||||
from auth import get_token_claims
|
||||
|
||||
token = request.cookies.get("auth_token")
|
||||
claims = get_token_claims(token) if token else None
|
||||
username = claims.get("sub") if claims else None
|
||||
|
||||
if username and token and claims:
|
||||
# Revoke token in database
|
||||
token_hash = hashlib.sha256(token.encode()).hexdigest()
|
||||
expires_at = datetime.fromtimestamp(claims.get("exp", 0), tz=timezone.utc)
|
||||
await db.revoke_token(token_hash, username, expires_at)
|
||||
|
||||
# Revoke on attached L1 servers
|
||||
attached = await db.get_user_renderers(username)
|
||||
for l1_url in attached:
|
||||
try:
|
||||
requests.post(
|
||||
f"{l1_url}/auth/revoke-user",
|
||||
json={"username": username, "l2_server": f"https://{settings.domain}"},
|
||||
timeout=5,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
response = RedirectResponse(url="/", status_code=302)
|
||||
response.delete_cookie("auth_token")
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/verify")
|
||||
async def verify_token(
|
||||
request: Request,
|
||||
credentials: HTTPAuthorizationCredentials = Depends(security),
|
||||
):
|
||||
"""
|
||||
Verify a token is valid.
|
||||
|
||||
Called by L1 servers to verify tokens during auth callback.
|
||||
Returns user info if valid, 401 if not.
|
||||
"""
|
||||
import db
|
||||
from auth import verify_token as verify_jwt, get_token_claims
|
||||
|
||||
# Get token from Authorization header or query param
|
||||
token = None
|
||||
if credentials:
|
||||
token = credentials.credentials
|
||||
else:
|
||||
# Try Authorization header manually (for clients that don't use Bearer format)
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
if auth_header.startswith("Bearer "):
|
||||
token = auth_header[7:]
|
||||
|
||||
if not token:
|
||||
raise HTTPException(401, "No token provided")
|
||||
|
||||
# Verify JWT signature and expiry
|
||||
username = verify_jwt(token)
|
||||
if not username:
|
||||
raise HTTPException(401, "Invalid or expired token")
|
||||
|
||||
# Check if token is revoked
|
||||
claims = get_token_claims(token)
|
||||
if claims:
|
||||
token_hash = hashlib.sha256(token.encode()).hexdigest()
|
||||
if await db.is_token_revoked(token_hash):
|
||||
raise HTTPException(401, "Token has been revoked")
|
||||
|
||||
return {
|
||||
"valid": True,
|
||||
"username": username,
|
||||
"claims": claims,
|
||||
}
|
||||
115
l2/app/routers/federation.py
Normal file
115
l2/app/routers/federation.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""
|
||||
Federation routes for L2 server.
|
||||
|
||||
Handles WebFinger, nodeinfo, and ActivityPub discovery.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Request, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from ..config import settings
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/.well-known/webfinger")
|
||||
async def webfinger(resource: str):
|
||||
"""WebFinger endpoint for actor discovery."""
|
||||
import db
|
||||
|
||||
# Parse resource (acct:username@domain)
|
||||
if not resource.startswith("acct:"):
|
||||
raise HTTPException(400, "Invalid resource format")
|
||||
|
||||
parts = resource[5:].split("@")
|
||||
if len(parts) != 2:
|
||||
raise HTTPException(400, "Invalid resource format")
|
||||
|
||||
username, domain = parts
|
||||
|
||||
if domain != settings.domain:
|
||||
raise HTTPException(404, "User not on this server")
|
||||
|
||||
user = await db.get_user(username)
|
||||
if not user:
|
||||
raise HTTPException(404, "User not found")
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"subject": resource,
|
||||
"aliases": [f"https://{settings.domain}/users/{username}"],
|
||||
"links": [
|
||||
{
|
||||
"rel": "self",
|
||||
"type": "application/activity+json",
|
||||
"href": f"https://{settings.domain}/users/{username}",
|
||||
},
|
||||
{
|
||||
"rel": "http://webfinger.net/rel/profile-page",
|
||||
"type": "text/html",
|
||||
"href": f"https://{settings.domain}/users/{username}",
|
||||
},
|
||||
],
|
||||
},
|
||||
media_type="application/jrd+json",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/.well-known/nodeinfo")
|
||||
async def nodeinfo_index():
|
||||
"""NodeInfo index."""
|
||||
return JSONResponse(
|
||||
content={
|
||||
"links": [
|
||||
{
|
||||
"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
|
||||
"href": f"https://{settings.domain}/nodeinfo/2.0",
|
||||
}
|
||||
]
|
||||
},
|
||||
media_type="application/json",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/nodeinfo/2.0")
|
||||
async def nodeinfo():
|
||||
"""NodeInfo 2.0 endpoint."""
|
||||
import db
|
||||
|
||||
user_count = await db.count_users()
|
||||
activity_count = await db.count_activities()
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"version": "2.0",
|
||||
"software": {
|
||||
"name": "artdag",
|
||||
"version": "1.0.0",
|
||||
},
|
||||
"protocols": ["activitypub"],
|
||||
"usage": {
|
||||
"users": {"total": user_count, "activeMonth": user_count},
|
||||
"localPosts": activity_count,
|
||||
},
|
||||
"openRegistrations": True,
|
||||
"metadata": {
|
||||
"nodeName": "Art-DAG",
|
||||
"nodeDescription": "Content-addressable media processing with ActivityPub federation",
|
||||
},
|
||||
},
|
||||
media_type="application/json",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/.well-known/host-meta")
|
||||
async def host_meta():
|
||||
"""Host-meta endpoint."""
|
||||
xml = f'''<?xml version="1.0" encoding="UTF-8"?>
|
||||
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
|
||||
<Link rel="lrdd" type="application/xrd+xml" template="https://{settings.domain}/.well-known/webfinger?resource={{uri}}"/>
|
||||
</XRD>'''
|
||||
from fastapi.responses import Response
|
||||
return Response(content=xml, media_type="application/xrd+xml")
|
||||
93
l2/app/routers/renderers.py
Normal file
93
l2/app/routers/renderers.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""
|
||||
Renderer (L1) management routes for L2 server.
|
||||
|
||||
L1 servers are configured via environment variable L1_SERVERS.
|
||||
Users connect to renderers to create and run recipes.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, require_auth, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_renderer_health(url: str, timeout: float = 5.0) -> bool:
    """Return True when the renderer at *url* answers its root route with HTTP 200.

    Any error at all (connection refused, timeout, bad URL) is treated as
    "unhealthy" — this is a best-effort probe, not a diagnostic.
    """
    try:
        return requests.get(f"{url}/", timeout=timeout).status_code == 200
    except Exception:
        return False
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_renderers(request: Request):
|
||||
"""List configured L1 renderers."""
|
||||
# Get user if logged in
|
||||
username = get_user_from_cookie(request)
|
||||
user = None
|
||||
if username:
|
||||
# Get token for connection links
|
||||
token = request.cookies.get("auth_token", "")
|
||||
user = {"username": username, "token": token}
|
||||
|
||||
# Build server list with health status
|
||||
servers = []
|
||||
for url in settings.l1_servers:
|
||||
servers.append({
|
||||
"url": url,
|
||||
"healthy": check_renderer_health(url),
|
||||
})
|
||||
|
||||
if wants_json(request):
|
||||
return {"servers": servers}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "renderers/list.html", request,
|
||||
servers=servers,
|
||||
user=user,
|
||||
active_tab="renderers",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{path:path}")
|
||||
async def renderer_catchall(path: str, request: Request):
|
||||
"""Catch-all for invalid renderer URLs - redirect to list."""
|
||||
if wants_json(request):
|
||||
raise HTTPException(404, "Not found")
|
||||
return RedirectResponse(url="/renderers", status_code=302)
|
||||
|
||||
|
||||
@router.post("")
|
||||
@router.post("/{path:path}")
|
||||
async def renderer_post_catchall(request: Request, path: str = ""):
|
||||
"""
|
||||
Catch-all for POST requests.
|
||||
|
||||
The old API expected JSON POST to attach renderers.
|
||||
Now renderers are env-configured, so redirect to the list.
|
||||
"""
|
||||
if wants_json(request):
|
||||
return {
|
||||
"error": "Renderers are now configured via environment. See /renderers for available servers.",
|
||||
"servers": settings.l1_servers,
|
||||
}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "renderers/list.html", request,
|
||||
servers=[{"url": url, "healthy": check_renderer_health(url)} for url in settings.l1_servers],
|
||||
user=get_user_from_cookie(request),
|
||||
error="Renderers are configured by the system administrator. Use the Connect button to access a renderer.",
|
||||
active_tab="renderers",
|
||||
)
|
||||
254
l2/app/routers/storage.py
Normal file
254
l2/app/routers/storage.py
Normal file
@@ -0,0 +1,254 @@
|
||||
"""
|
||||
Storage provider routes for L2 server.
|
||||
|
||||
Manages user storage backends.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from fastapi import APIRouter, Request, Depends, HTTPException, Form
|
||||
from fastapi.responses import HTMLResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html, wants_json
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, require_auth, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Display metadata for every supported storage backend, keyed by
# provider_type. The keys double as the set of valid `provider_type` values
# accepted by the add-storage endpoints; `color` appears to be a Tailwind
# color name consumed by the storage templates — confirm against list.html.
STORAGE_PROVIDERS_INFO = {
    "pinata": {"name": "Pinata", "desc": "1GB free, IPFS pinning", "color": "blue"},
    "web3storage": {"name": "web3.storage", "desc": "IPFS + Filecoin", "color": "green"},
    "nftstorage": {"name": "NFT.Storage", "desc": "Free for NFTs", "color": "pink"},
    "infura": {"name": "Infura IPFS", "desc": "5GB free", "color": "orange"},
    "filebase": {"name": "Filebase", "desc": "5GB free, S3+IPFS", "color": "cyan"},
    "storj": {"name": "Storj", "desc": "25GB free", "color": "indigo"},
    "local": {"name": "Local Storage", "desc": "Your own disk", "color": "purple"},
}
|
||||
|
||||
|
||||
class AddStorageRequest(BaseModel):
    """JSON payload for POST /storage: register a new storage provider."""

    provider_type: str  # must be one of the STORAGE_PROVIDERS_INFO keys
    config: Dict[str, Any]  # provider-specific credentials/settings
    capacity_gb: int = 5  # declared capacity, persisted with the config
    provider_name: Optional[str] = None  # optional human-readable label
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def list_storage(request: Request):
|
||||
"""List user's storage providers."""
|
||||
import db
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
if not username:
|
||||
if wants_json(request):
|
||||
raise HTTPException(401, "Authentication required")
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/login", status_code=302)
|
||||
|
||||
storages = await db.get_user_storage(username)
|
||||
|
||||
if wants_json(request):
|
||||
return {"storages": storages}
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "storage/list.html", request,
|
||||
storages=storages,
|
||||
user={"username": username},
|
||||
providers_info=STORAGE_PROVIDERS_INFO,
|
||||
active_tab="storage",
|
||||
)
|
||||
|
||||
|
||||
@router.post("")
|
||||
async def add_storage(
|
||||
req: AddStorageRequest,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Add a storage provider."""
|
||||
import db
|
||||
import storage_providers
|
||||
|
||||
if req.provider_type not in STORAGE_PROVIDERS_INFO:
|
||||
raise HTTPException(400, f"Invalid provider type: {req.provider_type}")
|
||||
|
||||
# Test connection
|
||||
provider = storage_providers.create_provider(req.provider_type, {
|
||||
**req.config,
|
||||
"capacity_gb": req.capacity_gb,
|
||||
})
|
||||
if not provider:
|
||||
raise HTTPException(400, "Failed to create provider")
|
||||
|
||||
success, message = await provider.test_connection()
|
||||
if not success:
|
||||
raise HTTPException(400, f"Connection failed: {message}")
|
||||
|
||||
# Save
|
||||
storage_id = await db.add_user_storage(
|
||||
username=user["username"],
|
||||
provider_type=req.provider_type,
|
||||
provider_name=req.provider_name,
|
||||
config=req.config,
|
||||
capacity_gb=req.capacity_gb,
|
||||
)
|
||||
|
||||
return {"id": storage_id, "message": "Storage provider added"}
|
||||
|
||||
|
||||
@router.post("/add", response_class=HTMLResponse)
|
||||
async def add_storage_form(
|
||||
request: Request,
|
||||
provider_type: str = Form(...),
|
||||
provider_name: Optional[str] = Form(None),
|
||||
capacity_gb: int = Form(5),
|
||||
api_key: Optional[str] = Form(None),
|
||||
secret_key: Optional[str] = Form(None),
|
||||
api_token: Optional[str] = Form(None),
|
||||
project_id: Optional[str] = Form(None),
|
||||
project_secret: Optional[str] = Form(None),
|
||||
access_key: Optional[str] = Form(None),
|
||||
bucket: Optional[str] = Form(None),
|
||||
path: Optional[str] = Form(None),
|
||||
):
|
||||
"""Add storage via HTML form."""
|
||||
import db
|
||||
import storage_providers
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
if not username:
|
||||
return HTMLResponse('<div class="text-red-400">Not authenticated</div>', status_code=401)
|
||||
|
||||
# Build config
|
||||
config = {}
|
||||
if provider_type == "pinata":
|
||||
if not api_key or not secret_key:
|
||||
return HTMLResponse('<div class="text-red-400">Pinata requires API Key and Secret Key</div>')
|
||||
config = {"api_key": api_key, "secret_key": secret_key}
|
||||
elif provider_type in ["web3storage", "nftstorage"]:
|
||||
if not api_token:
|
||||
return HTMLResponse(f'<div class="text-red-400">{provider_type} requires API Token</div>')
|
||||
config = {"api_token": api_token}
|
||||
elif provider_type == "infura":
|
||||
if not project_id or not project_secret:
|
||||
return HTMLResponse('<div class="text-red-400">Infura requires Project ID and Secret</div>')
|
||||
config = {"project_id": project_id, "project_secret": project_secret}
|
||||
elif provider_type in ["filebase", "storj"]:
|
||||
if not access_key or not secret_key or not bucket:
|
||||
return HTMLResponse('<div class="text-red-400">Requires Access Key, Secret Key, and Bucket</div>')
|
||||
config = {"access_key": access_key, "secret_key": secret_key, "bucket": bucket}
|
||||
elif provider_type == "local":
|
||||
if not path:
|
||||
return HTMLResponse('<div class="text-red-400">Local storage requires a path</div>')
|
||||
config = {"path": path}
|
||||
else:
|
||||
return HTMLResponse(f'<div class="text-red-400">Unknown provider: {provider_type}</div>')
|
||||
|
||||
# Test
|
||||
provider = storage_providers.create_provider(provider_type, {**config, "capacity_gb": capacity_gb})
|
||||
if provider:
|
||||
success, message = await provider.test_connection()
|
||||
if not success:
|
||||
return HTMLResponse(f'<div class="text-red-400">Connection failed: {message}</div>')
|
||||
|
||||
# Save
|
||||
storage_id = await db.add_user_storage(
|
||||
username=username,
|
||||
provider_type=provider_type,
|
||||
provider_name=provider_name,
|
||||
config=config,
|
||||
capacity_gb=capacity_gb,
|
||||
)
|
||||
|
||||
return HTMLResponse(f'''
|
||||
<div class="text-green-400 mb-2">Storage provider added!</div>
|
||||
<script>setTimeout(() => window.location.href = '/storage', 1500);</script>
|
||||
''')
|
||||
|
||||
|
||||
@router.get("/{storage_id}")
|
||||
async def get_storage(
|
||||
storage_id: int,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Get storage details."""
|
||||
import db
|
||||
|
||||
storage = await db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
raise HTTPException(404, "Storage not found")
|
||||
|
||||
if storage.get("username") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
return storage
|
||||
|
||||
|
||||
@router.delete("/{storage_id}")
|
||||
async def delete_storage(
|
||||
storage_id: int,
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Delete a storage provider."""
|
||||
import db
|
||||
|
||||
storage = await db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
raise HTTPException(404, "Storage not found")
|
||||
|
||||
if storage.get("username") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
success = await db.remove_user_storage(storage_id)
|
||||
|
||||
if wants_html(request):
|
||||
return HTMLResponse("")
|
||||
|
||||
return {"deleted": True}
|
||||
|
||||
|
||||
@router.post("/{storage_id}/test")
|
||||
async def test_storage(
|
||||
storage_id: int,
|
||||
request: Request,
|
||||
user: dict = Depends(require_auth),
|
||||
):
|
||||
"""Test storage connectivity."""
|
||||
import db
|
||||
import storage_providers
|
||||
import json
|
||||
|
||||
storage = await db.get_storage_by_id(storage_id)
|
||||
if not storage:
|
||||
raise HTTPException(404, "Storage not found")
|
||||
|
||||
if storage.get("username") != user["username"]:
|
||||
raise HTTPException(403, "Not authorized")
|
||||
|
||||
config = storage["config"]
|
||||
if isinstance(config, str):
|
||||
config = json.loads(config)
|
||||
|
||||
provider = storage_providers.create_provider(storage["provider_type"], {
|
||||
**config,
|
||||
"capacity_gb": storage.get("capacity_gb", 5),
|
||||
})
|
||||
|
||||
if not provider:
|
||||
if wants_html(request):
|
||||
return HTMLResponse('<span class="text-red-400">Failed to create provider</span>')
|
||||
return {"success": False, "message": "Failed to create provider"}
|
||||
|
||||
success, message = await provider.test_connection()
|
||||
|
||||
if wants_html(request):
|
||||
color = "green" if success else "red"
|
||||
return HTMLResponse(f'<span class="text-{color}-400">{message}</span>')
|
||||
|
||||
return {"success": success, "message": message}
|
||||
161
l2/app/routers/users.py
Normal file
161
l2/app/routers/users.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""
|
||||
User profile routes for L2 server.
|
||||
|
||||
Handles ActivityPub actor profiles.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Request, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from artdag_common import render
|
||||
from artdag_common.middleware import wants_html
|
||||
|
||||
from ..config import settings
|
||||
from ..dependencies import get_templates, get_user_from_cookie
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/users/{username}")
|
||||
async def get_user_profile(
|
||||
username: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Get user profile (ActivityPub actor)."""
|
||||
import db
|
||||
|
||||
user = await db.get_user(username)
|
||||
if not user:
|
||||
raise HTTPException(404, "User not found")
|
||||
|
||||
# ActivityPub response
|
||||
accept = request.headers.get("accept", "")
|
||||
if "application/activity+json" in accept or "application/ld+json" in accept:
|
||||
actor = {
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
],
|
||||
"type": "Person",
|
||||
"id": f"https://{settings.domain}/users/{username}",
|
||||
"name": user.get("display_name", username),
|
||||
"preferredUsername": username,
|
||||
"inbox": f"https://{settings.domain}/users/{username}/inbox",
|
||||
"outbox": f"https://{settings.domain}/users/{username}/outbox",
|
||||
"publicKey": {
|
||||
"id": f"https://{settings.domain}/users/{username}#main-key",
|
||||
"owner": f"https://{settings.domain}/users/{username}",
|
||||
"publicKeyPem": user.get("public_key", ""),
|
||||
},
|
||||
}
|
||||
return JSONResponse(content=actor, media_type="application/activity+json")
|
||||
|
||||
# HTML profile page
|
||||
current_user = get_user_from_cookie(request)
|
||||
assets = await db.get_user_assets(username, limit=12)
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "users/profile.html", request,
|
||||
profile=user,
|
||||
assets=assets,
|
||||
user={"username": current_user} if current_user else None,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/users/{username}/outbox")
|
||||
async def get_outbox(
|
||||
username: str,
|
||||
request: Request,
|
||||
page: bool = False,
|
||||
):
|
||||
"""Get user's outbox (ActivityPub)."""
|
||||
import db
|
||||
|
||||
user = await db.get_user(username)
|
||||
if not user:
|
||||
raise HTTPException(404, "User not found")
|
||||
|
||||
actor_id = f"https://{settings.domain}/users/{username}"
|
||||
|
||||
if not page:
|
||||
# Return collection summary
|
||||
total = await db.count_user_activities(username)
|
||||
return JSONResponse(
|
||||
content={
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": "OrderedCollection",
|
||||
"id": f"{actor_id}/outbox",
|
||||
"totalItems": total,
|
||||
"first": f"{actor_id}/outbox?page=true",
|
||||
},
|
||||
media_type="application/activity+json",
|
||||
)
|
||||
|
||||
# Return paginated activities
|
||||
activities = await db.get_user_activities(username, limit=20)
|
||||
items = [a.get("activity_json", a) for a in activities]
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": "OrderedCollectionPage",
|
||||
"id": f"{actor_id}/outbox?page=true",
|
||||
"partOf": f"{actor_id}/outbox",
|
||||
"orderedItems": items,
|
||||
},
|
||||
media_type="application/activity+json",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/users/{username}/inbox")
|
||||
async def receive_inbox(
|
||||
username: str,
|
||||
request: Request,
|
||||
):
|
||||
"""Receive ActivityPub inbox message."""
|
||||
import db
|
||||
|
||||
user = await db.get_user(username)
|
||||
if not user:
|
||||
raise HTTPException(404, "User not found")
|
||||
|
||||
# TODO: Verify HTTP signature
|
||||
# TODO: Process activity (Follow, Like, Announce, etc.)
|
||||
|
||||
body = await request.json()
|
||||
logger.info(f"Received inbox activity for {username}: {body.get('type')}")
|
||||
|
||||
# For now, just acknowledge
|
||||
return {"status": "accepted"}
|
||||
|
||||
|
||||
@router.get("/")
|
||||
async def home(request: Request):
|
||||
"""Home page."""
|
||||
import db
|
||||
import markdown
|
||||
|
||||
username = get_user_from_cookie(request)
|
||||
|
||||
# Get recent activities
|
||||
activities, _ = await db.get_activities_paginated(limit=10)
|
||||
|
||||
# Get README if exists
|
||||
readme_html = ""
|
||||
try:
|
||||
from pathlib import Path
|
||||
readme_path = Path(__file__).parent.parent.parent / "README.md"
|
||||
if readme_path.exists():
|
||||
readme_html = markdown.markdown(readme_path.read_text(), extensions=['tables', 'fenced_code'])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
templates = get_templates(request)
|
||||
return render(templates, "home.html", request,
|
||||
user={"username": username} if username else None,
|
||||
activities=activities,
|
||||
readme_html=readme_html,
|
||||
)
|
||||
11
l2/app/templates/404.html
Normal file
11
l2/app/templates/404.html
Normal file
@@ -0,0 +1,11 @@
|
||||
{# 404 error page: large status code, short message, and a link back home. #}
{% extends "base.html" %}

{% block title %}Not Found - Art-DAG{% endblock %}

{% block content %}
<div class="text-center py-16">
    <h2 class="text-6xl font-bold text-gray-600 mb-4">404</h2>
    <p class="text-xl text-gray-400 mb-8">Page not found</p>
    <a href="/" class="text-blue-400 hover:text-blue-300">Go to home page</a>
</div>
{% endblock %}
|
||||
39
l2/app/templates/activities/list.html
Normal file
39
l2/app/templates/activities/list.html
Normal file
@@ -0,0 +1,39 @@
|
||||
{# Paginated list of ActivityPub activities. Context: activities (rows with
   activity_id/activity_type/published/actor_id), has_more, offset, limit. #}
{% extends "base.html" %}

{% block title %}Activities - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-4xl mx-auto">
    <div class="flex items-center justify-between mb-6">
        <h1 class="text-2xl font-bold">Activities</h1>
    </div>

    {% if activities %}
    <div class="space-y-4">
        {% for activity in activities %}
        {# Each row links through to the activity detail page. #}
        <a href="/activities/{{ activity.activity_id }}"
           class="block bg-gray-800 border border-gray-700 rounded-lg p-4 hover:border-blue-500 transition-colors">
            <div class="flex items-center justify-between mb-2">
                <span class="text-blue-400 font-medium">{{ activity.activity_type }}</span>
                <span class="text-gray-500 text-sm">{{ activity.published }}</span>
            </div>
            <div class="text-gray-300 text-sm truncate">
                {{ activity.actor_id }}
            </div>
        </a>
        {% endfor %}
    </div>

    {# Simple offset-based pagination link. #}
    {% if has_more %}
    <div class="mt-6 text-center">
        <a href="?offset={{ offset + limit }}&limit={{ limit }}"
           class="text-blue-400 hover:text-blue-300">Load More</a>
    </div>
    {% endif %}
    {% else %}
    <div class="text-center py-12 text-gray-400">
        <p>No activities yet.</p>
    </div>
    {% endif %}
</div>
{% endblock %}
|
||||
47
l2/app/templates/anchors/list.html
Normal file
47
l2/app/templates/anchors/list.html
Normal file
@@ -0,0 +1,47 @@
|
||||
{# Paginated list of Bitcoin anchors. Context: anchors (rows with merkle_root/
   confirmed_at/activity_count/created_at/bitcoin_txid), has_more, offset, limit. #}
{% extends "base.html" %}

{% block title %}Anchors - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-4xl mx-auto">
    <div class="flex items-center justify-between mb-6">
        <h1 class="text-2xl font-bold">Bitcoin Anchors</h1>
    </div>

    {% if anchors %}
    <div class="space-y-4">
        {% for anchor in anchors %}
        <div class="bg-gray-800 border border-gray-700 rounded-lg p-4">
            <div class="flex items-center justify-between mb-2">
                {# Truncated merkle root as the row identifier. #}
                <span class="font-mono text-sm text-blue-400 truncate">{{ anchor.merkle_root[:16] }}...</span>
                {% if anchor.confirmed_at %}
                <span class="bg-green-600 text-xs px-2 py-1 rounded">Confirmed</span>
                {% else %}
                <span class="bg-yellow-600 text-xs px-2 py-1 rounded">Pending</span>
                {% endif %}
            </div>
            <div class="text-gray-400 text-sm">
                {{ anchor.activity_count or 0 }} activities | Created: {{ anchor.created_at }}
            </div>
            {% if anchor.bitcoin_txid %}
            <div class="mt-2 text-xs text-gray-500 font-mono truncate">
                TX: {{ anchor.bitcoin_txid }}
            </div>
            {% endif %}
        </div>
        {% endfor %}
    </div>

    {% if has_more %}
    <div class="mt-6 text-center">
        <a href="?offset={{ offset + limit }}&limit={{ limit }}"
           class="text-blue-400 hover:text-blue-300">Load More</a>
    </div>
    {% endif %}
    {% else %}
    <div class="text-center py-12 text-gray-400">
        <p>No anchors yet.</p>
    </div>
    {% endif %}
</div>
{% endblock %}
|
||||
58
l2/app/templates/assets/list.html
Normal file
58
l2/app/templates/assets/list.html
Normal file
@@ -0,0 +1,58 @@
|
||||
{# Grid of the user's assets with HTMX infinite scroll. Context: assets
   (rows with id/name/asset_type/thumbnail_url/ipfs_cid), has_more, offset, limit. #}
{% extends "base.html" %}

{% block title %}Assets - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-6xl mx-auto">
    <h1 class="text-3xl font-bold mb-6">Your Assets</h1>

    {% if assets %}
    <div class="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4" id="assets-grid">
        {% for asset in assets %}
        <a href="/assets/{{ asset.id }}"
           class="bg-gray-800 rounded-lg overflow-hidden hover:ring-2 hover:ring-blue-500 transition-all">
            {# Thumbnail rendering depends on media type: image, video, or other. #}
            {% if asset.asset_type == 'image' %}
            <img src="{{ asset.thumbnail_url or '/assets/' + asset.id + '/thumb' }}"
                 alt="{{ asset.name }}"
                 class="w-full h-40 object-cover">
            {% elif asset.asset_type == 'video' %}
            <div class="w-full h-40 bg-gray-900 flex items-center justify-center">
                <svg class="w-12 h-12 text-gray-600" fill="currentColor" viewBox="0 0 20 20">
                    <path d="M6.3 2.841A1.5 1.5 0 004 4.11V15.89a1.5 1.5 0 002.3 1.269l9.344-5.89a1.5 1.5 0 000-2.538L6.3 2.84z"/>
                </svg>
            </div>
            {% else %}
            <div class="w-full h-40 bg-gray-900 flex items-center justify-center">
                <span class="text-gray-600">{{ asset.asset_type }}</span>
            </div>
            {% endif %}

            <div class="p-3">
                <div class="font-medium text-white truncate">{{ asset.name }}</div>
                <div class="text-xs text-gray-500">{{ asset.asset_type }}</div>
                {# An IPFS CID means the asset has been published. #}
                {% if asset.ipfs_cid %}
                <div class="text-xs text-green-400 mt-1">Published</div>
                {% endif %}
            </div>
        </a>
        {% endfor %}
    </div>

    {# HTMX infinite scroll: fetch the next page when this sentinel scrolls
       into view and append it to the grid. #}
    {% if has_more %}
    <div hx-get="/assets?offset={{ offset + limit }}"
         hx-trigger="revealed"
         hx-swap="beforeend"
         hx-target="#assets-grid"
         class="h-20 flex items-center justify-center text-gray-500 mt-4">
        Loading more...
    </div>
    {% endif %}

    {% else %}
    <div class="bg-gray-800 border border-gray-700 rounded-lg p-12 text-center">
        <p class="text-gray-500 mb-4">No assets yet</p>
        <p class="text-gray-600 text-sm">Create content on an L1 renderer and publish it here.</p>
    </div>
    {% endif %}
</div>
{% endblock %}
|
||||
12
l2/app/templates/auth/already_logged_in.html
Normal file
12
l2/app/templates/auth/already_logged_in.html
Normal file
@@ -0,0 +1,12 @@
|
||||
{# Shown when an authenticated user hits the login/register pages. #}
{% extends "base.html" %}

{% block title %}Already Logged In - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-md mx-auto text-center">
    <div class="bg-green-900/50 border border-green-700 text-green-300 px-4 py-3 rounded-lg mb-4">
        You are already logged in as <strong>{{ user.username }}</strong>
    </div>
    <p><a href="/" class="text-blue-400 hover:text-blue-300">Go to home page</a></p>
</div>
{% endblock %}
|
||||
37
l2/app/templates/auth/login.html
Normal file
37
l2/app/templates/auth/login.html
Normal file
@@ -0,0 +1,37 @@
|
||||
{# Login form, submitted via HTMX to /auth/login; the server's response
   fragment is swapped into #login-result. Context: return_to (optional). #}
{% extends "base.html" %}

{% block title %}Login - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-md mx-auto">
    <h2 class="text-xl font-semibold mb-6">Login</h2>

    <div id="login-result"></div>

    <form hx-post="/auth/login" hx-target="#login-result" hx-swap="innerHTML" class="space-y-4">
        {# Preserve the post-login redirect target when provided. #}
        {% if return_to %}
        <input type="hidden" name="return_to" value="{{ return_to }}">
        {% endif %}

        <div>
            <label for="username" class="block text-sm font-medium text-gray-300 mb-2">Username</label>
            <input type="text" id="username" name="username" required
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <div>
            <label for="password" class="block text-sm font-medium text-gray-300 mb-2">Password</label>
            <input type="password" id="password" name="password" required
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <button type="submit" class="w-full px-4 py-3 bg-blue-600 hover:bg-blue-700 text-white font-medium rounded-lg">
            Login
        </button>
    </form>

    <p class="mt-6 text-gray-400">
        Don't have an account? <a href="/auth/register" class="text-blue-400 hover:text-blue-300">Register</a>
    </p>
</div>
{% endblock %}
|
||||
45
l2/app/templates/auth/register.html
Normal file
45
l2/app/templates/auth/register.html
Normal file
@@ -0,0 +1,45 @@
|
||||
{# Registration form, submitted via HTMX to /auth/register; the response
   fragment is swapped into #register-result. Client-side constraints
   (pattern, minlength) mirror what the server presumably enforces. #}
{% extends "base.html" %}

{% block title %}Register - Art-DAG{% endblock %}

{% block content %}
<div class="max-w-md mx-auto">
    <h2 class="text-xl font-semibold mb-6">Register</h2>

    <div id="register-result"></div>

    <form hx-post="/auth/register" hx-target="#register-result" hx-swap="innerHTML" class="space-y-4">
        <div>
            <label for="username" class="block text-sm font-medium text-gray-300 mb-2">Username</label>
            {# Usernames restricted to URL-safe characters (also used in actor IDs). #}
            <input type="text" id="username" name="username" required pattern="[a-zA-Z0-9_-]+"
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <div>
            <label for="email" class="block text-sm font-medium text-gray-300 mb-2">Email (optional)</label>
            <input type="email" id="email" name="email"
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <div>
            <label for="password" class="block text-sm font-medium text-gray-300 mb-2">Password</label>
            <input type="password" id="password" name="password" required minlength="6"
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <div>
            <label for="password2" class="block text-sm font-medium text-gray-300 mb-2">Confirm Password</label>
            <input type="password" id="password2" name="password2" required minlength="6"
                   class="w-full px-4 py-3 bg-gray-800 border border-gray-600 rounded-lg text-white focus:border-blue-500 focus:outline-none">
        </div>

        <button type="submit" class="w-full px-4 py-3 bg-blue-600 hover:bg-blue-700 text-white font-medium rounded-lg">
            Register
        </button>
    </form>

    <p class="mt-6 text-gray-400">
        Already have an account? <a href="/auth/login" class="text-blue-400 hover:text-blue-300">Login</a>
    </p>
</div>
{% endblock %}
|
||||
47
l2/app/templates/base.html
Normal file
47
l2/app/templates/base.html
Normal file
@@ -0,0 +1,47 @@
|
||||
{# L2 site chrome: extends the shared _base.html and fills its named blocks.
   Several blocks pass through pre-rendered HTML fragments stashed on
   request.state by middleware — presumably set upstream; verify in app setup. #}
{% extends "_base.html" %}

{% block brand %}
<a href="https://blog.rose-ash.com/" class="no-underline text-stone-900">Rose Ash</a>
<span class="text-stone-400 mx-1">|</span>
<a href="/" class="no-underline text-stone-900">Art-DAG</a>
<span class="text-stone-400 mx-1">/</span>
<span class="text-stone-600 text-3xl">L2</span>
{% endblock %}

{% block cart_mini %}
{% if request and request.state.cart_mini_html %}
{{ request.state.cart_mini_html | safe }}
{% endif %}
{% endblock %}

{% block nav_tree %}
{% if request and request.state.nav_tree_html %}
{{ request.state.nav_tree_html | safe }}
{% endif %}
{% endblock %}

{% block auth_menu %}
{% if request and request.state.auth_menu_html %}
{{ request.state.auth_menu_html | safe }}
{% endif %}
{% endblock %}

{# Mobile variant reuses the same pre-rendered auth menu fragment. #}
{% block auth_menu_mobile %}
{% if request and request.state.auth_menu_html %}
{{ request.state.auth_menu_html | safe }}
{% endif %}
{% endblock %}

{# Secondary navigation bar; active_tab highlights the current section. #}
{% block sub_nav %}
<div class="bg-stone-200 border-b border-stone-300">
    <div class="max-w-screen-2xl mx-auto px-4">
        <nav class="flex items-center gap-4 py-2 text-sm overflow-x-auto no-scrollbar">
            <a href="/assets" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'assets' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Assets</a>
            <a href="/activities" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'activities' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Activities</a>
            <a href="/anchors" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'anchors' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Anchors</a>
            <a href="/storage" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'storage' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Storage</a>
            <a href="/renderers" class="whitespace-nowrap px-3 py-1.5 rounded {% if active_tab == 'renderers' %}bg-stone-500 text-white{% else %}text-stone-700 hover:bg-stone-300{% endif %}">Renderers</a>
        </nav>
    </div>
</div>
{% endblock %}
|
||||
42
l2/app/templates/home.html
Normal file
42
l2/app/templates/home.html
Normal file
@@ -0,0 +1,42 @@
|
||||
{# Landing page: rendered README when present, otherwise a hero with
   login/register CTAs, followed by the recent activity feed.
   Context: readme_html, user (or None), activities. #}
{% extends "base.html" %}

{% block title %}Art-DAG{% endblock %}

{% block content %}
<div class="max-w-4xl mx-auto">
    {% if readme_html %}
    {# readme_html is server-rendered markdown; trusted, hence | safe. #}
    <div class="prose prose-invert max-w-none mb-12">
        {{ readme_html | safe }}
    </div>
    {% else %}
    <div class="text-center py-12">
        <h1 class="text-4xl font-bold mb-4">Art-DAG</h1>
        <p class="text-xl text-gray-400 mb-8">Content-Addressable Media with ActivityPub Federation</p>

        {% if not user %}
        <div class="flex justify-center space-x-4">
            <a href="/auth/login" class="bg-gray-700 hover:bg-gray-600 px-6 py-3 rounded-lg font-medium">Login</a>
            <a href="/auth/register" class="bg-blue-600 hover:bg-blue-700 px-6 py-3 rounded-lg font-medium">Register</a>
        </div>
        {% endif %}
    </div>
    {% endif %}

    {% if activities %}
    <h2 class="text-2xl font-bold mb-4">Recent Activity</h2>
    <div class="space-y-4">
        {% for activity in activities %}
        <div class="bg-gray-800 rounded-lg p-4">
            <div class="flex items-center justify-between mb-2">
                <span class="text-blue-400">{{ activity.actor }}</span>
                <span class="text-gray-500 text-sm">{{ activity.created_at }}</span>
            </div>
            <div class="text-gray-300">
                {{ activity.type }}: {{ activity.summary or activity.object_type }}
            </div>
        </div>
        {% endfor %}
    </div>
    {% endif %}
</div>
{% endblock %}
|
||||
52
l2/app/templates/renderers/list.html
Normal file
52
l2/app/templates/renderers/list.html
Normal file
@@ -0,0 +1,52 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-4xl mx-auto">
|
||||
<h1 class="text-2xl font-bold mb-6">Renderers</h1>
|
||||
|
||||
<p class="text-gray-400 mb-6">
|
||||
Renderers are L1 servers that process your media. Connect to a renderer to create and run recipes.
|
||||
</p>
|
||||
|
||||
{% if error %}
|
||||
<div class="bg-red-900/50 border border-red-500 text-red-200 px-4 py-3 rounded mb-6">
|
||||
{{ error }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if success %}
|
||||
<div class="bg-green-900/50 border border-green-500 text-green-200 px-4 py-3 rounded mb-6">
|
||||
{{ success }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="space-y-4">
|
||||
{% for server in servers %}
|
||||
<div class="bg-gray-800 rounded-lg p-4 flex items-center justify-between">
|
||||
<div>
|
||||
<a href="{{ server.url }}" target="_blank" class="text-blue-400 hover:text-blue-300 font-medium">
|
||||
{{ server.url }}
|
||||
</a>
|
||||
{% if server.healthy %}
|
||||
<span class="ml-2 text-green-400 text-sm">Online</span>
|
||||
{% else %}
|
||||
<span class="ml-2 text-red-400 text-sm">Offline</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
<a href="{{ server.url }}/auth?auth_token={{ user.token }}"
|
||||
class="px-3 py-1 bg-blue-600 hover:bg-blue-500 rounded text-sm">
|
||||
Connect
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-gray-500">No renderers configured.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<div class="mt-8 text-gray-500 text-sm">
|
||||
<p>Renderers are configured by the system administrator.</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
41
l2/app/templates/storage/list.html
Normal file
41
l2/app/templates/storage/list.html
Normal file
@@ -0,0 +1,41 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Storage - Art-DAG{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-4xl mx-auto">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-2xl font-bold">Storage Providers</h1>
|
||||
<a href="/storage/add" class="bg-blue-600 hover:bg-blue-700 px-4 py-2 rounded-lg text-sm">
|
||||
Add Storage
|
||||
</a>
|
||||
</div>
|
||||
|
||||
{% if storages %}
|
||||
<div class="space-y-4">
|
||||
{% for storage in storages %}
|
||||
<div class="bg-gray-800 border border-gray-700 rounded-lg p-4">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<span class="font-medium">{{ storage.name or storage.provider_type }}</span>
|
||||
<span class="text-xs px-2 py-1 rounded {% if storage.is_active %}bg-green-600{% else %}bg-gray-600{% endif %}">
|
||||
{{ storage.provider_type }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="text-gray-400 text-sm">
|
||||
{% if storage.endpoint %}
|
||||
{{ storage.endpoint }}
|
||||
{% elif storage.bucket %}
|
||||
Bucket: {{ storage.bucket }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="text-center py-12 text-gray-400">
|
||||
<p>No storage providers configured.</p>
|
||||
<a href="/storage/add" class="text-blue-400 hover:text-blue-300 mt-2 inline-block">Add one now</a>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
BIN
l2/artdag-client.tar.gz
Normal file
BIN
l2/artdag-client.tar.gz
Normal file
Binary file not shown.
213
l2/auth.py
Normal file
213
l2/auth.py
Normal file
@@ -0,0 +1,213 @@
|
||||
"""
|
||||
Authentication for Art DAG L2 Server.
|
||||
|
||||
User registration, login, and JWT tokens.
|
||||
"""
|
||||
|
||||
import os
|
||||
import secrets
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import bcrypt
|
||||
from jose import JWTError, jwt
|
||||
from pydantic import BaseModel
|
||||
|
||||
import db
|
||||
|
||||
# JWT settings
|
||||
ALGORITHM = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_DAYS = 30
|
||||
|
||||
|
||||
def load_jwt_secret() -> str:
    """Resolve the JWT signing secret.

    Checked in priority order: Docker secret file, JWT_SECRET environment
    variable, then a freshly generated random value (which does not survive
    process restarts, so existing tokens would be invalidated).
    """
    docker_secret = Path("/run/secrets/jwt_secret")
    if docker_secret.exists():
        return docker_secret.read_text().strip()

    if os.environ.get("JWT_SECRET"):
        return os.environ["JWT_SECRET"]

    # Last resort: ephemeral secret for this process only.
    print("WARNING: No JWT_SECRET configured. Tokens will be invalidated on restart.")
    return secrets.token_hex(32)


# Resolved once at import time; all token helpers below use this key.
SECRET_KEY = load_jwt_secret()
|
||||
|
||||
|
||||
class User(BaseModel):
    """A registered user as stored/returned by the auth layer."""
    username: str
    password_hash: str  # bcrypt hash (see hash_password)
    created_at: str  # ISO-8601 timestamp string
    email: Optional[str] = None
|
||||
|
||||
|
||||
class UserCreate(BaseModel):
    """Request body to register a new user."""
    username: str
    password: str  # plaintext; hashed before storage
    email: Optional[str] = None
|
||||
|
||||
|
||||
class UserLogin(BaseModel):
    """Request body to authenticate an existing user."""
    username: str
    password: str
|
||||
|
||||
|
||||
class Token(BaseModel):
    """JWT token response returned to clients after login."""
    access_token: str  # signed JWT (see create_access_token)
    token_type: str = "bearer"
    username: str
    expires_at: str  # ISO-8601 expiry timestamp
|
||||
|
||||
|
||||
# Keep DATA_DIR for keys (RSA keys still stored as files)
# Override with ARTDAG_DATA; defaults to ~/.artdag/l2
DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2")))
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """Return a bcrypt hash of *password*.

    Input is capped at 72 bytes because bcrypt silently ignores anything
    beyond that limit; truncating explicitly makes the behavior visible.
    """
    truncated = password.encode('utf-8')[:72]
    return bcrypt.hashpw(truncated, bcrypt.gensalt()).decode('utf-8')
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check *plain_password* against a stored bcrypt hash.

    Applies the same 72-byte truncation as hash_password so the two
    functions agree on long passwords.
    """
    candidate = plain_password.encode('utf-8')[:72]
    return bcrypt.checkpw(candidate, hashed_password.encode('utf-8'))
|
||||
|
||||
|
||||
async def create_user(data_dir: Path, username: str, password: str, email: Optional[str] = None) -> User:
    """Register a new user and generate their ActivityPub keypair.

    Raises ValueError when the username is already taken.
    """
    from keys import generate_keypair

    if await db.user_exists(username):
        raise ValueError(f"Username already exists: {username}")

    pw_hash = hash_password(password)
    record = await db.create_user(username, pw_hash, email)

    # Every registered user is their own ActivityPub actor, so mint keys now.
    generate_keypair(data_dir, username)

    # The DB may hand back a datetime; the User model wants an ISO string.
    created = record.get("created_at")
    if hasattr(created, 'isoformat'):
        created = created.isoformat()

    return User(
        username=username,
        password_hash=pw_hash,
        created_at=created,
        email=email,
    )
|
||||
|
||||
|
||||
async def authenticate_user(data_dir: Path, username: str, password: str) -> Optional[User]:
    """Return the User when username/password are valid, else None."""
    record = await db.get_user(username)
    if not record:
        return None

    if not verify_password(password, record["password_hash"]):
        return None

    # Normalize a datetime created_at into an ISO string for the model.
    created = record.get("created_at")
    created = created.isoformat() if hasattr(created, 'isoformat') else created

    return User(
        username=record["username"],
        password_hash=record["password_hash"],
        created_at=created,
        email=record.get("email"),
    )
|
||||
|
||||
|
||||
def create_access_token(username: str, l2_server: Optional[str] = None, l1_server: Optional[str] = None) -> Token:
    """Create a JWT access token.

    Args:
        username: The username
        l2_server: The L2 server URL (e.g., https://artdag.rose-ash.com)
            Required for L1 to verify tokens with the correct L2.
        l1_server: Optional L1 server URL to scope the token to.
            If set, token only works for this specific L1.

    Returns:
        Token carrying the signed JWT and its expiry (ACCESS_TOKEN_EXPIRE_DAYS out).
    """
    expires = datetime.now(timezone.utc) + timedelta(days=ACCESS_TOKEN_EXPIRE_DAYS)

    payload = {
        "sub": username,
        "username": username,  # Also include as username for compatibility
        "exp": expires,
        "iat": datetime.now(timezone.utc)
    }

    # Include l2_server so L1 knows which L2 to verify with
    if l2_server:
        payload["l2_server"] = l2_server

    # Include l1_server to scope token to specific L1
    if l1_server:
        payload["l1_server"] = l1_server

    token = jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)

    return Token(
        access_token=token,
        username=username,
        expires_at=expires.isoformat()
    )
|
||||
|
||||
|
||||
def verify_token(token: str) -> Optional[str]:
    """Decode *token* and return its subject (username), or None when invalid.

    Any JWT failure (bad signature, expiry, malformed token) yields None.
    """
    try:
        claims = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        return None
    return claims.get("sub")
|
||||
|
||||
|
||||
def get_token_claims(token: str) -> Optional[dict]:
    """Return the full claim set of *token*, or None when it fails validation."""
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        return None
|
||||
|
||||
|
||||
async def get_current_user(data_dir: Path, token: str) -> Optional[User]:
    """Resolve a bearer token to its User record.

    Returns None when the token is invalid or the user no longer exists.
    """
    username = verify_token(token)
    if not username:
        return None

    record = await db.get_user(username)
    if not record:
        return None

    # Normalize a datetime created_at into an ISO string for the model.
    created = record.get("created_at")
    created = created.isoformat() if hasattr(created, 'isoformat') else created

    return User(
        username=record["username"],
        password_hash=record["password_hash"],
        created_at=created,
        email=record.get("email"),
    )
|
||||
19
l2/deploy.sh
Executable file
19
l2/deploy.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
# Build the L2 server image and redeploy the activitypub stack on this Swarm node.
set -e

# Always run from the repo directory, regardless of invocation path.
cd "$(dirname "$0")"

echo "=== Pulling latest code ==="
git pull

echo "=== Building Docker image ==="
# CACHEBUST busts the Docker layer cache so code layers rebuild every run.
docker build --build-arg CACHEBUST=$(date +%s) -t git.rose-ash.com/art-dag/l2-server:latest .

echo "=== Redeploying activitypub stack ==="
docker stack deploy -c docker-compose.yml activitypub

echo "=== Restarting proxy nginx ==="
# Force-restart so nginx re-resolves the freshly deployed service containers.
docker service update --force proxy_nginx

echo "=== Done ==="
docker stack services activitypub
|
||||
90
l2/docker-compose.yml
Normal file
90
l2/docker-compose.yml
Normal file
@@ -0,0 +1,90 @@
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
POSTGRES_USER: artdag
|
||||
POSTGRES_DB: artdag
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
networks:
|
||||
- internal
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U artdag"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
deploy:
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
ipfs:
|
||||
image: ipfs/kubo:latest
|
||||
ports:
|
||||
- "4002:4001" # Swarm TCP (4002 external, L1 uses 4001)
|
||||
- "4002:4001/udp" # Swarm UDP
|
||||
volumes:
|
||||
- ipfs_data:/data/ipfs
|
||||
networks:
|
||||
- internal
|
||||
- externalnet # For gateway access
|
||||
deploy:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
l2-server:
|
||||
image: registry.rose-ash.com:5000/l2-server:latest
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- ARTDAG_DATA=/data/l2
|
||||
- IPFS_API=/dns/ipfs/tcp/5001
|
||||
- ANCHOR_BACKUP_DIR=/data/anchors
|
||||
# Coop app internal URLs for fragment composition
|
||||
- INTERNAL_URL_BLOG=http://blog:8000
|
||||
- INTERNAL_URL_CART=http://cart:8000
|
||||
- INTERNAL_URL_ACCOUNT=http://account:8000
|
||||
# DATABASE_URL, ARTDAG_DOMAIN, JWT_SECRET from .env file (ARTDAG_USER removed - multi-actor)
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8200/')"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 15s
|
||||
volumes:
|
||||
- l2_data:/data/l2 # Still needed for RSA keys
|
||||
- anchor_backup:/data/anchors # Persistent anchor proofs (survives DB wipes)
|
||||
networks:
|
||||
- internal
|
||||
- externalnet
|
||||
depends_on:
|
||||
- postgres
|
||||
- ipfs
|
||||
deploy:
|
||||
replicas: 1
|
||||
update_config:
|
||||
order: start-first
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.gpu != true
|
||||
|
||||
volumes:
|
||||
l2_data:
|
||||
postgres_data:
|
||||
ipfs_data:
|
||||
anchor_backup: # Persistent - don't delete when resetting DB
|
||||
|
||||
networks:
|
||||
internal:
|
||||
externalnet:
|
||||
external: true
|
||||
91
l2/docker-stack.yml
Normal file
91
l2/docker-stack.yml
Normal file
@@ -0,0 +1,91 @@
|
||||
version: "3.8"
|
||||
|
||||
# Full Art DAG stack for Docker Swarm deployment
|
||||
# Deploy with: docker stack deploy -c docker-stack.yml artdag
|
||||
|
||||
services:
|
||||
# Redis for L1
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
- artdag
|
||||
deploy:
|
||||
replicas: 1
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
# L1 Server (API)
|
||||
l1-server:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
ports:
|
||||
- "8100:8100"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- CACHE_DIR=/data/cache
|
||||
# L1_PUBLIC_URL, L2_SERVER, L2_DOMAIN from .env file
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
- redis
|
||||
networks:
|
||||
- artdag
|
||||
deploy:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
# L1 Worker (Celery)
|
||||
l1-worker:
|
||||
image: git.rose-ash.com/art-dag/l1-server:latest
|
||||
command: celery -A celery_app worker --loglevel=info
|
||||
environment:
|
||||
- REDIS_URL=redis://redis:6379/5
|
||||
- CACHE_DIR=/data/cache
|
||||
- C_FORCE_ROOT=true
|
||||
volumes:
|
||||
- l1_cache:/data/cache
|
||||
depends_on:
|
||||
- redis
|
||||
networks:
|
||||
- artdag
|
||||
deploy:
|
||||
replicas: 2
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
# L2 Server (ActivityPub)
|
||||
l2-server:
|
||||
image: git.rose-ash.com/art-dag/l2-server:latest
|
||||
ports:
|
||||
- "8200:8200"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- ARTDAG_DATA=/data/l2
|
||||
# ARTDAG_DOMAIN, JWT_SECRET from .env file (multi-actor, no ARTDAG_USER)
|
||||
volumes:
|
||||
- l2_data:/data/l2
|
||||
depends_on:
|
||||
- l1-server
|
||||
networks:
|
||||
- artdag
|
||||
deploy:
|
||||
replicas: 1
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
volumes:
|
||||
redis_data:
|
||||
l1_cache:
|
||||
l2_data:
|
||||
|
||||
networks:
|
||||
artdag:
|
||||
driver: overlay
|
||||
226
l2/ipfs_client.py
Normal file
226
l2/ipfs_client.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# art-activity-pub/ipfs_client.py
|
||||
"""
|
||||
IPFS client for Art DAG L2 server.
|
||||
|
||||
Provides functions to fetch, pin, and add content to IPFS.
|
||||
Uses direct HTTP API calls for compatibility with all Kubo versions.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
class IPFSError(Exception):
|
||||
"""Raised when an IPFS operation fails."""
|
||||
pass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# IPFS API multiaddr - default to local, docker uses /dns/ipfs/tcp/5001
|
||||
IPFS_API = os.getenv("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
|
||||
|
||||
# Connection timeout in seconds
|
||||
IPFS_TIMEOUT = int(os.getenv("IPFS_TIMEOUT", "60"))
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
"""Convert IPFS multiaddr to HTTP URL."""
|
||||
# Handle /dns/hostname/tcp/port format
|
||||
dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if dns_match:
|
||||
return f"http://{dns_match.group(1)}:{dns_match.group(2)}"
|
||||
|
||||
# Handle /ip4/address/tcp/port format
|
||||
ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if ip4_match:
|
||||
return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}"
|
||||
|
||||
# Fallback: assume it's already a URL or use default
|
||||
if multiaddr.startswith("http"):
|
||||
return multiaddr
|
||||
return "http://127.0.0.1:5001"
|
||||
|
||||
|
||||
# Base URL for IPFS API
|
||||
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
def get_bytes(cid: str) -> Optional[bytes]:
    """Fetch the raw content behind *cid* via the IPFS /cat endpoint.

    Returns the content bytes, or None on any failure (logged).
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/cat",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        payload = response.content
        logger.info(f"Retrieved from IPFS: {cid} ({len(payload)} bytes)")
        return payload
    except Exception as e:
        logger.error(f"Failed to get from IPFS: {e}")
        return None
|
||||
|
||||
|
||||
def pin(cid: str) -> bool:
    """Pin *cid* on this IPFS node; True on success, False on failure (logged)."""
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/add",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        logger.info(f"Pinned on IPFS: {cid}")
        return True
    except Exception as e:
        logger.error(f"Failed to pin on IPFS: {e}")
        return False
|
||||
|
||||
|
||||
def unpin(cid: str) -> bool:
    """Remove the pin for *cid* on this node; True on success, False on failure (logged)."""
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/rm",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        logger.info(f"Unpinned from IPFS: {cid}")
        return True
    except Exception as e:
        logger.error(f"Failed to unpin from IPFS: {e}")
        return False
|
||||
|
||||
|
||||
def is_available() -> bool:
    """Quick liveness probe: True when the IPFS daemon answers /id within 5s."""
    try:
        probe = requests.post(f"{IPFS_BASE_URL}/api/v0/id", timeout=5)
        return probe.status_code == 200
    except Exception:
        return False
|
||||
|
||||
|
||||
def get_node_id() -> Optional[str]:
    """Return this IPFS node's peer ID, or None on failure (logged)."""
    try:
        response = requests.post(f"{IPFS_BASE_URL}/api/v0/id", timeout=IPFS_TIMEOUT)
        response.raise_for_status()
        return response.json().get("ID")
    except Exception as e:
        logger.error(f"Failed to get node ID: {e}")
        return None
|
||||
|
||||
|
||||
def add_bytes(data: bytes, pin: bool = True) -> str:
    """Store *data* in IPFS (optionally pinned) and return its CID.

    Raises IPFSError (chained to the underlying cause) when the add fails.
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/add",
            params={"pin": str(pin).lower()},
            files={"file": ("data", data)},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        cid = response.json()["Hash"]
        logger.info(f"Added to IPFS: {len(data)} bytes -> {cid}")
        return cid
    except Exception as e:
        logger.error(f"Failed to add bytes to IPFS: {e}")
        raise IPFSError(f"Failed to add bytes to IPFS: {e}") from e
|
||||
|
||||
|
||||
def add_json(data: dict) -> str:
    """Serialize *data* to canonical, indented JSON and pin it on IPFS.

    sort_keys keeps the serialization deterministic so identical dicts
    always produce the same CID. Raises IPFSError when the add fails.
    """
    serialized = json.dumps(data, indent=2, sort_keys=True).encode('utf-8')
    return add_bytes(serialized, pin=True)
|
||||
|
||||
|
||||
def pin_or_raise(cid: str) -> None:
    """Pin *cid*, raising IPFSError instead of returning False on failure.

    Strict counterpart of pin() for callers that must not continue unpinned.
    """
    try:
        response = requests.post(
            f"{IPFS_BASE_URL}/api/v0/pin/add",
            params={"arg": cid},
            timeout=IPFS_TIMEOUT,
        )
        response.raise_for_status()
        logger.info(f"Pinned on IPFS: {cid}")
    except Exception as e:
        logger.error(f"Failed to pin on IPFS: {e}")
        raise IPFSError(f"Failed to pin {cid}: {e}") from e
|
||||
119
l2/keys.py
Normal file
119
l2/keys.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""
|
||||
Key management for ActivityPub signing.
|
||||
|
||||
Keys are stored in DATA_DIR/keys/:
|
||||
- {username}.pem - Private key (chmod 600)
|
||||
- {username}.pub - Public key
|
||||
"""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||
|
||||
|
||||
def get_keys_dir(data_dir: Path) -> Path:
    """Return *data_dir*/keys, creating the directory tree on first use."""
    path = data_dir / "keys"
    path.mkdir(parents=True, exist_ok=True)
    return path
|
||||
|
||||
|
||||
def generate_keypair(data_dir: Path, username: str) -> tuple[str, str]:
    """Create and persist a 2048-bit RSA keypair for *username*.

    Writes {username}.pem (private, mode 600) and {username}.pub under the
    keys directory. Returns (private_pem, public_pem).
    """
    keys_dir = get_keys_dir(data_dir)
    private_path = keys_dir / f"{username}.pem"
    public_path = keys_dir / f"{username}.pub"

    key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )

    private_pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode()

    public_pem = key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    ).decode()

    # Private key readable by the service user only.
    private_path.write_text(private_pem)
    private_path.chmod(0o600)
    public_path.write_text(public_pem)

    return private_pem, public_pem
|
||||
|
||||
|
||||
def load_private_key(data_dir: Path, username: str):
    """Load *username*'s RSA private key object for signing.

    Raises FileNotFoundError when no key has been generated yet.
    """
    private_path = get_keys_dir(data_dir) / f"{username}.pem"
    if not private_path.exists():
        raise FileNotFoundError(f"Private key not found: {private_path}")

    return serialization.load_pem_private_key(
        private_path.read_text().encode(),
        password=None
    )
|
||||
|
||||
|
||||
def load_public_key_pem(data_dir: Path, username: str) -> str:
    """Return the PEM text of *username*'s public key (for the actor profile).

    Raises FileNotFoundError when no key has been generated yet.
    """
    public_path = get_keys_dir(data_dir) / f"{username}.pub"
    if not public_path.exists():
        raise FileNotFoundError(f"Public key not found: {public_path}")
    return public_path.read_text()
|
||||
|
||||
|
||||
def has_keys(data_dir: Path, username: str) -> bool:
    """True when a private key already exists for *username*."""
    private_path = get_keys_dir(data_dir) / f"{username}.pem"
    return private_path.exists()
|
||||
|
||||
|
||||
def sign_data(private_key, data: str) -> str:
    """Sign *data* with RSA PKCS#1 v1.5 / SHA-256 and return base64 text."""
    raw_signature = private_key.sign(
        data.encode(),
        padding.PKCS1v15(),
        hashes.SHA256()
    )
    return base64.b64encode(raw_signature).decode()
|
||||
|
||||
|
||||
def create_signature(data_dir: Path, username: str, domain: str, activity: dict) -> dict:
    """Build an RsaSignature2017 block for *activity*.

    Signs the compact, key-sorted JSON serialization of the activity with
    the user's private key.
    """
    private_key = load_private_key(data_dir, username)

    # Canonical form: sorted keys, no whitespace — both signer and verifier
    # must serialize identically for the signature to check out.
    canonical = json.dumps(activity, sort_keys=True, separators=(',', ':'))
    signature_value = sign_data(private_key, canonical)

    return {
        "type": "RsaSignature2017",
        "creator": f"https://{domain}/users/{username}#main-key",
        "created": datetime.now(timezone.utc).isoformat(),
        "signatureValue": signature_value
    }
|
||||
245
l2/migrate.py
Executable file
245
l2/migrate.py
Executable file
@@ -0,0 +1,245 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration script: JSON files to PostgreSQL.
|
||||
|
||||
Usage:
|
||||
python migrate.py [--dry-run]
|
||||
|
||||
Migrates:
|
||||
- users.json -> users table
|
||||
- registry.json -> assets table
|
||||
- activities.json -> activities table
|
||||
- followers.json -> followers table
|
||||
|
||||
Does NOT migrate:
|
||||
- keys/ directory (stays as files)
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
from uuid import UUID
|
||||
|
||||
import asyncpg
|
||||
|
||||
# Configuration
# Source directory of the legacy JSON store; override with ARTDAG_DATA.
DATA_DIR = Path(os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2")))
# Target database; fail fast rather than defaulting to a wrong DSN.
DATABASE_URL = os.environ.get("DATABASE_URL")
if not DATABASE_URL:
    raise RuntimeError("DATABASE_URL environment variable is required")

# Full schema, executed as one statement batch by migrate().
# WARNING: the leading DROPs make this migration destructive on re-run.
SCHEMA = """
-- Drop existing tables (careful in production!)
DROP TABLE IF EXISTS followers CASCADE;
DROP TABLE IF EXISTS activities CASCADE;
DROP TABLE IF EXISTS assets CASCADE;
DROP TABLE IF EXISTS users CASCADE;

-- Users table
CREATE TABLE users (
    username VARCHAR(255) PRIMARY KEY,
    password_hash VARCHAR(255) NOT NULL,
    email VARCHAR(255),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Assets table
CREATE TABLE assets (
    name VARCHAR(255) PRIMARY KEY,
    content_hash VARCHAR(128) NOT NULL,
    asset_type VARCHAR(50) NOT NULL,
    tags JSONB DEFAULT '[]'::jsonb,
    metadata JSONB DEFAULT '{}'::jsonb,
    url TEXT,
    provenance JSONB,
    description TEXT,
    origin JSONB,
    owner VARCHAR(255) NOT NULL REFERENCES users(username),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ
);

-- Activities table
CREATE TABLE activities (
    activity_id UUID PRIMARY KEY,
    activity_type VARCHAR(50) NOT NULL,
    actor_id TEXT NOT NULL,
    object_data JSONB NOT NULL,
    published TIMESTAMPTZ NOT NULL,
    signature JSONB
);

-- Followers table
CREATE TABLE followers (
    id SERIAL PRIMARY KEY,
    username VARCHAR(255) NOT NULL REFERENCES users(username),
    acct VARCHAR(255) NOT NULL,
    url TEXT NOT NULL,
    public_key TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE(username, acct)
);

-- Indexes
CREATE INDEX idx_users_created_at ON users(created_at);
CREATE INDEX idx_assets_content_hash ON assets(content_hash);
CREATE INDEX idx_assets_owner ON assets(owner);
CREATE INDEX idx_assets_created_at ON assets(created_at DESC);
CREATE INDEX idx_assets_tags ON assets USING GIN(tags);
CREATE INDEX idx_activities_actor_id ON activities(actor_id);
CREATE INDEX idx_activities_published ON activities(published DESC);
CREATE INDEX idx_followers_username ON followers(username);
"""
|
||||
|
||||
|
||||
async def migrate(dry_run: bool = False):
    """Run the migration.

    Loads the legacy JSON files from DATA_DIR, then — unless *dry_run* —
    recreates the PostgreSQL schema and inserts every record.

    WARNING: SCHEMA drops all existing tables first, so re-running against
    a live database is destructive.
    """
    print(f"Migrating from {DATA_DIR} to PostgreSQL")
    print(f"Database: {DATABASE_URL}")
    print(f"Dry run: {dry_run}")
    print()

    # Load JSON files (missing files fall back to empty structures)
    users = load_json(DATA_DIR / "users.json") or {}
    registry = load_json(DATA_DIR / "registry.json") or {"assets": {}}
    activities_data = load_json(DATA_DIR / "activities.json") or {"activities": []}
    followers = load_json(DATA_DIR / "followers.json") or []

    assets = registry.get("assets", {})
    activities = activities_data.get("activities", [])

    print(f"Found {len(users)} users")
    print(f"Found {len(assets)} assets")
    print(f"Found {len(activities)} activities")
    print(f"Found {len(followers)} followers")
    print()

    if dry_run:
        print("DRY RUN - no changes made")
        return

    # Connect and migrate
    conn = await asyncpg.connect(DATABASE_URL)
    try:
        # Create schema (drops and recreates all tables — see SCHEMA)
        print("Creating schema...")
        await conn.execute(SCHEMA)

        # Migrate users (must go first: assets/followers FK-reference users)
        print("Migrating users...")
        for username, user_data in users.items():
            await conn.execute(
                """INSERT INTO users (username, password_hash, email, created_at)
                VALUES ($1, $2, $3, $4)""",
                username,
                user_data["password_hash"],
                user_data.get("email"),
                parse_timestamp(user_data.get("created_at"))
            )
        print(f"  Migrated {len(users)} users")

        # Migrate assets (JSON columns are passed as serialized strings)
        print("Migrating assets...")
        for name, asset in assets.items():
            await conn.execute(
                """INSERT INTO assets (name, content_hash, asset_type, tags, metadata,
                                       url, provenance, description, origin, owner,
                                       created_at, updated_at)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)""",
                name,
                asset["content_hash"],
                asset["asset_type"],
                json.dumps(asset.get("tags", [])),
                json.dumps(asset.get("metadata", {})),
                asset.get("url"),
                json.dumps(asset.get("provenance")) if asset.get("provenance") else None,
                asset.get("description"),
                json.dumps(asset.get("origin")) if asset.get("origin") else None,
                asset["owner"],
                parse_timestamp(asset.get("created_at")),
                parse_timestamp(asset.get("updated_at"))
            )
        print(f"  Migrated {len(assets)} assets")

        # Migrate activities
        print("Migrating activities...")
        for activity in activities:
            await conn.execute(
                """INSERT INTO activities (activity_id, activity_type, actor_id,
                                           object_data, published, signature)
                VALUES ($1, $2, $3, $4, $5, $6)""",
                UUID(activity["activity_id"]),
                activity["activity_type"],
                activity["actor_id"],
                json.dumps(activity["object_data"]),
                parse_timestamp(activity["published"]),
                json.dumps(activity.get("signature")) if activity.get("signature") else None
            )
        print(f"  Migrated {len(activities)} activities")

        # Migrate followers.
        # The legacy file was not scoped per-user: plain string entries are
        # attributed to the first user found; dict entries may carry their own.
        print("Migrating followers...")
        if followers and users:
            first_user = list(users.keys())[0]
            migrated = 0
            for follower in followers:
                if isinstance(follower, str):
                    # Old format: just URL string
                    await conn.execute(
                        """INSERT INTO followers (username, acct, url)
                        VALUES ($1, $2, $3)
                        ON CONFLICT DO NOTHING""",
                        first_user,
                        follower,
                        follower
                    )
                    migrated += 1
                elif isinstance(follower, dict):
                    await conn.execute(
                        """INSERT INTO followers (username, acct, url, public_key)
                        VALUES ($1, $2, $3, $4)
                        ON CONFLICT DO NOTHING""",
                        follower.get("username", first_user),
                        follower.get("acct", follower.get("url", "")),
                        follower["url"],
                        follower.get("public_key")
                    )
                    migrated += 1
            print(f"  Migrated {migrated} followers")
        else:
            print("  No followers to migrate")

        print()
        print("Migration complete!")

    finally:
        await conn.close()
||||
|
||||
|
||||
def load_json(path: Path) -> dict | list | None:
|
||||
"""Load JSON file if it exists."""
|
||||
if path.exists():
|
||||
with open(path) as f:
|
||||
return json.load(f)
|
||||
return None
|
||||
|
||||
|
||||
def parse_timestamp(ts: str | None) -> datetime | None:
|
||||
"""Parse ISO timestamp string to datetime."""
|
||||
if not ts:
|
||||
return datetime.now(timezone.utc)
|
||||
try:
|
||||
# Handle various ISO formats
|
||||
if ts.endswith('Z'):
|
||||
ts = ts[:-1] + '+00:00'
|
||||
return datetime.fromisoformat(ts)
|
||||
except Exception:
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point for the migration script.
    # "--dry-run" anywhere in argv enables dry-run mode; the exact
    # dry-run behavior is implemented inside migrate() (defined above,
    # outside this excerpt) — presumably it skips writes; confirm there.
    dry_run = "--dry-run" in sys.argv
    asyncio.run(migrate(dry_run))
|
||||
13
l2/requirements.txt
Normal file
13
l2/requirements.txt
Normal file
@@ -0,0 +1,13 @@
|
||||
fastapi>=0.109.0
|
||||
uvicorn>=0.27.0
|
||||
requests>=2.31.0
|
||||
httpx>=0.27.0
|
||||
cryptography>=42.0.0
|
||||
bcrypt>=4.0.0
|
||||
python-jose[cryptography]>=3.3.0
|
||||
markdown>=3.5.0
|
||||
python-multipart>=0.0.6
|
||||
asyncpg>=0.29.0
|
||||
boto3>=1.34.0
|
||||
# Shared components
|
||||
git+https://git.rose-ash.com/art-dag/common.git@889ea98
|
||||
26
l2/server.py
Normal file
26
l2/server.py
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env python3
"""
Art DAG L2 Server - ActivityPub

Minimal entry point that uses the modular app factory.
All routes are defined in app/routers/.
All templates are in app/templates/.
"""

import logging
import os

# Configure logging BEFORE importing the app so that any logging done at
# import time by the app factory uses this format/level.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(name)s: %(message)s'
)

# Import the app from the factory
from app import app

if __name__ == "__main__":
    import uvicorn
    # Host/port are overridable via environment for container deployments.
    host = os.environ.get("HOST", "0.0.0.0")
    port = int(os.environ.get("PORT", "8200"))
    # The app is passed as an import string ("server:app") rather than the
    # object itself — uvicorn requires this when workers > 1, since each
    # worker process re-imports the application.
    uvicorn.run("server:app", host=host, port=port, workers=4)
|
||||
3765
l2/server_legacy.py
Normal file
3765
l2/server_legacy.py
Normal file
File diff suppressed because it is too large
Load Diff
51
l2/setup_keys.py
Executable file
51
l2/setup_keys.py
Executable file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate RSA keypair for ActivityPub signing.
|
||||
|
||||
Usage:
|
||||
python setup_keys.py [--data-dir /path/to/data] [--user username]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from keys import generate_keypair, has_keys, get_keys_dir
|
||||
|
||||
|
||||
def main():
    """Generate (or report existing) RSA keys for ActivityPub HTTP signatures.

    Flags:
        --data-dir  Base data directory (default: $ARTDAG_DATA or ~/.artdag/l2).
        --user      Username the keypair belongs to (default: $ARTDAG_USER or "giles").
        --force     Regenerate even if keys already exist (invalidates old signatures).
    """
    parser = argparse.ArgumentParser(description="Generate RSA keypair for L2 server")
    parser.add_argument("--data-dir", default=os.environ.get("ARTDAG_DATA", str(Path.home() / ".artdag" / "l2")),
                        help="Data directory")
    parser.add_argument("--user", default=os.environ.get("ARTDAG_USER", "giles"),
                        help="Username")
    parser.add_argument("--force", action="store_true",
                        help="Overwrite existing keys")

    args = parser.parse_args()
    data_dir = Path(args.data_dir)
    username = args.user

    print(f"Data directory: {data_dir}")
    print(f"Username: {username}")

    # Refuse to clobber an existing keypair unless explicitly forced —
    # regenerating invalidates every signature made with the old key.
    if has_keys(data_dir, username) and not args.force:
        print(f"\nKeys already exist for {username}!")
        print(f" Private: {get_keys_dir(data_dir) / f'{username}.pem'}")
        print(f" Public: {get_keys_dir(data_dir) / f'{username}.pub'}")
        print("\nUse --force to regenerate (will invalidate existing signatures)")
        return

    # generate_keypair/has_keys/get_keys_dir come from the project-local
    # `keys` module; per the printed messages the private key is written
    # with mode 600 — confirm in keys.generate_keypair.
    print("\nGenerating RSA-2048 keypair...")
    private_pem, public_pem = generate_keypair(data_dir, username)

    keys_dir = get_keys_dir(data_dir)
    print(f"\nKeys generated:")
    print(f" Private: {keys_dir / f'{username}.pem'} (chmod 600)")
    print(f" Public: {keys_dir / f'{username}.pub'}")
    print(f"\nPublic key (for verification):")
    print(public_pem)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run key setup when invoked directly.
    main()
|
||||
1009
l2/storage_providers.py
Normal file
1009
l2/storage_providers.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user