Add federation/ActivityPub models, contracts, and services
Phase 0+1 of ActivityPub integration: - 6 ORM models (ActorProfile, APActivity, APFollower, APInboxItem, APAnchor, IPFSPin) - FederationService protocol + SqlFederationService implementation + stub - 4 DTOs (ActorProfileDTO, APActivityDTO, APFollowerDTO, APAnchorDTO) - Registry slot for federation service - Alembic migration for federation tables - IPFS async client (httpx-based) - HTTP Signatures (RSA-2048 sign/verify) - login_url() now uses AUTH_APP env var for flexible auth routing Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
181
utils/http_signatures.py
Normal file
181
utils/http_signatures.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""RSA key generation and HTTP Signature signing/verification.
|
||||
|
||||
Keys are stored in DB (ActorProfile), not the filesystem.
|
||||
Ported from ~/art-dag/activity-pub/keys.py.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||
|
||||
|
||||
def generate_rsa_keypair() -> tuple[str, str]:
    """Create a fresh RSA-2048 keypair for an ActivityPub actor.

    Returns:
        A ``(private_pem, public_pem)`` pair of PEM-encoded UTF-8 strings
        (PKCS#8 private key, SubjectPublicKeyInfo public key).
    """
    # 65537 is the conventional public exponent; 2048 bits is the
    # de-facto minimum accepted by fediverse implementations.
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

    priv = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    ).decode()

    pub = key.public_key().public_bytes(
        serialization.Encoding.PEM,
        serialization.PublicFormat.SubjectPublicKeyInfo,
    ).decode()

    return priv, pub
|
||||
|
||||
|
||||
def sign_request(
    private_key_pem: str,
    key_id: str,
    method: str,
    path: str,
    host: str,
    body: bytes | None = None,
    date: str | None = None,
) -> dict[str, str]:
    """Build HTTP Signature headers for an outgoing request.

    Returns a dict of headers to merge into the request:
    ``{"Signature": ..., "Date": ..., "Digest": ..., "Host": ...}``
    """
    # HTTP-date format (RFC 7231), always rendered in GMT.
    if date is None:
        date = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT")

    result: dict[str, str] = {"Host": host, "Date": date}

    # Pseudo-header lines that make up the string to be signed.
    pseudo_lines = [
        f"(request-target): {method.lower()} {path}",
        f"host: {host}",
        f"date: {date}",
    ]

    # Requests with a body also get a SHA-256 Digest header, covered
    # by the signature so the payload cannot be swapped in transit.
    if body is not None:
        body_hash = base64.b64encode(hashlib.sha256(body).digest()).decode()
        digest_value = f"SHA-256={body_hash}"
        pseudo_lines.append(f"digest: {digest_value}")
        result["Digest"] = digest_value

    to_sign = "\n".join(pseudo_lines)
    covered = " ".join(line.split(":")[0] for line in pseudo_lines)

    key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    raw_sig = key.sign(to_sign.encode(), padding.PKCS1v15(), hashes.SHA256())
    encoded_sig = base64.b64encode(raw_sig).decode()

    result["Signature"] = (
        f'keyId="{key_id}",'
        f'headers="{covered}",'
        f'signature="{encoded_sig}",'
        f'algorithm="rsa-sha256"'
    )

    return result
|
||||
|
||||
|
||||
def verify_request_signature(
    public_key_pem: str,
    signature_header: str,
    method: str,
    path: str,
    headers: dict[str, str],
) -> bool:
    """Verify an incoming HTTP Signature.

    Args:
        public_key_pem: PEM-encoded public key of the sender.
        signature_header: Value of the ``Signature`` header.
        method: HTTP method (GET, POST, etc.).
        path: Request path (e.g. ``/users/alice/inbox``).
        headers: All request headers (case-insensitive keys).

    Returns:
        True if the signature is valid. Any parse or verification
        failure — including a malformed header or invalid key PEM —
        yields False rather than an exception, since this guards
        untrusted network input.
    """
    # Parse the comma-separated key="value" pairs of the Signature header.
    # Skip malformed parts instead of raising (str.index would throw
    # ValueError on a part with no "=").
    parts: dict[str, str] = {}
    for part in signature_header.split(","):
        part = part.strip()
        eq = part.find("=")
        if eq <= 0:
            continue
        parts[part[:eq]] = part[eq + 1:].strip('"')

    # Per the HTTP Signatures draft, "date" is the default covered header.
    signed_headers = parts.get("headers", "date").split()
    signature_b64 = parts.get("signature", "")
    if not signature_b64:
        # No signature value at all: nothing to verify.
        return False

    # Reconstruct the signed string exactly as the sender built it,
    # in the order listed in the "headers" parameter.
    lines: list[str] = []
    # Normalize header lookup to lowercase
    lc_headers = {k.lower(): v for k, v in headers.items()}
    for h in signed_headers:
        if h == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            lines.append(f"{h}: {lc_headers.get(h, '')}")

    signed_string = "\n".join(lines)

    try:
        # Key load is inside the try so a bad/garbage PEM returns False
        # instead of propagating a ValueError to the caller.
        public_key = serialization.load_pem_public_key(public_key_pem.encode())
        public_key.verify(
            base64.b64decode(signature_b64),
            signed_string.encode(),
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        # Bad base64, wrong key type, or signature mismatch.
        return False
|
||||
|
||||
|
||||
def create_ld_signature(
    private_key_pem: str,
    key_id: str,
    activity: dict,
) -> dict:
    """Create an RsaSignature2017 Linked Data signature for an activity."""
    # NOTE(review): the RsaSignature2017 spec calls for RDF (URDNA2015)
    # canonicalization; sorted compact JSON is used here as a simpler
    # stand-in — confirm remote peers accept it.
    payload = json.dumps(activity, sort_keys=True, separators=(",", ":")).encode()

    key = serialization.load_pem_private_key(
        private_key_pem.encode(), password=None,
    )
    raw_sig = key.sign(payload, padding.PKCS1v15(), hashes.SHA256())

    return {
        "type": "RsaSignature2017",
        "creator": key_id,
        "created": datetime.now(timezone.utc).isoformat(),
        "signatureValue": base64.b64encode(raw_sig).decode(),
    }
|
||||
141
utils/ipfs_client.py
Normal file
141
utils/ipfs_client.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""Async IPFS client for content-addressed storage.
|
||||
|
||||
All content can be stored on IPFS — blog posts, products, activities, etc.
|
||||
Ported from ~/art-dag/activity-pub/ipfs_client.py (converted to async httpx).
|
||||
|
||||
Config via environment:
|
||||
IPFS_API — multiaddr or URL (default: /ip4/127.0.0.1/tcp/5001)
|
||||
IPFS_TIMEOUT — request timeout in seconds (default: 60)
|
||||
IPFS_GATEWAY_URL — public gateway for CID links (default: https://ipfs.io)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IPFSError(Exception):
    """Signals a failed IPFS operation (add, cat, pin, ...)."""
|
||||
|
||||
|
||||
# -- Config ------------------------------------------------------------------
|
||||
|
||||
# IPFS HTTP API endpoint, as a multiaddr or plain URL.
IPFS_API = os.environ.get("IPFS_API", "/ip4/127.0.0.1/tcp/5001")
# Per-request timeout (seconds) for IPFS API calls.
IPFS_TIMEOUT = int(os.environ.get("IPFS_TIMEOUT", "60"))
# Public gateway host used when building shareable CID links.
IPFS_GATEWAY_URL = os.environ.get("IPFS_GATEWAY_URL", "https://ipfs.io")
|
||||
|
||||
|
||||
def _multiaddr_to_url(multiaddr: str) -> str:
|
||||
"""Convert IPFS multiaddr to HTTP URL."""
|
||||
dns_match = re.match(r"/dns[46]?/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if dns_match:
|
||||
return f"http://{dns_match.group(1)}:{dns_match.group(2)}"
|
||||
|
||||
ip4_match = re.match(r"/ip4/([^/]+)/tcp/(\d+)", multiaddr)
|
||||
if ip4_match:
|
||||
return f"http://{ip4_match.group(1)}:{ip4_match.group(2)}"
|
||||
|
||||
if multiaddr.startswith("http"):
|
||||
return multiaddr
|
||||
return "http://127.0.0.1:5001"
|
||||
|
||||
|
||||
# Resolved once at import time; changing IPFS_API afterwards has no effect.
IPFS_BASE_URL = _multiaddr_to_url(IPFS_API)
|
||||
|
||||
|
||||
# -- Async client functions --------------------------------------------------
|
||||
|
||||
async def add_bytes(data: bytes, *, pin: bool = True) -> str:
    """Upload raw bytes to IPFS and return the resulting CID.

    Args:
        data: Payload to store.
        pin: Whether the daemon should pin the new object.

    Raises:
        IPFSError: If the daemon is unreachable or the upload fails.
    """
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/add",
                # The API expects a lowercase "true"/"false" query value.
                params={"pin": str(pin).lower()},
                files={"file": ("data", data)},
            )
            response.raise_for_status()
            cid = response.json()["Hash"]
            logger.info("Added to IPFS: %d bytes -> %s", len(data), cid)
            return cid
    except Exception as e:
        logger.error("Failed to add bytes to IPFS: %s", e)
        raise IPFSError(f"Failed to add bytes: {e}") from e
|
||||
|
||||
|
||||
async def add_json(data: dict) -> str:
    """Serialize dict to sorted JSON and add to IPFS."""
    # Sorted keys + fixed indent give a reproducible CID per payload.
    payload = json.dumps(data, indent=2, sort_keys=True)
    return await add_bytes(payload.encode("utf-8"), pin=True)
|
||||
|
||||
|
||||
async def get_bytes(cid: str) -> bytes | None:
    """Fetch content from IPFS by CID; None if retrieval fails."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/cat",
                params={"arg": cid},
            )
            response.raise_for_status()
            body = response.content
            logger.info("Retrieved from IPFS: %s (%d bytes)", cid, len(body))
            return body
    except Exception as e:
        # Best-effort read: callers treat None as "content unavailable".
        logger.error("Failed to get from IPFS: %s", e)
        return None
|
||||
|
||||
|
||||
async def pin_cid(cid: str) -> bool:
    """Pin a CID on this node; True on success."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/add",
                params={"arg": cid},
            )
            response.raise_for_status()
    except Exception as e:
        logger.error("Failed to pin on IPFS: %s", e)
        return False
    logger.info("Pinned on IPFS: %s", cid)
    return True
|
||||
|
||||
|
||||
async def unpin_cid(cid: str) -> bool:
    """Unpin a CID from this node; True on success."""
    try:
        async with httpx.AsyncClient(timeout=IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{IPFS_BASE_URL}/api/v0/pin/rm",
                params={"arg": cid},
            )
            response.raise_for_status()
    except Exception as e:
        logger.error("Failed to unpin from IPFS: %s", e)
        return False
    logger.info("Unpinned from IPFS: %s", cid)
    return True
|
||||
|
||||
|
||||
async def is_available() -> bool:
    """Check if IPFS daemon is reachable."""
    try:
        # Short fixed timeout: this is a liveness probe, not a transfer.
        async with httpx.AsyncClient(timeout=5) as client:
            response = await client.post(f"{IPFS_BASE_URL}/api/v0/id")
    except Exception:
        return False
    return response.status_code == 200
|
||||
|
||||
|
||||
def gateway_url(cid: str) -> str:
    """Return a public gateway URL for a CID."""
    return IPFS_GATEWAY_URL + "/ipfs/" + cid
|
||||
Reference in New Issue
Block a user