diff --git a/database.py b/database.py
index a0fc0d5..1d59a6a 100644
--- a/database.py
+++ b/database.py
@@ -65,12 +65,19 @@ CREATE TABLE IF NOT EXISTS l2_shares (
     actor_id VARCHAR(255) NOT NULL,
     l2_server VARCHAR(255) NOT NULL,
     asset_name VARCHAR(255) NOT NULL,
+    activity_id VARCHAR(128),
     content_type VARCHAR(50) NOT NULL,
     published_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
     last_synced_at TIMESTAMP WITH TIME ZONE,
     UNIQUE(content_hash, actor_id, l2_server, content_type)
 );
 
+-- Add activity_id column if it doesn't exist (for existing databases)
+DO $$ BEGIN
+    ALTER TABLE l2_shares ADD COLUMN IF NOT EXISTS activity_id VARCHAR(128);
+EXCEPTION WHEN others THEN NULL;
+END $$;
+
 -- Run cache: maps content-addressable run_id to output
 -- run_id is a hash of (sorted inputs + recipe), making runs deterministic
 CREATE TABLE IF NOT EXISTS run_cache (
@@ -512,7 +519,7 @@ async def get_l2_shares(content_hash: str, actor_id: Optional[str] = None) -> Li
         if actor_id:
             rows = await conn.fetch(
                 """
-                SELECT id, content_hash, actor_id, l2_server, asset_name, content_type, published_at, last_synced_at
+                SELECT id, content_hash, actor_id, l2_server, asset_name, activity_id, content_type, published_at, last_synced_at
                 FROM l2_shares
                 WHERE content_hash = $1 AND actor_id = $2
                 ORDER BY published_at
@@ -522,7 +529,7 @@ async def get_l2_shares(content_hash: str, actor_id: Optional[str] = None) -> Li
         else:
             rows = await conn.fetch(
                 """
-                SELECT id, content_hash, actor_id, l2_server, asset_name, content_type, published_at, last_synced_at
+                SELECT id, content_hash, actor_id, l2_server, asset_name, activity_id, content_type, published_at, last_synced_at
                 FROM l2_shares
                 WHERE content_hash = $1
                 ORDER BY published_at
@@ -780,7 +787,7 @@ async def load_item_metadata(content_hash: str, actor_id: Optional[str] = None)
         if actor_id:
             shares = await conn.fetch(
                 """
-                SELECT l2_server, asset_name, content_type, published_at, last_synced_at
+                SELECT l2_server, asset_name, activity_id, content_type, published_at, last_synced_at
                 FROM l2_shares
                 WHERE content_hash = $1 AND actor_id = $2
                 """, content_hash, actor_id
@@ -788,7 +795,7 @@ async def load_item_metadata(content_hash: str, actor_id: Optional[str] = None)
         else:
             shares = await conn.fetch(
                 """
-                SELECT l2_server, asset_name, content_type, published_at, last_synced_at
+                SELECT l2_server, asset_name, activity_id, content_type, published_at, last_synced_at
                 FROM l2_shares
                 WHERE content_hash = $1
                 """, content_hash
@@ -799,6 +806,7 @@ async def load_item_metadata(content_hash: str, actor_id: Optional[str] = None)
             {
                 "l2_server": s["l2_server"],
                 "asset_name": s["asset_name"],
+                "activity_id": s["activity_id"],
                 "content_type": s["content_type"],
                 "published_at": s["published_at"].isoformat() if s["published_at"] else None,
                 "last_synced_at": s["last_synced_at"].isoformat() if s["last_synced_at"] else None,
@@ -810,6 +818,7 @@ async def load_item_metadata(content_hash: str, actor_id: Optional[str] = None)
         result["published"] = {
             "to_l2": True,
             "asset_name": shares[0]["asset_name"],
+            "activity_id": shares[0]["activity_id"],
             "l2_server": shares[0]["l2_server"],
         }
 
@@ -944,24 +953,27 @@ async def save_l2_share(
     actor_id: str,
     l2_server: str,
     asset_name: str,
-    content_type: str = "media"
+    content_type: str = "media",
+    activity_id: Optional[str] = None
 ) -> dict:
     """Save an L2 share and return share info."""
     async with pool.acquire() as conn:
         row = await conn.fetchrow(
             """
-            INSERT INTO l2_shares (content_hash, actor_id, l2_server, asset_name, content_type, last_synced_at)
-            VALUES ($1, $2, $3, $4, $5, NOW())
+            INSERT INTO l2_shares (content_hash, actor_id, l2_server, asset_name, activity_id, content_type, last_synced_at)
+            VALUES ($1, $2, $3, $4, $5, $6, NOW())
             ON CONFLICT (content_hash, actor_id, l2_server, content_type)
             DO UPDATE SET asset_name = EXCLUDED.asset_name,
+                activity_id = COALESCE(EXCLUDED.activity_id, l2_shares.activity_id),
                 last_synced_at = NOW()
-            RETURNING l2_server, asset_name, content_type, published_at, last_synced_at
+            RETURNING l2_server, asset_name, activity_id, content_type, published_at, last_synced_at
             """,
-            content_hash, actor_id, l2_server, asset_name, content_type
+            content_hash, actor_id, l2_server, asset_name, activity_id, content_type
         )
         return {
             "l2_server": row["l2_server"],
             "asset_name": row["asset_name"],
+            "activity_id": row["activity_id"],
             "content_type": row["content_type"],
             "published_at": row["published_at"].isoformat() if row["published_at"] else None,
             "last_synced_at": row["last_synced_at"].isoformat() if row["last_synced_at"] else None,
diff --git a/server.py b/server.py
index 6787fd3..37994b1 100644
--- a/server.py
+++ b/server.py
@@ -1126,13 +1126,16 @@ async def run_detail(run_id: str, request: Request):
             l2_server = share.get("l2_server", "")
             l2_https = l2_server.replace("http://", "https://")
             asset_name = share.get("asset_name", "")
+            activity_id = share.get("activity_id")
+            # Link to activity if available, otherwise fall back to asset
+            l2_link = f"{l2_https}/activities/{activity_id}" if activity_id else f"{l2_https}/assets/{asset_name}"
             publish_html = f'''
-                Published as {asset_name}
-                View on L2
+                Published as {asset_name[:16]}...
+                View on L2