Fix media list duplicates and cache browse link

- Database: Use DISTINCT ON to deduplicate items by content_hash
- Database: Count unique content_hashes in count_user_items
- Server: Fix media card link from /ui/cache to /cache
- Server: Use /raw endpoint for image thumbnails
- Server: Add seen_hashes dedup in media list iteration

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
gilesb
2026-01-09 10:17:35 +00:00
parent ad63774acd
commit 9c158ff884
2 changed files with 38 additions and 18 deletions

View File

@@ -924,17 +924,21 @@ async def save_l2_share(
async def get_user_items(actor_id: str, item_type: Optional[str] = None, limit: int = 100, offset: int = 0) -> List[dict]:
    """Get all items for a user, optionally filtered by type. Deduplicates by content_hash.

    Args:
        actor_id: Owner whose items are listed.
        item_type: When given, restrict results to this item type.
        limit: Maximum number of rows returned (pagination page size).
        offset: Number of rows to skip (pagination start).

    Returns:
        A page of item rows (content_hash, type, description, filename,
        pinned, created_at, ipfs_cid), newest first, one row per unique
        content_hash.
    """
    async with pool.acquire() as conn:
        if item_type:
            rows = await conn.fetch(
                """
                SELECT * FROM (
                    -- DISTINCT ON keeps one row per content_hash; the inner
                    -- ORDER BY makes it the newest row for that hash.
                    SELECT DISTINCT ON (it.content_hash)
                        it.content_hash, it.type, it.description, it.filename, it.pinned, it.created_at,
                        ci.ipfs_cid
                    FROM item_types it
                    JOIN cache_items ci ON it.content_hash = ci.content_hash
                    WHERE it.actor_id = $1 AND it.type = $2
                    ORDER BY it.content_hash, it.created_at DESC
                ) deduped
                -- Re-sort the deduplicated set newest-first before paginating.
                ORDER BY created_at DESC
                LIMIT $3 OFFSET $4
                """,
                actor_id, item_type, limit, offset
            )
        else:
            rows = await conn.fetch(
                """
                SELECT * FROM (
                    SELECT DISTINCT ON (it.content_hash)
                        it.content_hash, it.type, it.description, it.filename, it.pinned, it.created_at,
                        ci.ipfs_cid
                    FROM item_types it
                    JOIN cache_items ci ON it.content_hash = ci.content_hash
                    WHERE it.actor_id = $1
                    ORDER BY it.content_hash, it.created_at DESC
                ) deduped
                ORDER BY created_at DESC
                LIMIT $2 OFFSET $3
                """,
                actor_id, limit, offset
            )
        # NOTE(review): the remainder of this function (converting `rows` to
        # dicts and returning them) falls outside the visible diff hunk and is
        # unchanged by this edit — confirm against the full file.
        return [dict(row) for row in rows]
@@ -968,15 +976,15 @@ async def get_user_items(actor_id: str, item_type: Optional[str] = None, limit:
async def count_user_items(actor_id: str, item_type: Optional[str] = None) -> int:
    """Count unique items (by content_hash) for a user.

    Counts DISTINCT content_hash values so the total matches the
    deduplicated listing produced by get_user_items.

    Args:
        actor_id: Owner whose items are counted.
        item_type: When given, count only items of this type.

    Returns:
        Number of unique content hashes for the user (optionally per type).
    """
    async with pool.acquire() as conn:
        if item_type:
            return await conn.fetchval(
                "SELECT COUNT(DISTINCT content_hash) FROM item_types WHERE actor_id = $1 AND type = $2",
                actor_id, item_type
            )
        else:
            return await conn.fetchval(
                "SELECT COUNT(DISTINCT content_hash) FROM item_types WHERE actor_id = $1",
                actor_id
            )