All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 4m48s
- fetch_fragment_batch() for N+1 avoidance with per-key Redis cache - link-card fragment handlers in blog, market, events, federation (single + batch mode) - link_card.html templates per app with content-specific previews - shared/infrastructure/oembed.py: build_oembed_response, build_og_meta, build_oembed_link_tag - GET /oembed routes on blog, market, events - og_meta + oembed_link rendering in base template <head> - INTERNAL_URL_ARTDAG in docker-compose.yml for cross-stack fragment fetches Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
303 lines · 8.8 KiB · Python
"""
|
|
Server-side fragment composition client.
|
|
|
|
Each coop app exposes HTML fragments at ``/internal/fragments/{type}``.
|
|
This module provides helpers to fetch and cache those fragments so that
|
|
consuming apps can compose cross-app UI without shared templates.
|
|
|
|
Failures raise ``FragmentError`` by default so broken fragments are
|
|
immediately visible rather than silently missing from the page.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import asyncio
|
|
import logging
|
|
import os
|
|
from typing import Sequence
|
|
|
|
import httpx
|
|
|
|
# Module-level logger named after this module's import path.
log = logging.getLogger(__name__)

# Re-usable async HTTP client (created lazily by ``_get_client``, one per
# process) so connections are pooled across fragment fetches.
_client: httpx.AsyncClient | None = None

# Default request timeout (seconds) applied to every fragment fetch.
_DEFAULT_TIMEOUT = 2.0

# Header sent on every fragment request so providers can distinguish
# fragment fetches from normal browser traffic; also read back by
# ``_is_fragment_request`` to short-circuit nested fragment fetches.
FRAGMENT_HEADER = "X-Fragment-Request"
|
|
|
|
|
|
class FragmentError(Exception):
    """Signals that fetching a remote HTML fragment did not succeed."""
|
|
|
|
|
|
def _get_client() -> httpx.AsyncClient:
    """Return the process-wide async HTTP client, (re)creating it on demand.

    A fresh client is built when none exists yet or the previous one has
    been closed; otherwise the pooled instance is reused.
    """
    global _client
    stale = _client is None or _client.is_closed
    if stale:
        _client = httpx.AsyncClient(
            timeout=httpx.Timeout(_DEFAULT_TIMEOUT),
            follow_redirects=False,
        )
    return _client
|
|
|
|
|
|
def _internal_url(app_name: str) -> str:
|
|
"""Resolve the Docker-internal base URL for *app_name*.
|
|
|
|
Looks up ``INTERNAL_URL_{APP}`` first, falls back to
|
|
``http://{app}:8000``.
|
|
"""
|
|
env_key = f"INTERNAL_URL_{app_name.upper()}"
|
|
return os.getenv(env_key, f"http://{app_name}:8000").rstrip("/")
|
|
|
|
|
|
# ------------------------------------------------------------------
|
|
# Public API
|
|
# ------------------------------------------------------------------
|
|
|
|
def _is_fragment_request() -> bool:
|
|
"""True when the current request is itself a fragment fetch."""
|
|
try:
|
|
from quart import request as _req
|
|
return bool(_req.headers.get(FRAGMENT_HEADER))
|
|
except Exception:
|
|
return False
|
|
|
|
|
|
async def fetch_fragment(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> str:
    """Fetch an HTML fragment from another app.

    Returns the raw HTML string. When *required* is True (default), a
    network error or non-200 response raises ``FragmentError``; when
    False, failures are logged at warning level and ``""`` is returned.

    Automatically returns ``""`` when called while serving a fragment
    request itself, preventing circular dependencies between apps.
    """
    if _is_fragment_request():
        return ""

    url = f"{_internal_url(app_name)}/internal/fragments/{fragment_type}"

    def _fail(message: str, cause: Exception | None = None) -> str:
        # Centralised failure policy: hard error when required,
        # otherwise log-and-empty so the page still renders.
        if required:
            log.error(message)
            err = FragmentError(message)
            if cause is not None:
                raise err from cause
            raise err
        log.warning(message)
        return ""

    status_msg: str
    try:
        resp = await _get_client().get(
            url,
            params=params,
            headers={FRAGMENT_HEADER: "1"},
            timeout=timeout,
        )
        if resp.status_code == 200:
            return resp.text
        status_msg = f"Fragment {app_name}/{fragment_type} returned {resp.status_code}"
    except Exception as exc:
        return _fail(f"Fragment {app_name}/{fragment_type} failed: {exc}", exc)
    return _fail(status_msg)
|
|
|
|
|
|
async def fetch_fragments(
    requests: Sequence[tuple[str, str, dict | None]],
    *,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> list[str]:
    """Fetch multiple fragments concurrently.

    *requests* holds ``(app_name, fragment_type, params)`` tuples; the
    returned HTML strings preserve that order. With *required* True,
    any single failure raises ``FragmentError``.
    """
    coros = [
        fetch_fragment(app, ftype, params=params, timeout=timeout, required=required)
        for app, ftype, params in requests
    ]
    return list(await asyncio.gather(*coros))
|
|
|
|
|
|
async def fetch_fragment_batch(
    app_name: str,
    fragment_type: str,
    *,
    keys: list[str],
    params: dict | None = None,
    ttl: int = 30,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> dict[str, str]:
    """Fetch a batched fragment keyed by multiple identifiers.

    The provider receives *keys* as a comma-separated ``keys`` query param
    and returns HTML with ``<!-- fragment:{key} -->`` comment markers
    delimiting each entry. Returns ``dict[key, html]`` with ``""`` for
    missing keys.

    Individual results are cached in Redis per key under
    ``frag:{app}:{type}:{key}[:{sorted_params}]`` for *ttl* seconds, so
    repeated page renders only re-fetch keys whose cache entries expired
    (N+1 avoidance). All Redis errors are swallowed — the cache is a
    best-effort layer on top of the HTTP fetch.
    """
    # Short-circuit on nested fragment requests (circular-dependency
    # guard, same as fetch_fragment) and on an empty key list.
    if _is_fragment_request() or not keys:
        return {k: "" for k in keys}

    redis = _get_redis()
    results: dict[str, str] = {}
    missing: list[str] = []  # keys not found in cache; fetched below

    # Build a stable cache-key suffix from the extra params so different
    # param sets never share cache entries.
    psuffix = ""
    if params:
        sorted_items = sorted(params.items())
        psuffix = ":" + "&".join(f"{k}={v}" for k, v in sorted_items)

    # Check Redis for individually cached keys; anything not cached (or
    # failing to read) falls through to the batched HTTP fetch.
    for key in keys:
        cache_key = f"frag:{app_name}:{fragment_type}:{key}{psuffix}"
        if redis and ttl > 0:
            try:
                cached = await redis.get(cache_key)
                if cached is not None:
                    # Redis may hand back bytes or str depending on the
                    # client's decode_responses setting.
                    results[key] = cached.decode() if isinstance(cached, bytes) else cached
                    continue
            except Exception:
                pass  # cache read failure — treat as a miss
        missing.append(key)

    if not missing:
        return results

    # Fetch all missing keys in one request (single round-trip).
    fetch_params = dict(params or {})
    fetch_params["keys"] = ",".join(missing)

    try:
        html = await fetch_fragment(
            app_name, fragment_type, params=fetch_params,
            timeout=timeout, required=required,
        )
    except FragmentError:
        # Fill uncached keys with "" so the result dict stays complete,
        # then propagate when the caller demanded success.
        for key in missing:
            results.setdefault(key, "")
        if required:
            raise
        return results

    # Parse response by <!-- fragment:{key} --> markers; keys absent
    # from the response map to "".
    parsed = _parse_fragment_markers(html, missing)
    for key in missing:
        value = parsed.get(key, "")
        results[key] = value
        # Cache individual results (including "" — avoids re-hammering
        # the provider for keys it doesn't know about).
        if redis and ttl > 0:
            cache_key = f"frag:{app_name}:{fragment_type}:{key}{psuffix}"
            try:
                await redis.set(cache_key, value.encode(), ex=ttl)
            except Exception:
                pass  # cache write failure is non-fatal

    return results
|
|
|
|
|
|
def _parse_fragment_markers(html: str, keys: list[str]) -> dict[str, str]:
|
|
"""Split batched HTML by ``<!-- fragment:{key} -->`` comment markers."""
|
|
result: dict[str, str] = {}
|
|
marker_prefix = "<!-- fragment:"
|
|
marker_suffix = " -->"
|
|
|
|
for i, key in enumerate(keys):
|
|
start_marker = f"{marker_prefix}{key}{marker_suffix}"
|
|
start_idx = html.find(start_marker)
|
|
if start_idx == -1:
|
|
result[key] = ""
|
|
continue
|
|
content_start = start_idx + len(start_marker)
|
|
|
|
# Find next marker or end of string
|
|
next_marker_idx = len(html)
|
|
for other_key in keys:
|
|
if other_key == key:
|
|
continue
|
|
other_marker = f"{marker_prefix}{other_key}{marker_suffix}"
|
|
idx = html.find(other_marker, content_start)
|
|
if idx != -1 and idx < next_marker_idx:
|
|
next_marker_idx = idx
|
|
|
|
result[key] = html[content_start:next_marker_idx].strip()
|
|
|
|
return result
|
|
|
|
|
|
async def fetch_fragment_cached(
    app_name: str,
    fragment_type: str,
    *,
    params: dict | None = None,
    ttl: int = 30,
    timeout: float = _DEFAULT_TIMEOUT,
    required: bool = True,
) -> str:
    """Fetch a fragment through a short-lived Redis cache.

    Cache key: ``frag:{app}:{type}:{sorted_params}`` — params are sorted
    so identical calls share one entry regardless of dict ordering.
    """
    # Derive a stable cache key from the request parameters.
    key_suffix = ""
    if params:
        key_suffix = ":" + "&".join(
            f"{name}={value}" for name, value in sorted(params.items())
        )
    cache_key = f"frag:{app_name}:{fragment_type}{key_suffix}"

    redis = _get_redis()

    # Fast path: serve a previously cached copy when available.
    if redis:
        try:
            hit = await redis.get(cache_key)
            if hit is not None:
                return hit.decode() if isinstance(hit, bytes) else hit
        except Exception:
            pass  # treat any cache error as a miss

    # Cache miss — go to the provider app.
    html = await fetch_fragment(
        app_name, fragment_type, params=params, timeout=timeout, required=required,
    )

    # Store the result (even an empty string — avoids hammering a down
    # service on every render).
    if redis and ttl > 0:
        try:
            await redis.set(cache_key, html.encode(), ex=ttl)
        except Exception:
            pass  # cache write failure is non-fatal

    return html
|
|
|
|
|
|
# ------------------------------------------------------------------
|
|
# Helpers
|
|
# ------------------------------------------------------------------
|
|
|
|
def _get_redis():
|
|
"""Return the current app's Redis connection, or None."""
|
|
try:
|
|
from quart import current_app
|
|
r = current_app.redis
|
|
return r if r else None
|
|
except Exception:
|
|
return None
|