Compiler fixes: - Upvalue re-lookup returns own position (uv-index), not parent slot - Spec: cek-call uses (make-env) not (dict) — OCaml Dict≠Env - Bootstrap post-processes transpiler Dict→Env for cek_call VM runtime fixes: - compile_adapter evaluates constant defines (SPECIAL_FORM_NAMES etc.) via execute_module instead of wrapping as NativeFn closures - Native primitives: map-indexed, some, every? - Nil-safe HO forms: map/filter/for-each/some/every? accept nil as empty - expand-components? set in kernel env (not just VM globals) - unwrap_env diagnostic: reports actual type received sx-page-full command: - Single OCaml call: aser-slot body + render-to-html shell - Eliminates two pipe round-trips (was: aser-slot→Python→shell render) - Shell statics (component_defs, CSS, pages_sx) cached in Python, injected into kernel once, referenced by symbol in per-request command - Large blobs use placeholder tokens — Python splices post-render, pipe transfers ~51KB instead of 2MB Performance (warm): - Server total: 0.55s (was ~2s) - aser-slot VM: 0.3s, shell render: 0.01s, pipe: 0.06s - kwargs computation: 0.000s (cached) SX_STANDALONE mode for sx_docs dev (skips fragment fetches). Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
741 lines
27 KiB
Python
741 lines
27 KiB
Python
"""
|
|
Jinja ↔ s-expression bridge.
|
|
|
|
Provides two-way integration so s-expression components and Jinja templates
|
|
can coexist during incremental migration:
|
|
|
|
**Jinja → s-expression** (use s-expression components inside Jinja templates)::
|
|
|
|
{{ sx('(~shared:fragments/link-card :slug "apple" :title "Apple")') | safe }}
|
|
|
|
**S-expression → Jinja** (embed Jinja output inside s-expressions)::
|
|
|
|
(raw! (jinja "fragments/link_card.html" :slug "apple" :title "Apple"))
|
|
|
|
Setup::
|
|
|
|
from shared.sx.jinja_bridge import setup_sx_bridge
|
|
setup_sx_bridge(app) # call after setup_jinja(app)
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import glob
|
|
import hashlib
|
|
import logging
|
|
import os
|
|
import pickle
|
|
import time
|
|
from typing import Any
|
|
|
|
from .types import NIL, Component, Island, Keyword, Lambda, Macro, Symbol
|
|
from .parser import parse
|
|
import os as _os
|
|
if _os.environ.get("SX_USE_OCAML") == "1":
|
|
# OCaml kernel bridge — render via persistent subprocess.
|
|
# html_render and _render_component are set up lazily since the bridge
|
|
# requires an async event loop. The sync sx() function falls back to
|
|
# the ref renderer; async callers use ocaml_bridge directly.
|
|
from .ref.sx_ref import render as html_render, render_html_component as _render_component
|
|
elif _os.environ.get("SX_USE_REF") == "1":
|
|
from .ref.sx_ref import render as html_render, render_html_component as _render_component
|
|
else:
|
|
from .html import render as html_render, _render_component
|
|
|
|
_logger = logging.getLogger("sx.bridge")
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Shared component environment
|
|
# ---------------------------------------------------------------------------
|
|
|
|
# Global component registry — populated at app startup by loading component
|
|
# definition files or calling register_components().
|
|
_COMPONENT_ENV: dict[str, Any] = {}
|
|
|
|
# SHA-256 hash (12 hex chars) of all component definitions — used for
|
|
# client-side localStorage caching.
|
|
_COMPONENT_HASH: str = ""
|
|
|
|
# Raw source of .sx files marked with ;; @client — sent to the browser
|
|
# alongside component definitions so define forms (functions, data) are
|
|
# available for client-side evaluation (e.g. cssx colour/spacing functions).
|
|
_CLIENT_LIBRARY_SOURCES: list[str] = []
|
|
|
|
|
|
def get_component_env() -> dict[str, Any]:
    """Expose the module-wide shared component registry."""
    return _COMPONENT_ENV
|
|
|
|
|
|
def get_component_hash() -> str:
    """Expose the hash of the currently registered component definitions."""
    return _COMPONENT_HASH
|
|
|
|
|
|
def _compute_component_hash() -> None:
    """Recompute _COMPONENT_HASH from every registered Component/Island/Macro.

    The hash is a 12-hex-char SHA-256 over the reconstructed definition
    sources (client library sources included), used for client-side
    localStorage caching. Empty env + no client sources yields "".
    """
    global _COMPONENT_HASH
    from .parser import serialize

    def comp_params(defn: Any) -> str:
        # Components/Islands always take &key params, plus children via &rest.
        toks = ["&key", *defn.params]
        if defn.has_children:
            toks += ["&rest", "children"]
        return "(" + " ".join(toks) + ")"

    parts = list(_CLIENT_LIBRARY_SOURCES)
    for key in sorted(_COMPONENT_ENV):
        val = _COMPONENT_ENV[key]
        # Island is tested before Component, matching dispatch order elsewhere.
        if isinstance(val, Island):
            parts.append(f"(defisland ~{val.name} {comp_params(val)} {serialize(val.body)})")
        elif isinstance(val, Component):
            parts.append(f"(defcomp ~{val.name} {comp_params(val)} {serialize(val.body)})")
        elif isinstance(val, Macro):
            toks = list(val.params)
            if val.rest_param:
                toks += ["&rest", val.rest_param]
            params_sx = "(" + " ".join(toks) + ")"
            parts.append(f"(defmacro {val.name} {params_sx} {serialize(val.body)})")

    if parts:
        _COMPONENT_HASH = hashlib.sha256("\n".join(parts).encode()).hexdigest()[:12]
    else:
        _COMPONENT_HASH = ""
|
|
|
|
|
|
_CACHE_DIR = os.path.join(os.path.dirname(__file__), ".cache")
|
|
|
|
|
|
def _cache_key_for_dir(directory: str, files: list[str]) -> str:
    """Derive a 16-hex-char cache key from each file's path, mtime, and size.

    *files* is expected pre-sorted so the key is order-stable.
    """
    lines = [
        f"{fp}:{(st := os.stat(fp)).st_mtime_ns}:{st.st_size}"
        for fp in files
    ]
    return hashlib.sha256("\n".join(lines).encode()).hexdigest()[:16]
|
|
|
|
|
|
def _cache_path(directory: str, key: str) -> str:
    """Map a source directory + content key to its pickle cache file path."""
    digest = hashlib.sha256(directory.encode()).hexdigest()[:12]
    return os.path.join(_CACHE_DIR, f"sx_{digest}_{key}.pkl")
|
|
|
|
|
|
def _try_load_cache(directory: str, files: list[str]) -> bool:
    """Try to restore components from the pickle cache for *directory*.

    Returns True when the cache was valid and its entries were merged into
    the global registries; False on a miss or on any load/validation
    failure (a corrupt cache file is deleted so the next run rebuilds it).
    """
    key = _cache_key_for_dir(directory, files)
    path = _cache_path(directory, key)
    if not os.path.exists(path):
        return False
    try:
        with open(path, "rb") as f:
            cached = pickle.load(f)
        # Extract BOTH payload parts before touching the globals — the
        # original updated _COMPONENT_ENV first, so a payload missing
        # "client_sources" left the env half-mutated on failure.
        env_entries = cached["env"]
        client_sources = cached["client_sources"]
    except Exception as e:
        _logger.warning("Cache load failed for %s: %s", directory, e)
        try:
            os.remove(path)
        except OSError:
            pass
        return False
    _COMPONENT_ENV.update(env_entries)
    _CLIENT_LIBRARY_SOURCES.extend(client_sources)
    _logger.info("Cache hit: %s (%d entries)", directory, len(env_entries))
    return True
|
|
|
|
|
|
def _save_cache(
    directory: str,
    files: list[str],
    env_entries: dict[str, Any],
    client_sources: list[str],
) -> None:
    """Persist *env_entries* + *client_sources* to the directory's pickle cache.

    Best-effort: any failure is logged and swallowed so a broken cache
    never blocks startup.
    """
    key = _cache_key_for_dir(directory, files)
    path = _cache_path(directory, key)
    try:
        os.makedirs(_CACHE_DIR, exist_ok=True)
        # Closures point at the global env and would bloat (or break) the
        # pickle — they are emptied here and rebuilt after restore.
        payload = {
            "env": _strip_closures(env_entries),
            "client_sources": client_sources,
        }
        with open(path, "wb") as f:
            pickle.dump(payload, f, protocol=pickle.HIGHEST_PROTOCOL)
        # Drop superseded cache files for this directory.
        dir_hash = hashlib.sha256(directory.encode()).hexdigest()[:12]
        prefix = f"sx_{dir_hash}_"
        current = os.path.basename(path)
        for entry in os.listdir(_CACHE_DIR):
            if entry.startswith(prefix) and entry != current:
                try:
                    os.remove(os.path.join(_CACHE_DIR, entry))
                except OSError:
                    pass
    except Exception as e:
        _logger.warning("Cache save failed for %s: %s", directory, e)
|
|
|
|
|
|
def _strip_closures(env_entries: dict[str, Any]) -> dict[str, Any]:
    """Return a copy of env entries with closures emptied for pickling.

    Closures reference the global env and would bloat/fail the pickle, so
    each callable is rebuilt with ``closure={}``; ``_rebuild_closures()``
    reattaches the global env after a cache restore.
    """
    out: dict[str, Any] = {}
    for key, val in env_entries.items():
        # Island is tested BEFORE Component, matching the dispatch order used
        # everywhere else in this module (_compute_component_hash,
        # client_components_tag, components_for_page). If Island specializes
        # Component, the original Component-first order silently rebuilt
        # islands as plain components in the cache.
        if isinstance(val, Island):
            out[key] = Island(
                name=val.name, params=list(val.params),
                has_children=val.has_children, body=val.body,
                closure={}, css_classes=set(val.css_classes),
                deps=set(val.deps), io_refs=set(val.io_refs) if val.io_refs else None,
            )
        elif isinstance(val, Component):
            out[key] = Component(
                name=val.name, params=list(val.params),
                has_children=val.has_children, body=val.body,
                closure={}, css_classes=set(val.css_classes),
                deps=set(val.deps), io_refs=set(val.io_refs) if val.io_refs else None,
                affinity=val.affinity,
                param_types=dict(val.param_types) if val.param_types else None,
            )
        elif isinstance(val, Macro):
            out[key] = Macro(
                params=list(val.params), rest_param=val.rest_param,
                body=val.body, closure={}, name=val.name,
            )
        elif isinstance(val, Lambda):
            out[key] = Lambda(
                params=list(val.params), body=val.body,
                closure={}, name=val.name,
            )
        else:
            # Plain data (dicts, lists, strings, numbers) pickles as-is.
            out[key] = val
    return out
|
|
|
|
|
|
def _rebuild_closures() -> None:
    """Reattach the global env as the closure of every registered callable.

    After a cache restore closures are empty. The evaluator merges the
    closure with the caller env at call time, and the caller env is always
    _COMPONENT_ENV, so pointing every closure at the global env is safe.
    """
    for entry in _COMPONENT_ENV.values():
        if isinstance(entry, (Component, Island, Lambda, Macro)):
            entry.closure = _COMPONENT_ENV
|
|
|
|
|
|
_dirs_from_cache: set[str] = set()
|
|
|
|
|
|
def load_sx_dir(directory: str, *, _finalize: bool = True) -> None:
    """Load every .sx file under *directory* and register its components.

    boundary.sx files are skipped — the boundary validator parses those
    separately. Files whose source starts with ``;; @client`` also have
    their (normalized) source stored for delivery to the browser, so
    ``define`` forms are available client-side.

    A pickle cache keyed on file mtimes/sizes short-circuits parsing and
    evaluation when nothing changed on disk.
    """
    started = time.monotonic()

    pattern = os.path.join(directory, "**", "*.sx")
    files = sorted(
        fp for fp in glob.glob(pattern, recursive=True)
        if os.path.basename(fp) != "boundary.sx"
    )
    if not files:
        return

    # Fast path: restore from the pickle cache.
    if _try_load_cache(directory, files):
        _dirs_from_cache.add(directory)
        if _finalize:
            _rebuild_closures()
            _finalize_if_needed()
        _logger.info("Loaded %s from cache in %.1fms",
                     directory, (time.monotonic() - started) * 1000)
        return

    # Slow path: parse and evaluate every file.
    env_before = set(_COMPONENT_ENV.keys())
    new_client_sources: list[str] = []

    for filepath in files:
        with open(filepath, encoding="utf-8") as f:
            source = f.read()
        if source.lstrip().startswith(";; @client"):
            from .parser import parse_all, serialize
            normalized = "\n".join(serialize(e) for e in parse_all(source))
            new_client_sources.append(normalized)
            _CLIENT_LIBRARY_SOURCES.append(normalized)
        register_components(source, _defer_postprocess=True)

    if _finalize:
        finalize_components()

    # The cache is written AFTER finalization so deps/io_refs are included.
    new_entries = {k: v for k, v in _COMPONENT_ENV.items() if k not in env_before}
    _save_cache(directory, files, new_entries, new_client_sources)

    _logger.info("Loaded %s (%d files, %d new) in %.1fms",
                 directory, len(files), len(new_entries),
                 (time.monotonic() - started) * 1000)
|
|
|
|
|
|
def _finalize_if_needed() -> None:
    """Refresh only the component hash after a fully cache-backed load.

    Cached components already carry deps and io_refs; the hash is the one
    value that spans every component and so must always be recomputed.
    """
    _compute_component_hash()
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Dev-mode auto-reload of sx templates
|
|
# ---------------------------------------------------------------------------
|
|
|
|
_watched_dirs: list[str] = []
|
|
_file_mtimes: dict[str, float] = {}
|
|
_reload_callbacks: list[Any] = []
|
|
|
|
|
|
def register_reload_callback(fn: Any) -> None:
    """Queue *fn* to run whenever a hot-reload rebuilds the component env."""
    _reload_callbacks.append(fn)
|
|
|
|
|
|
def watch_sx_dir(directory: str) -> None:
    """Add *directory* to the dev-mode watch list and record current mtimes."""
    _watched_dirs.append(directory)
    pattern = os.path.join(directory, "**", "*.sx")
    # Seed the mtime map so the first reload_if_changed() pass is a no-op.
    for fp in sorted(glob.glob(pattern, recursive=True)):
        _file_mtimes[fp] = os.path.getmtime(fp)
|
|
|
|
|
|
def reload_if_changed() -> None:
    """Re-read sx files if any have changed on disk. Called per-request in dev.

    Detects modified and newly created files via mtime. Fix over the
    original: a DELETED file is now also treated as a change — previously
    the mtime map only grew, so removing a file left its stale components
    registered until some unrelated edit triggered a reload.
    """
    reload_logger = logging.getLogger("sx.reload")

    changed_files = []
    seen: set[str] = set()
    for directory in _watched_dirs:
        for fp in sorted(
            glob.glob(os.path.join(directory, "**", "*.sx"), recursive=True)
        ):
            seen.add(fp)
            mtime = os.path.getmtime(fp)
            if _file_mtimes.get(fp) != mtime:
                _file_mtimes[fp] = mtime
                changed_files.append(fp)
    # Files that vanished since the last scan: prune and trigger a reload.
    for fp in [p for p in _file_mtimes if p not in seen]:
        del _file_mtimes[fp]
        changed_files.append(fp)

    if not changed_files:
        return

    for fp in changed_files:
        reload_logger.info("Changed: %s", fp)
    t0 = time.monotonic()
    _COMPONENT_ENV.clear()
    _CLIENT_LIBRARY_SOURCES.clear()
    _dirs_from_cache.clear()
    invalidate_component_hash()
    from .helpers import invalidate_shell_cache
    invalidate_shell_cache()
    # Reload SX libraries first (e.g. z3.sx) so reader macros resolve.
    for cb in _reload_callbacks:
        cb()
    # Load all directories with deferred finalization, then finalize once.
    for directory in _watched_dirs:
        load_sx_dir(directory, _finalize=False)
    _rebuild_closures()
    finalize_components()
    reload_logger.info("Reloaded %d file(s), components in %.1fms",
                       len(changed_files), (time.monotonic() - t0) * 1000)

    # Invalidate the OCaml bridge component cache so the next render reloads.
    if _os.environ.get("SX_USE_OCAML") == "1":
        from .ocaml_bridge import _bridge
        if _bridge is not None:
            _bridge._components_loaded = False
            _bridge._shell_statics_injected = False
            _bridge._helpers_injected = False

    # Recompute render plans for all services that have pages.
    from .pages import _PAGE_REGISTRY, compute_page_render_plans
    for svc in _PAGE_REGISTRY:
        t2 = time.monotonic()
        compute_page_render_plans(svc)
        reload_logger.info("Render plans for %s in %.1fms", svc, (time.monotonic() - t2) * 1000)
|
|
|
|
|
|
def load_service_components(service_dir: str, service_name: str | None = None) -> None:
    """Load a service's s-expression components and handlers.

    Components come from ``{service_dir}/sx/``; handlers from
    ``{service_dir}/sx/handlers/`` and/or ``{service_dir}/sx/handlers.sx``.

    ``load_shared_components()`` runs first with deferred finalization,
    so the load here finalizes once for shared + service components.
    """
    sx_dir = os.path.join(service_dir, "sx")
    if os.path.isdir(sx_dir):
        load_sx_dir(sx_dir)  # finalizes by default
        watch_sx_dir(sx_dir)

    _rebuild_closures()

    if not service_name:
        return
    # Handlers may live in a directory, a single file, or both.
    load_handler_dir(os.path.join(sx_dir, "handlers"), service_name)
    handlers_file = os.path.join(sx_dir, "handlers.sx")
    if os.path.isfile(handlers_file):
        from .handlers import load_handler_file
        load_handler_file(handlers_file, service_name)
|
|
|
|
|
|
def load_handler_dir(directory: str, service_name: str) -> None:
    """Load handler .sx files from *directory*, if it exists."""
    if not os.path.isdir(directory):
        return
    from .handlers import load_handler_dir as _load
    _load(directory, service_name)
|
|
|
|
|
|
def register_components(sx_source: str, *, _defer_postprocess: bool = False) -> None:
    """Parse and evaluate s-expression component definitions into the
    shared environment.

    When *_defer_postprocess* is True, deps/io_refs/hash computation is
    skipped — call ``finalize_components()`` once after all files load.
    """
    from .ref.sx_ref import eval_expr as _raw_eval, trampoline as _trampoline
    from .parser import parse_all
    from .css_registry import scan_classes_from_sx

    # A proper def instead of the original ``_eval = lambda ...`` (PEP 8 E731).
    def _eval(expr: Any, env: dict[str, Any]) -> Any:
        # Drive the evaluator's trampoline to completion for each form.
        return _trampoline(_raw_eval(expr, env))

    # Snapshot names so the newly registered components can be found below.
    existing = set(_COMPONENT_ENV.keys())

    for expr in parse_all(sx_source):
        _eval(expr, _COMPONENT_ENV)

    # Pre-scan CSS classes once (lazily) and attach the full set from this
    # source to each component it defined.
    all_classes: set[str] | None = None
    for key, val in _COMPONENT_ENV.items():
        if key not in existing and isinstance(val, (Component, Island)):
            if all_classes is None:
                all_classes = scan_classes_from_sx(sx_source)
            val.css_classes = set(all_classes)

    if not _defer_postprocess:
        finalize_components()
|
|
|
|
|
|
def finalize_components() -> None:
    """Run post-load analysis: deps, IO refs, and the component hash.

    Called once after all component files are loaded. The OCaml bridge
    picks components up lazily on first render via
    ``OcamlBridge._ensure_components()`` — no explicit sync is needed here.
    """
    from .deps import compute_all_deps, compute_all_io_refs, get_all_io_names
    compute_all_deps(_COMPONENT_ENV)
    compute_all_io_refs(_COMPONENT_ENV, get_all_io_names())
    _compute_component_hash()
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# sx() — render s-expression from Jinja template
|
|
# ---------------------------------------------------------------------------
|
|
|
|
def sx(source: str, **kwargs: Any) -> str:
    """Render an s-expression string to HTML, synchronously.

    Keyword arguments are merged into the evaluation environment, so
    Jinja context variables pass straight through::

        {{ sx('(~shared:fragments/link-card :title title :slug slug)',
              title=post.title, slug=post.slug) | safe }}

    Suitable for registration as a Jinja global. For async resolution
    (with I/O primitives), use ``sx_async()``.
    """
    env = {**_COMPONENT_ENV, **kwargs}
    return html_render(parse(source), env)
|
|
|
|
|
|
def render(component_name: str, **kwargs: Any) -> str:
    """Invoke a registered component by name with Python kwargs.

    snake_case kwargs are converted to sx kebab-case keywords, so no
    s-expression strings are needed — it's just a function call.

    Raises:
        ValueError: if *component_name* is not a registered Component.
    """
    name = component_name if component_name.startswith("~") else f"~{component_name}"
    comp = _COMPONENT_ENV.get(name)
    if not isinstance(comp, Component):
        raise ValueError(f"Unknown component: {name}")

    env = dict(_COMPONENT_ENV)
    args: list[Any] = []
    for py_key, val in kwargs.items():
        sx_key = py_key.replace("_", "-")
        args.extend((Keyword(sx_key), val))
        env[sx_key] = val

    return _render_component(comp, args, env)
|
|
|
|
|
|
async def sx_async(source: str, **kwargs: Any) -> str:
    """Async ``sx()`` — resolves I/O primitives (frag, query) before rendering.

    Use when the s-expression contains I/O nodes::

        {{ sx_async('(frag "blog" "card" :slug "apple")') | safe }}

    Under SX_USE_OCAML=1, rendering happens in the OCaml kernel
    subprocess, which yields io-requests back to Python for async
    fulfillment.
    """
    if _os.environ.get("SX_USE_OCAML") == "1":
        from .ocaml_bridge import get_bridge
        bridge = await get_bridge()
        return await bridge.render(source, ctx=dict(kwargs))

    from .resolver import resolve, RequestContext

    env = {**_COMPONENT_ENV, **kwargs}
    expr = parse(source)
    # Attach the current Quart request context when one is available.
    return await resolve(expr, ctx=_get_request_context(), env=env)
|
|
|
|
|
|
def _get_request_context():
    """Build a RequestContext from the active Quart request, if any."""
    from .primitives_io import RequestContext
    try:
        from quart import g, request
        headers = request.headers
        htmx = bool(headers.get("SX-Request") or headers.get("HX-Request"))
        return RequestContext(user=getattr(g, "user", None), is_htmx=htmx)
    except Exception:
        # No active request (startup, background task) — default context.
        return RequestContext()
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Quart integration
|
|
# ---------------------------------------------------------------------------
|
|
|
|
def client_components_tag(*names: str) -> str:
    """Emit a <script type="text/sx"> tag carrying component definitions.

    Reconstructs definition source from loaded components so sx.js can
    render them identically in the browser. Client library sources are
    prepended so their ``define`` forms are available too.

    Usage in Python::

        body_end_html = client_components_tag("test-filter-card", "test-row")

    Or send all loaded components::

        body_end_html = client_components_tag()

    Returns "" when there is nothing to ship.
    """
    from .parser import serialize

    def params_of(defn: Any) -> str:
        toks = ["&key", *defn.params]
        if defn.has_children:
            toks += ["&rest", "children"]
        return "(" + " ".join(toks) + ")"

    parts = []
    for key, val in _COMPONENT_ENV.items():
        if isinstance(val, Island):
            if names and val.name not in names and key.lstrip("~") not in names:
                continue
            body_sx = serialize(val.body, pretty=True)
            parts.append(f"(defisland ~{val.name} {params_of(val)} {body_sx})")
        elif isinstance(val, Component):
            if names and val.name not in names and key.lstrip("~") not in names:
                continue
            body_sx = serialize(val.body, pretty=True)
            parts.append(f"(defcomp ~{val.name} {params_of(val)} {body_sx})")
        elif isinstance(val, Macro):
            if names and val.name not in names:
                continue
            toks = list(val.params)
            if val.rest_param:
                toks += ["&rest", val.rest_param]
            sig = "(" + " ".join(toks) + ")"
            parts.append(f"(defmacro {val.name} {sig} {serialize(val.body, pretty=True)})")

    if not parts and not _CLIENT_LIBRARY_SOURCES:
        return ""
    source = "\n".join([*_CLIENT_LIBRARY_SOURCES, *parts])
    return f'<script type="text/sx" data-components>{source}</script>'
|
|
|
|
|
|
def components_for_page(page_sx: str, service: str | None = None) -> tuple[str, str]:
    """Return (component_defs_source, stable_hash) for a page.

    Ships a per-page component subset for bandwidth, while the hash is
    computed from the FULL component env — stable across pages, so the
    browser caches once on first load and subsequent navigations hit the
    cache without re-downloading.

    Components go to the client for hydration, client-side routing, data
    binding, and future CID-based caching.
    """
    from .deps import components_needed
    from .parser import serialize

    needed = components_needed(page_sx, _COMPONENT_ENV)

    # All :data pages must be renderable during client-side navigation,
    # so their content deps are always included.
    if service:
        from .pages import get_all_pages
        for page_def in get_all_pages(service).values():
            if page_def.data_expr is not None and page_def.content_expr is not None:
                content_src = serialize(page_def.content_expr)
                needed |= components_needed(content_src, _COMPONENT_ENV)

    if not needed:
        return "", ""

    def params_of(defn: Any) -> str:
        toks = ["&key", *defn.params]
        if defn.has_children:
            toks += ["&rest", "children"]
        return "(" + " ".join(toks) + ")"

    parts = []
    for key, val in _COMPONENT_ENV.items():
        if isinstance(val, Island):
            if f"~{val.name}" in needed or key in needed:
                body_sx = serialize(val.body, pretty=True)
                parts.append(f"(defisland ~{val.name} {params_of(val)} {body_sx})")
        elif isinstance(val, Component):
            if f"~{val.name}" in needed or key in needed:
                body_sx = serialize(val.body, pretty=True)
                parts.append(f"(defcomp ~{val.name} {params_of(val)} {body_sx})")
        elif isinstance(val, Macro):
            # Macros are small and widely shared — always include them.
            toks = list(val.params)
            if val.rest_param:
                toks += ["&rest", val.rest_param]
            sig = "(" + " ".join(toks) + ")"
            parts.append(f"(defmacro {val.name} {sig} {serialize(val.body, pretty=True)})")

    if not parts and not _CLIENT_LIBRARY_SOURCES:
        return "", ""

    # Client library sources (define forms) precede the component defs.
    source = "\n".join([*_CLIENT_LIBRARY_SOURCES, *parts])

    # Hash over the FULL env — identical on every page, so the browser's
    # hash-keyed cache hits on navigation.
    return source, _component_env_hash()
|
|
|
|
|
|
# Cached full-env hash — invalidated when components are reloaded.
|
|
_env_hash_cache: str | None = None
|
|
|
|
|
|
def _component_env_hash() -> str:
    """Compute (and memoize) a stable hash over all component names + bodies.

    The memo lives in ``_env_hash_cache`` and is dropped by
    ``invalidate_component_hash()`` on reload.
    """
    global _env_hash_cache
    if _env_hash_cache is None:
        from .parser import serialize
        h = hashlib.sha256()
        for key in sorted(_COMPONENT_ENV.keys()):
            val = _COMPONENT_ENV[key]
            if isinstance(val, (Island, Component, Macro)):
                h.update(key.encode())
                h.update(serialize(val.body).encode())
        _env_hash_cache = h.hexdigest()[:12]
    return _env_hash_cache
|
|
|
|
|
|
def invalidate_component_hash() -> None:
    """Forget the cached full-env hash (components were reloaded)."""
    global _env_hash_cache
    _env_hash_cache = None
|
|
|
|
|
|
def css_classes_for_page(page_sx: str, service: str | None = None) -> set[str]:
    """Return the CSS classes needed by a page's component bundle + source.

    Instead of unioning ALL component CSS classes, only classes from
    components the page actually uses are included (plus the deps of all
    :data pages, which must render client-side).
    """
    from .deps import components_needed
    from .css_registry import scan_classes_from_sx
    from .parser import serialize

    needed = components_needed(page_sx, _COMPONENT_ENV)

    if service:
        from .pages import get_all_pages
        for page_def in get_all_pages(service).values():
            if page_def.data_expr is not None and page_def.content_expr is not None:
                content_src = serialize(page_def.content_expr)
                needed |= components_needed(content_src, _COMPONENT_ENV)

    classes: set[str] = set()
    for key, val in _COMPONENT_ENV.items():
        if not isinstance(val, (Component, Island)):
            continue
        if (f"~{val.name}" in needed or key in needed) and val.css_classes:
            classes.update(val.css_classes)

    # The page sx itself is unique per request — scan it directly.
    classes.update(scan_classes_from_sx(page_sx))
    return classes
|
|
|
|
|
|
def sx_css_all() -> str:
    """Full CSS dump (preamble + utilities) for Jinja fallback pages."""
    from .css_registry import get_all_css
    return get_all_css()
|
|
|
|
|
|
def setup_sx_bridge(app: Any) -> None:
    """Register s-expression helpers with a Quart app's Jinja environment.

    Call in the app factory after ``setup_jinja(app)``::

        from shared.sx.jinja_bridge import setup_sx_bridge
        setup_sx_bridge(app)

    Registered globals:
    - ``sx(source, **kwargs)`` — sync render (components, pure HTML)
    - ``render(name, **kwargs)`` — call a registered component directly
    - ``sx_async(source, **kwargs)`` — async render (with I/O resolution)
    - ``sx_css_all()`` — full CSS dump for non-sx pages
    """
    helpers = {
        "sx": sx,
        "render": render,
        "sx_async": sx_async,
        "sx_css_all": sx_css_all,
    }
    app.jinja_env.globals.update(helpers)
|