Restore stashed WIP: live streaming plan, forms, CI pipeline, streaming demo
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -14,7 +14,7 @@
|
||||
// =========================================================================
|
||||
|
||||
var NIL = Object.freeze({ _nil: true, toString: function() { return "nil"; } });
|
||||
var SX_VERSION = "2026-03-07T20:18:37Z";
|
||||
var SX_VERSION = "2026-03-07T21:45:27Z";
|
||||
|
||||
function isNil(x) { return x === NIL || x === null || x === undefined; }
|
||||
function isSxTruthy(x) { return x !== false && !isNil(x); }
|
||||
@@ -2046,7 +2046,7 @@ return domAppendToHead(link); }, domQueryAll(container, "link[rel=\"stylesheet\"
|
||||
if (isSxTruthy(hasIo)) {
|
||||
registerIoDeps(ioDeps);
|
||||
}
|
||||
return (isSxTruthy(get(match, "has-data")) ? (function() {
|
||||
return (isSxTruthy(get(match, "stream")) ? (logInfo((String("sx:route streaming ") + String(pathname))), fetchStreaming(target, pathname, buildRequestHeaders(target, loadedComponentNames(), _cssHash)), true) : (isSxTruthy(get(match, "has-data")) ? (function() {
|
||||
var cacheKey = pageDataCacheKey(pageName, params);
|
||||
var cached = pageDataCacheGet(cacheKey);
|
||||
return (isSxTruthy(cached) ? (function() {
|
||||
@@ -2067,7 +2067,7 @@ return (function() {
|
||||
var env = merge(closure, params);
|
||||
var rendered = tryEvalContent(contentSrc, env);
|
||||
return (isSxTruthy(isNil(rendered)) ? (logInfo((String("sx:route server (eval failed) ") + String(pathname))), false) : (swapRenderedContent(target, rendered, pathname), true));
|
||||
})()));
|
||||
})())));
|
||||
})()));
|
||||
})());
|
||||
})());
|
||||
@@ -3033,6 +3033,134 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
}).catch(function() { location.reload(); });
|
||||
}
|
||||
|
||||
function fetchStreaming(target, url, headers) {
|
||||
// Streaming fetch for multi-stream pages.
|
||||
// First chunk = OOB SX swap (shell with skeletons).
|
||||
// Subsequent chunks = __sxResolve script tags filling suspense slots.
|
||||
var opts = { headers: headers };
|
||||
try {
|
||||
var h = new URL(url, location.href).hostname;
|
||||
if (h !== location.hostname &&
|
||||
(h.indexOf(".rose-ash.com") >= 0 || h.indexOf(".localhost") >= 0)) {
|
||||
opts.credentials = "include";
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
fetch(url, opts).then(function(resp) {
|
||||
if (!resp.ok || !resp.body) {
|
||||
// Fallback: non-streaming
|
||||
return resp.text().then(function(text) {
|
||||
text = stripComponentScripts(text);
|
||||
text = extractResponseCss(text);
|
||||
text = text.trim();
|
||||
if (text.charAt(0) === "(") {
|
||||
var dom = sxRender(text);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var reader = resp.body.getReader();
|
||||
var decoder = new TextDecoder();
|
||||
var buffer = "";
|
||||
var initialSwapDone = false;
|
||||
// Regex to match __sxResolve script tags
|
||||
var RESOLVE_START = "<script>window.__sxResolve&&window.__sxResolve(";
|
||||
var RESOLVE_END = ")</script>";
|
||||
|
||||
function processResolveScripts() {
|
||||
// Strip and load any extra component defs before resolve scripts
|
||||
buffer = stripSxScripts(buffer);
|
||||
var idx;
|
||||
while ((idx = buffer.indexOf(RESOLVE_START)) >= 0) {
|
||||
var endIdx = buffer.indexOf(RESOLVE_END, idx);
|
||||
if (endIdx < 0) break; // incomplete, wait for more data
|
||||
var argsStr = buffer.substring(idx + RESOLVE_START.length, endIdx);
|
||||
buffer = buffer.substring(endIdx + RESOLVE_END.length);
|
||||
// argsStr is: "stream-id","sx source"
|
||||
var commaIdx = argsStr.indexOf(",");
|
||||
if (commaIdx >= 0) {
|
||||
try {
|
||||
var id = JSON.parse(argsStr.substring(0, commaIdx));
|
||||
var sx = JSON.parse(argsStr.substring(commaIdx + 1));
|
||||
if (typeof Sx !== "undefined" && Sx.resolveSuspense) {
|
||||
Sx.resolveSuspense(id, sx);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[sx-ref] resolve parse error:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function pump() {
|
||||
return reader.read().then(function(result) {
|
||||
buffer += decoder.decode(result.value || new Uint8Array(), { stream: !result.done });
|
||||
|
||||
if (!initialSwapDone) {
|
||||
// Look for the first resolve script — everything before it is OOB content
|
||||
var scriptIdx = buffer.indexOf("<script>window.__sxResolve");
|
||||
// If we found a script tag, or the stream is done, process OOB
|
||||
var oobEnd = scriptIdx >= 0 ? scriptIdx : (result.done ? buffer.length : -1);
|
||||
if (oobEnd >= 0) {
|
||||
var oobContent = buffer.substring(0, oobEnd);
|
||||
buffer = buffer.substring(oobEnd);
|
||||
initialSwapDone = true;
|
||||
|
||||
// Process OOB SX content (same as fetchAndRestore)
|
||||
oobContent = stripComponentScripts(oobContent);
|
||||
// Also strip bare <script type="text/sx"> (extra defs from resolve chunks)
|
||||
oobContent = stripSxScripts(oobContent);
|
||||
oobContent = extractResponseCss(oobContent);
|
||||
oobContent = oobContent.trim();
|
||||
if (oobContent.charAt(0) === "(") {
|
||||
try {
|
||||
var dom = sxRender(oobContent);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
// Dispatch clientRoute so nav links update active state
|
||||
domDispatch(target, "sx:clientRoute",
|
||||
{ pathname: new URL(url, location.href).pathname });
|
||||
} catch (err) {
|
||||
console.error("[sx-ref] streaming OOB swap error:", err);
|
||||
}
|
||||
}
|
||||
// Process any resolve scripts already in buffer
|
||||
processResolveScripts();
|
||||
}
|
||||
} else {
|
||||
// Process resolve scripts as they arrive
|
||||
processResolveScripts();
|
||||
}
|
||||
|
||||
if (!result.done) return pump();
|
||||
});
|
||||
}
|
||||
|
||||
return pump();
|
||||
}).catch(function(err) {
|
||||
console.error("[sx-ref] streaming fetch error:", err);
|
||||
location.reload();
|
||||
});
|
||||
}
|
||||
|
||||
function fetchPreload(url, headers, cache) {
|
||||
fetch(url, { headers: headers }).then(function(resp) {
|
||||
if (!resp.ok) return;
|
||||
@@ -3497,6 +3625,14 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function stripSxScripts(text) {
|
||||
// Strip <script type="text/sx">...</script> (without data-components).
|
||||
// These contain extra component defs from streaming resolve chunks.
|
||||
var SxObj = typeof Sx !== "undefined" ? Sx : null;
|
||||
return text.replace(/<script[^>]*type="text\/sx"[^>]*>([\s\S]*?)<\/script>/gi,
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function extractResponseCss(text) {
|
||||
if (!_hasDom) return text;
|
||||
var target = document.getElementById("sx-css");
|
||||
|
||||
@@ -5,6 +5,10 @@ module.exports = {
|
||||
// Levels 1–4 produce shades 400–100 (level 5+ yields 0 or negative = no match)
|
||||
{ pattern: /^bg-sky-(100|200|300|400|500)$/ },
|
||||
{ pattern: /^bg-violet-(100|200|300|400|500)$/ },
|
||||
// Streaming demo: ~streaming-demo-chunk builds classes dynamically via (str ...)
|
||||
// from a color map — Tailwind scanner can't detect these in string literals.
|
||||
{ pattern: /^(bg|text|border)-(green|blue|amber)-(50|100|200|300|400|500|600|700|800|900)$/ },
|
||||
'w-3', 'h-3', 'w-1/3', 'w-2/3', 'w-1/2', 'w-3/4', 'animate-pulse',
|
||||
],
|
||||
content: [
|
||||
'/root/rose-ash/shared/sx/templates/**/*.sx',
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1003,6 +1003,7 @@ def _sf_defpage(expr: list, env: dict) -> PageDef:
|
||||
menu_expr=slots.get("menu"),
|
||||
stream=stream,
|
||||
fallback_expr=slots.get("fallback"),
|
||||
shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
env[f"page:{name_sym.name}"] = page
|
||||
|
||||
@@ -18,6 +18,7 @@ Usage::
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
@@ -86,10 +87,15 @@ def register_page_helpers(service: str, helpers: dict[str, Any]) -> None:
|
||||
for name in helpers:
|
||||
validate_helper(service, name)
|
||||
|
||||
# Wrap helpers to validate return values at the boundary
|
||||
# Wrap helpers to validate return values at the boundary.
|
||||
# Async generators pass through unwrapped — their yields are validated
|
||||
# by the streaming infrastructure, not at the helper boundary.
|
||||
wrapped: dict[str, Any] = {}
|
||||
for name, fn in helpers.items():
|
||||
if asyncio.iscoroutinefunction(fn):
|
||||
if inspect.isasyncgenfunction(fn):
|
||||
# Async generator: pass through (streaming infra validates yields)
|
||||
wrapped[name] = fn
|
||||
elif asyncio.iscoroutinefunction(fn):
|
||||
@functools.wraps(fn)
|
||||
async def _async_wrap(*a, _fn=fn, _name=name, **kw):
|
||||
result = await _fn(*a, **kw)
|
||||
@@ -168,6 +174,44 @@ async def _eval_slot(expr: Any, env: dict, ctx: Any) -> str:
|
||||
return await async_eval_slot_to_sx(expr, env, ctx)
|
||||
|
||||
|
||||
def _replace_suspense_sexp(sx: str, stream_id: str, replacement: str) -> str:
|
||||
"""Replace a rendered ~suspense div in SX source with replacement content.
|
||||
|
||||
After _eval_slot, ~suspense expands to:
|
||||
(div :id "sx-suspense-{id}" :data-suspense "{id}" :style "display:contents" ...)
|
||||
This finds the balanced s-expression containing :data-suspense "{id}" and
|
||||
replaces it with the given replacement string.
|
||||
"""
|
||||
marker = f':data-suspense "{stream_id}"'
|
||||
idx = sx.find(marker)
|
||||
if idx < 0:
|
||||
return sx
|
||||
# Walk backwards to find the opening paren of the containing (div ...)
|
||||
start = sx.rfind("(", 0, idx)
|
||||
if start < 0:
|
||||
return sx
|
||||
# Walk forward from start to find matching close paren (balanced)
|
||||
depth = 0
|
||||
i = start
|
||||
while i < len(sx):
|
||||
ch = sx[i]
|
||||
if ch == "(":
|
||||
depth += 1
|
||||
elif ch == ")":
|
||||
depth -= 1
|
||||
if depth == 0:
|
||||
return sx[:start] + replacement + sx[i + 1:]
|
||||
elif ch == '"':
|
||||
# Skip string contents
|
||||
i += 1
|
||||
while i < len(sx) and sx[i] != '"':
|
||||
if sx[i] == "\\":
|
||||
i += 1 # skip escaped char
|
||||
i += 1
|
||||
i += 1
|
||||
return sx
|
||||
|
||||
|
||||
async def execute_page(
|
||||
page_def: PageDef,
|
||||
service_name: str,
|
||||
@@ -207,15 +251,47 @@ async def execute_page(
|
||||
ctx = _get_request_context()
|
||||
|
||||
# Evaluate :data expression if present
|
||||
_multi_stream_content = None
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
if isinstance(data_result, dict):
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
# Multi-stream: consume generator, eval :content per chunk,
|
||||
# combine into shell with resolved suspense slots.
|
||||
chunks = []
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk)
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
chunk_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
chunks.append((stream_id, chunk_sx))
|
||||
# Build content: if :shell exists, render it and inline resolved chunks
|
||||
if page_def.shell_expr is not None:
|
||||
shell_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
# Replace each rendered suspense div with resolved content.
|
||||
# _eval_slot expands ~suspense into:
|
||||
# (div :id "sx-suspense-X" :data-suspense "X" :style "display:contents" ...)
|
||||
# We find the balanced s-expr containing :data-suspense "X" and replace it.
|
||||
for stream_id, chunk_sx in chunks:
|
||||
shell_sx = _replace_suspense_sexp(shell_sx, stream_id, chunk_sx)
|
||||
_multi_stream_content = shell_sx
|
||||
else:
|
||||
# No shell: just concatenate all chunks in a fragment
|
||||
parts = " ".join(sx for _, sx in chunks)
|
||||
_multi_stream_content = f"(<> {parts})"
|
||||
elif isinstance(data_result, dict):
|
||||
# Merge with kebab-case keys so SX symbols can reference them
|
||||
for k, v in data_result.items():
|
||||
env[k.replace("_", "-")] = v
|
||||
|
||||
# Render content slot (required)
|
||||
content_sx = await _eval_slot(page_def.content_expr, env, ctx)
|
||||
if _multi_stream_content is not None:
|
||||
content_sx = _multi_stream_content
|
||||
else:
|
||||
content_sx = await _eval_slot(page_def.content_expr, env, ctx)
|
||||
|
||||
# Render optional slots
|
||||
filter_sx = ""
|
||||
@@ -391,25 +467,60 @@ async def execute_page_streaming(
|
||||
|
||||
# --- Launch concurrent IO tasks (inherit context via create_task) ---
|
||||
|
||||
_stream_queue: asyncio.Queue = asyncio.Queue()
|
||||
_multi_stream = False
|
||||
|
||||
async def _eval_data_and_content():
|
||||
data_env = dict(env)
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, data_env, ctx)
|
||||
if isinstance(data_result, dict):
|
||||
for k, v in data_result.items():
|
||||
data_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, data_env, ctx) if page_def.content_expr else ""
|
||||
filter_sx = await _eval_slot(page_def.filter_expr, data_env, ctx) if page_def.filter_expr else ""
|
||||
aside_sx = await _eval_slot(page_def.aside_expr, data_env, ctx) if page_def.aside_expr else ""
|
||||
menu_sx = await _eval_slot(page_def.menu_expr, data_env, ctx) if page_def.menu_expr else ""
|
||||
return content_sx, filter_sx, aside_sx, menu_sx
|
||||
"""Evaluate :data then :content.
|
||||
|
||||
If :data returns an async generator (multi-stream mode), iterate it
|
||||
and push each (stream_id, content_sx) to _stream_queue incrementally.
|
||||
The main stream loop drains the queue and sends resolve scripts as
|
||||
items arrive — giving true staggered streaming.
|
||||
"""
|
||||
nonlocal _multi_stream
|
||||
try:
|
||||
data_env = dict(env)
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, data_env, ctx)
|
||||
# Async generator: multi-stream mode
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
_multi_stream = True
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk) # copy so pop doesn't mutate
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
await _stream_queue.put(("data", stream_id, content_sx))
|
||||
await _stream_queue.put(("data-done",))
|
||||
return
|
||||
if isinstance(data_result, dict):
|
||||
for k, v in data_result.items():
|
||||
data_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, data_env, ctx) if page_def.content_expr else ""
|
||||
filter_sx = await _eval_slot(page_def.filter_expr, data_env, ctx) if page_def.filter_expr else ""
|
||||
aside_sx = await _eval_slot(page_def.aside_expr, data_env, ctx) if page_def.aside_expr else ""
|
||||
menu_sx = await _eval_slot(page_def.menu_expr, data_env, ctx) if page_def.menu_expr else ""
|
||||
await _stream_queue.put(("data-single", content_sx, filter_sx, aside_sx, menu_sx))
|
||||
except Exception as e:
|
||||
logger.error("Streaming data task failed: %s", e)
|
||||
await _stream_queue.put(("data-done",))
|
||||
|
||||
async def _eval_headers():
|
||||
if layout is None:
|
||||
return "", ""
|
||||
rows = await layout.full_headers(tctx, **layout_kwargs)
|
||||
menu = await layout.mobile_menu(tctx, **layout_kwargs)
|
||||
return rows, menu
|
||||
try:
|
||||
if layout is None:
|
||||
await _stream_queue.put(("headers", "", ""))
|
||||
return
|
||||
rows = await layout.full_headers(tctx, **layout_kwargs)
|
||||
menu = await layout.mobile_menu(tctx, **layout_kwargs)
|
||||
await _stream_queue.put(("headers", rows, menu))
|
||||
except Exception as e:
|
||||
logger.error("Streaming headers task failed: %s", e)
|
||||
await _stream_queue.put(("headers", "", ""))
|
||||
|
||||
data_task = asyncio.create_task(_eval_data_and_content())
|
||||
header_task = asyncio.create_task(_eval_headers())
|
||||
@@ -419,7 +530,15 @@ async def execute_page_streaming(
|
||||
# No dependency on sx-browser.js boot timing for the initial shell.
|
||||
|
||||
suspense_header_sx = f'(~suspense :id "stream-headers" :fallback {header_fallback})'
|
||||
suspense_content_sx = f'(~suspense :id "stream-content" :fallback {fallback_sx})'
|
||||
|
||||
# When :shell is provided, it renders directly as the content slot
|
||||
# (it contains its own ~suspense for the data-dependent part).
|
||||
# Otherwise, wrap the entire :content in a single suspense.
|
||||
if page_def.shell_expr is not None:
|
||||
shell_content_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
suspense_content_sx = shell_content_sx
|
||||
else:
|
||||
suspense_content_sx = f'(~suspense :id "stream-content" :fallback {fallback_sx})'
|
||||
|
||||
initial_page_html = await _helpers_render_to_html("app-body",
|
||||
header_rows=SxExpr(suspense_header_sx),
|
||||
@@ -434,7 +553,10 @@ async def execute_page_streaming(
|
||||
content_ref = ""
|
||||
if page_def.content_expr is not None:
|
||||
content_ref = sx_serialize(page_def.content_expr)
|
||||
page_sx_for_scan = f'(<> {layout_refs} {content_ref} (~app-body :header-rows {suspense_header_sx} :content {suspense_content_sx}))'
|
||||
shell_ref = ""
|
||||
if page_def.shell_expr is not None:
|
||||
shell_ref = sx_serialize(page_def.shell_expr)
|
||||
page_sx_for_scan = f'(<> {layout_refs} {content_ref} {shell_ref} (~app-body :header-rows {suspense_header_sx} :content {suspense_content_sx}))'
|
||||
shell, tail = sx_page_streaming_parts(
|
||||
tctx, initial_page_html, page_sx=page_sx_for_scan,
|
||||
)
|
||||
@@ -476,36 +598,244 @@ async def execute_page_streaming(
|
||||
async def _stream_chunks():
|
||||
yield shell + tail
|
||||
|
||||
tasks = {data_task: "data", header_task: "headers"}
|
||||
pending = set(tasks.keys())
|
||||
|
||||
while pending:
|
||||
done, pending = await asyncio.wait(
|
||||
pending, return_when=asyncio.FIRST_COMPLETED,
|
||||
)
|
||||
for task in done:
|
||||
label = tasks[task]
|
||||
try:
|
||||
result = task.result()
|
||||
except Exception as e:
|
||||
logger.error("Streaming %s task failed: %s", label, e)
|
||||
continue
|
||||
|
||||
if label == "data":
|
||||
content_sx, filter_sx, aside_sx, menu_sx = result
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script("stream-content", content_sx, extras)
|
||||
elif label == "headers":
|
||||
header_rows, header_menu = result
|
||||
# Both tasks push tagged items onto _stream_queue. We drain until
|
||||
# both are done. Items: ("headers", rows, menu), ("data-single", ...),
|
||||
# ("data", stream_id, sx), ("data-done",).
|
||||
remaining = 2 # waiting for: headers + data
|
||||
while remaining > 0:
|
||||
item = await _stream_queue.get()
|
||||
kind = item[0]
|
||||
try:
|
||||
if kind == "headers":
|
||||
_, header_rows, header_menu = item
|
||||
remaining -= 1
|
||||
if header_rows:
|
||||
extras = _extra_defs(header_rows)
|
||||
yield sx_streaming_resolve_script("stream-headers", header_rows, extras)
|
||||
elif kind == "data-single":
|
||||
_, content_sx, filter_sx, aside_sx, menu_sx = item
|
||||
remaining -= 1
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script("stream-content", content_sx, extras)
|
||||
elif kind == "data":
|
||||
_, stream_id, content_sx = item
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script(stream_id, content_sx, extras)
|
||||
elif kind == "data-done":
|
||||
remaining -= 1
|
||||
except Exception as e:
|
||||
logger.error("Streaming resolve failed for %s: %s", kind, e)
|
||||
|
||||
yield "\n</body>\n</html>"
|
||||
|
||||
return _stream_chunks()
|
||||
|
||||
|
||||
async def execute_page_streaming_oob(
|
||||
page_def: PageDef,
|
||||
service_name: str,
|
||||
url_params: dict[str, Any] | None = None,
|
||||
):
|
||||
"""Execute a streaming page for HTMX/SX requests.
|
||||
|
||||
Like execute_page_streaming but yields OOB SX swap format instead of a
|
||||
full HTML document:
|
||||
1. First yield: OOB SX with shell content (suspense skeletons) + CSS + defs
|
||||
2. Subsequent yields: __sxResolve script tags as data resolves
|
||||
|
||||
The client uses streaming fetch (ReadableStream) to process the OOB swap
|
||||
immediately and then execute resolve scripts as they arrive.
|
||||
"""
|
||||
import asyncio
|
||||
from .jinja_bridge import get_component_env, _get_request_context
|
||||
from .async_eval import async_eval
|
||||
from .page import get_template_context
|
||||
from .helpers import (
|
||||
oob_page_sx,
|
||||
sx_streaming_resolve_script,
|
||||
components_for_request,
|
||||
SxExpr,
|
||||
)
|
||||
from .parser import serialize as sx_serialize
|
||||
from .layouts import get_layout
|
||||
|
||||
if url_params is None:
|
||||
url_params = {}
|
||||
|
||||
env = dict(get_component_env())
|
||||
env.update(get_page_helpers(service_name))
|
||||
env.update(page_def.closure)
|
||||
for key, val in url_params.items():
|
||||
kebab = key.replace("_", "-")
|
||||
env[kebab] = val
|
||||
env[key] = val
|
||||
|
||||
ctx = _get_request_context()
|
||||
|
||||
# Evaluate shell with suspense skeletons (no data yet)
|
||||
shell_sx = ""
|
||||
if page_def.shell_expr is not None:
|
||||
shell_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
|
||||
# Build initial OOB response with shell as content
|
||||
tctx = await get_template_context()
|
||||
|
||||
# Resolve layout for OOB headers
|
||||
layout = None
|
||||
layout_kwargs: dict[str, Any] = {}
|
||||
if page_def.layout is not None:
|
||||
if isinstance(page_def.layout, str):
|
||||
layout_name = page_def.layout
|
||||
elif isinstance(page_def.layout, list):
|
||||
from .types import Keyword as SxKeyword, Symbol as SxSymbol
|
||||
raw = page_def.layout
|
||||
first = raw[0]
|
||||
layout_name = (
|
||||
first.name if isinstance(first, (SxKeyword, SxSymbol))
|
||||
else str(first)
|
||||
)
|
||||
i = 1
|
||||
while i < len(raw):
|
||||
k = raw[i]
|
||||
if isinstance(k, SxKeyword) and i + 1 < len(raw):
|
||||
resolved = await async_eval(raw[i + 1], env, ctx)
|
||||
layout_kwargs[k.name.replace("-", "_")] = resolved
|
||||
i += 2
|
||||
else:
|
||||
i += 1
|
||||
else:
|
||||
layout_name = str(page_def.layout)
|
||||
layout = get_layout(layout_name)
|
||||
|
||||
# Launch concurrent tasks
|
||||
_stream_queue: asyncio.Queue = asyncio.Queue()
|
||||
|
||||
async def _eval_data():
|
||||
try:
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk)
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
await _stream_queue.put(("data", stream_id, content_sx))
|
||||
await _stream_queue.put(("data-done",))
|
||||
return
|
||||
await _stream_queue.put(("data-done",))
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB data task failed: %s", e)
|
||||
await _stream_queue.put(("data-done",))
|
||||
|
||||
async def _eval_oob_headers():
|
||||
try:
|
||||
if layout is not None:
|
||||
oob_headers = await layout.oob_headers(tctx, **layout_kwargs)
|
||||
await _stream_queue.put(("headers", oob_headers))
|
||||
else:
|
||||
await _stream_queue.put(("headers", ""))
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB headers task failed: %s", e)
|
||||
await _stream_queue.put(("headers", ""))
|
||||
|
||||
data_task = asyncio.create_task(_eval_data())
|
||||
header_task = asyncio.create_task(_eval_oob_headers())
|
||||
|
||||
# Build initial OOB body with shell content (skeletons in place)
|
||||
oob_body = await oob_page_sx(
|
||||
oobs="", # headers will arrive via resolve script
|
||||
content=shell_sx,
|
||||
)
|
||||
|
||||
# Prepend component definitions + CSS (like sx_response does)
|
||||
from quart import request
|
||||
comp_defs = components_for_request(oob_body)
|
||||
body = oob_body
|
||||
if comp_defs:
|
||||
body = (f'<script type="text/sx" data-components>'
|
||||
f'{comp_defs}</script>\n{body}')
|
||||
|
||||
from .css_registry import scan_classes_from_sx, lookup_rules, registry_loaded
|
||||
if registry_loaded():
|
||||
new_classes = scan_classes_from_sx(oob_body)
|
||||
if comp_defs:
|
||||
new_classes.update(scan_classes_from_sx(comp_defs))
|
||||
known_raw = request.headers.get("SX-Css", "")
|
||||
if known_raw:
|
||||
from .css_registry import lookup_css_hash
|
||||
if len(known_raw) <= 16:
|
||||
looked_up = lookup_css_hash(known_raw)
|
||||
known_classes = looked_up if looked_up is not None else set()
|
||||
else:
|
||||
known_classes = set(known_raw.split(","))
|
||||
new_classes -= known_classes
|
||||
if new_classes:
|
||||
new_rules = lookup_rules(new_classes)
|
||||
if new_rules:
|
||||
body = f'<style data-sx-css>{new_rules}</style>\n{body}'
|
||||
|
||||
# Capture component env for extra defs in resolve chunks
|
||||
from .jinja_bridge import components_for_page as _comp_scan
|
||||
_base_scan = oob_body
|
||||
|
||||
def _extra_defs(sx_source: str) -> str:
|
||||
from .deps import components_needed
|
||||
comp_env = dict(get_component_env())
|
||||
base_needed = components_needed(_base_scan, comp_env)
|
||||
resolve_needed = components_needed(sx_source, comp_env)
|
||||
extra = resolve_needed - base_needed
|
||||
if not extra:
|
||||
return ""
|
||||
from .parser import serialize
|
||||
from .types import Component
|
||||
parts = []
|
||||
for key, val in comp_env.items():
|
||||
if isinstance(val, Component) and (f"~{val.name}" in extra or key in extra):
|
||||
param_strs = ["&key"] + list(val.params)
|
||||
if val.has_children:
|
||||
param_strs.extend(["&rest", "children"])
|
||||
params_sx = "(" + " ".join(param_strs) + ")"
|
||||
body_sx = serialize(val.body, pretty=True)
|
||||
parts.append(f"(defcomp ~{val.name} {params_sx} {body_sx})")
|
||||
return "\n".join(parts)
|
||||
|
||||
# Yield chunks
|
||||
async def _stream_oob_chunks():
|
||||
# First chunk: OOB swap with skeletons
|
||||
yield body
|
||||
|
||||
# Drain queue for resolve scripts
|
||||
remaining = 2 # headers + data
|
||||
while remaining > 0:
|
||||
item = await _stream_queue.get()
|
||||
kind = item[0]
|
||||
try:
|
||||
if kind == "headers":
|
||||
_, oob_hdr = item
|
||||
remaining -= 1
|
||||
# Headers don't need resolve scripts for OOB — they're
|
||||
# handled by OOB swap attributes in the SX content itself.
|
||||
# But if we have header content, send a resolve for it.
|
||||
if oob_hdr:
|
||||
extras = _extra_defs(oob_hdr)
|
||||
yield sx_streaming_resolve_script("stream-headers", oob_hdr, extras)
|
||||
elif kind == "data":
|
||||
_, stream_id, content_sx = item
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script(stream_id, content_sx, extras)
|
||||
elif kind == "data-done":
|
||||
remaining -= 1
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB resolve failed for %s: %s", kind, e)
|
||||
|
||||
return _stream_oob_chunks()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Blueprint mounting
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -556,19 +886,17 @@ def _mount_one_page(bp: Any, service_name: str, page_def: PageDef) -> None:
|
||||
from quart import make_response, Response
|
||||
|
||||
if page_def.stream:
|
||||
# Streaming response: yields HTML chunks as IO resolves
|
||||
# Streaming response: yields chunks as IO resolves
|
||||
async def page_view(**kwargs: Any) -> Any:
|
||||
from shared.browser.app.utils.htmx import is_htmx_request
|
||||
current = get_page(service_name, page_def.name) or page_def
|
||||
# Only stream for full page loads (not SX/HTMX requests)
|
||||
if is_htmx_request():
|
||||
result = await execute_page(current, service_name, url_params=kwargs)
|
||||
if hasattr(result, "status_code"):
|
||||
return result
|
||||
return await make_response(result, 200)
|
||||
# execute_page_streaming does all context-dependent setup as a
|
||||
# regular async function (while request context is live), then
|
||||
# returns an async generator that only yields strings.
|
||||
# Streaming OOB: shell with skeletons first, then resolve scripts
|
||||
gen = await execute_page_streaming_oob(
|
||||
current, service_name, url_params=kwargs,
|
||||
)
|
||||
return Response(gen, content_type="text/sx; charset=utf-8")
|
||||
# Full page streaming: HTML document with inline resolve scripts
|
||||
gen = await execute_page_streaming(
|
||||
current, service_name, url_params=kwargs,
|
||||
)
|
||||
@@ -696,7 +1024,14 @@ async def evaluate_page_data(
|
||||
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
|
||||
# Kebab-case dict keys (matching execute_page line 214-215)
|
||||
# Multi-stream: async generator can't be serialized as a single dict.
|
||||
# Return nil to signal the client to fall back to server-side rendering.
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
# Close the generator cleanly
|
||||
await data_result.aclose()
|
||||
return "nil"
|
||||
|
||||
# Kebab-case dict keys (matching execute_page)
|
||||
if isinstance(data_result, dict):
|
||||
data_result = {
|
||||
k.replace("_", "-"): v for k, v in data_result.items()
|
||||
|
||||
@@ -361,6 +361,7 @@ class JSEmitter:
|
||||
"fetch-request": "fetchRequest",
|
||||
"fetch-location": "fetchLocation",
|
||||
"fetch-and-restore": "fetchAndRestore",
|
||||
"fetch-streaming": "fetchStreaming",
|
||||
"fetch-preload": "fetchPreload",
|
||||
"dom-query-by-id": "domQueryById",
|
||||
"dom-matches?": "domMatches",
|
||||
@@ -3123,6 +3124,134 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
}).catch(function() { location.reload(); });
|
||||
}
|
||||
|
||||
function fetchStreaming(target, url, headers) {
  // Streaming fetch for multi-stream pages.
  //
  // Protocol: the first chunk of the response body is an OOB SX swap
  // (the page shell with skeleton placeholders); every later chunk is a
  // <script>window.__sxResolve(id, sx)</script> tag that fills one
  // suspense slot. Non-OK or bodyless responses fall back to a single
  // non-streaming swap; a network error triggers a full reload.
  var reqOpts = { headers: headers };
  try {
    // Cross-subdomain requests to known sibling hosts must carry cookies.
    var hostName = new URL(url, location.href).hostname;
    if (hostName !== location.hostname &&
        (hostName.indexOf(".rose-ash.com") >= 0 || hostName.indexOf(".localhost") >= 0)) {
      reqOpts.credentials = "include";
    }
  } catch (e) {}

  // Render a complete SX payload into `target` — the shared tail of both
  // the non-streaming fallback and the initial shell swap.
  function applySxSwap(sxText) {
    var node = sxRender(sxText);
    var wrap = document.createElement("div");
    wrap.appendChild(node);
    processOobSwaps(wrap, function(t, oob, s) {
      swapDomNodes(t, oob, s);
      sxHydrate(t);
      processElements(t);
    });
    var mainEl = wrap.querySelector("#main-panel");
    morphChildren(target, mainEl || wrap);
    postSwap(target);
  }

  fetch(url, reqOpts).then(function(resp) {
    if (!resp.ok || !resp.body) {
      // Fallback: treat the whole body as one non-streaming response.
      return resp.text().then(function(text) {
        text = stripComponentScripts(text);
        text = extractResponseCss(text);
        text = text.trim();
        if (text.charAt(0) === "(") {
          applySxSwap(text);
        }
      });
    }

    var streamReader = resp.body.getReader();
    var utf8 = new TextDecoder();
    var pending = "";
    var shellApplied = false;
    // Exact markers the server emits around each resolve payload.
    var RESOLVE_OPEN = "<script>window.__sxResolve&&window.__sxResolve(";
    var RESOLVE_CLOSE = ")</script>";

    // Drain every complete resolve script currently buffered in `pending`.
    function drainResolveScripts() {
      // Load any extra component defs that precede the resolve calls.
      pending = stripSxScripts(pending);
      var openAt;
      while ((openAt = pending.indexOf(RESOLVE_OPEN)) >= 0) {
        var closeAt = pending.indexOf(RESOLVE_CLOSE, openAt);
        if (closeAt < 0) break; // incomplete, wait for more data
        var rawArgs = pending.substring(openAt + RESOLVE_OPEN.length, closeAt);
        pending = pending.substring(closeAt + RESOLVE_CLOSE.length);
        // rawArgs is: "stream-id","sx source" (two JSON strings).
        var sep = rawArgs.indexOf(",");
        if (sep >= 0) {
          try {
            var slotId = JSON.parse(rawArgs.substring(0, sep));
            var sxSrc = JSON.parse(rawArgs.substring(sep + 1));
            if (typeof Sx !== "undefined" && Sx.resolveSuspense) {
              Sx.resolveSuspense(slotId, sxSrc);
            }
          } catch (e) {
            console.error("[sx-ref] resolve parse error:", e);
          }
        }
      }
    }

    function pump() {
      return streamReader.read().then(function(chunk) {
        pending += utf8.decode(chunk.value || new Uint8Array(), { stream: !chunk.done });

        if (shellApplied) {
          // Shell already in place: just keep resolving suspense slots.
          drainResolveScripts();
        } else {
          // Everything before the first resolve script is OOB shell content.
          var firstScript = pending.indexOf("<script>window.__sxResolve");
          // Found a script tag, or the stream ended: process the OOB part.
          var shellEnd = firstScript >= 0 ? firstScript : (chunk.done ? pending.length : -1);
          if (shellEnd >= 0) {
            var shellText = pending.substring(0, shellEnd);
            pending = pending.substring(shellEnd);
            shellApplied = true;

            // Process OOB SX content (same pipeline as fetchAndRestore),
            // also stripping bare <script type="text/sx"> extra defs.
            shellText = stripComponentScripts(shellText);
            shellText = stripSxScripts(shellText);
            shellText = extractResponseCss(shellText);
            shellText = shellText.trim();
            if (shellText.charAt(0) === "(") {
              try {
                applySxSwap(shellText);
                // Dispatch clientRoute so nav links update active state.
                domDispatch(target, "sx:clientRoute",
                  { pathname: new URL(url, location.href).pathname });
              } catch (err) {
                console.error("[sx-ref] streaming OOB swap error:", err);
              }
            }
            // Resolve scripts may already be buffered behind the shell.
            drainResolveScripts();
          }
        }

        if (!chunk.done) return pump();
      });
    }

    return pump();
  }).catch(function(err) {
    console.error("[sx-ref] streaming fetch error:", err);
    location.reload();
  });
}
|
||||
|
||||
function fetchPreload(url, headers, cache) {
|
||||
fetch(url, { headers: headers }).then(function(resp) {
|
||||
if (!resp.ok) return;
|
||||
@@ -3587,6 +3716,14 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function stripSxScripts(text) {
|
||||
// Strip <script type="text/sx">...</script> (without data-components).
|
||||
// These contain extra component defs from streaming resolve chunks.
|
||||
// Side effect: each matched script body is handed to Sx.loadComponents
// (when the Sx runtime and that hook exist) before the tag is removed,
// so the defs are registered ahead of the payloads that reference them.
// Returns the input text with all matching script tags stripped.
// NOTE: the doubled backslashes below are Python-string escapes — this JS
// lives inside the triple-quoted PLATFORM_ORCHESTRATION_JS string.
var SxObj = typeof Sx !== "undefined" ? Sx : null;
|
||||
return text.replace(/<script[^>]*type="text\\/sx"[^>]*>([\\s\\S]*?)<\\/script>/gi,
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function extractResponseCss(text) {
|
||||
if (!_hasDom) return text;
|
||||
var target = document.getElementById("sx-css");
|
||||
|
||||
@@ -1255,11 +1255,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -1790,7 +1794,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -1807,6 +1811,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -1986,6 +1991,8 @@ trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
'''
|
||||
|
||||
|
||||
|
||||
@@ -116,3 +116,114 @@
|
||||
(let ((pdef (make-page-def name slots env)))
|
||||
(env-set! env (str "page:" name) pdef)
|
||||
pdef))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; Page Execution Semantics
|
||||
;; ==========================================================================
|
||||
;;
|
||||
;; A PageDef describes what to render for a route. The host evaluates slots
|
||||
;; at request time. This section specifies the data → content protocol that
|
||||
;; every host must implement identically.
|
||||
;;
|
||||
;; Slots (all unevaluated AST):
|
||||
;; :path — route pattern (string)
|
||||
;; :auth — "public" | "login" | "admin"
|
||||
;; :layout — layout reference + kwargs
|
||||
;; :stream — boolean, opt into chunked transfer
|
||||
;; :shell — immediate content (contains ~suspense placeholders)
|
||||
;; :fallback — loading skeleton for single-stream mode
|
||||
;; :data — IO expression producing bindings
|
||||
;; :content — template expression evaluated with data bindings
|
||||
;; :filter, :aside, :menu — additional content slots
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Data Protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; The :data expression is evaluated at request time. It returns one of:
|
||||
;;
|
||||
;; 1. A dict — single-stream mode (default).
|
||||
;; Each key becomes an env binding (underscores → hyphens).
|
||||
;; Then :content is evaluated once with those bindings.
|
||||
;; Result resolves the "stream-content" suspense slot.
|
||||
;;
|
||||
;; 2. A sequence of dicts — multi-stream mode.
|
||||
;; The host delivers items over time (async generator, channel, etc.).
|
||||
;; Each dict:
|
||||
;; - MUST contain "stream-id" → string matching a ~suspense :id
|
||||
;; - Remaining keys become env bindings (underscores → hyphens)
|
||||
;; - :content is re-evaluated with those bindings
|
||||
;; - Result resolves the ~suspense slot matching "stream-id"
|
||||
;; If "stream-id" is absent, defaults to "stream-content".
|
||||
;;
|
||||
;; The host is free to choose the timing mechanism:
|
||||
;; Python — async generator (yield dicts at intervals)
|
||||
;; Go — channel of dicts
|
||||
;; Haskell — conduit / streaming
|
||||
;; JS — async iterator
|
||||
;;
|
||||
;; The spec requires:
|
||||
;; (a) Each item's bindings are isolated (fresh env per item)
|
||||
;; (b) :content is evaluated independently for each item
|
||||
;; (c) Resolution is incremental — each item resolves as it arrives
|
||||
;; (d) "stream-id" routes to the correct ~suspense slot
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Streaming Execution Order
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; When :stream is true:
|
||||
;;
|
||||
;; 1. Evaluate :shell (if present) → HTML for immediate content slot
|
||||
;; :shell typically contains ~suspense placeholders with :fallback
|
||||
;; 2. Render HTML shell with suspense placeholders → send to client
|
||||
;; 3. Start :data evaluation concurrently with header resolution
|
||||
;; 4. As each data item arrives:
|
||||
;; a. Bind item keys into fresh env
|
||||
;; b. Evaluate :content with those bindings → SX wire format
|
||||
;; c. Send resolve script: __sxResolve(stream-id, sx)
|
||||
;; 5. Close response when all items + headers have resolved
|
||||
;;
|
||||
;; Non-streaming pages evaluate :data then :content sequentially and
|
||||
;; return the complete page in a single response.
|
||||
;;
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Spec helpers for multi-stream data protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Extract stream-id from a data chunk dict, defaulting to "stream-content"
|
||||
;; chunk: one item from a multi-stream :data result; the returned id names
;; the ~suspense slot that this chunk's rendered content will resolve.
(define stream-chunk-id
|
||||
(fn (chunk)
|
||||
(if (has-key? chunk "stream-id")
|
||||
(get chunk "stream-id")
|
||||
"stream-content")))
|
||||
|
||||
;; Remove stream-id from chunk, returning only the bindings
|
||||
;; The routing key is consumed by the host; only the remaining keys become
;; env bindings when :content is evaluated for this chunk.
(define stream-chunk-bindings
|
||||
(fn (chunk)
|
||||
(dissoc chunk "stream-id")))
|
||||
|
||||
;; Normalize binding keys: underscore → hyphen
|
||||
;; Matches the single-stream protocol: underscores in data keys become
;; hyphens before the value is bound into the env.
(define normalize-binding-key
|
||||
(fn (key)
|
||||
(replace key "_" "-")))
|
||||
|
||||
;; Bind a data chunk's keys into a fresh env (isolated per chunk)
|
||||
;; Copies base-env into a new env, then sets each non-"stream-id" key
;; (normalized underscore→hyphen) to its value. base-env is never mutated,
;; which gives each chunk the isolation required by the streaming spec.
(define bind-stream-chunk
|
||||
(fn (chunk base-env)
|
||||
(let ((env (merge {} base-env))
|
||||
(bindings (stream-chunk-bindings chunk)))
|
||||
(for-each
|
||||
(fn (key)
|
||||
(env-set! env (normalize-binding-key key)
|
||||
(get bindings key)))
|
||||
(keys bindings))
|
||||
env)))
|
||||
|
||||
;; Validate a multi-stream data result: must be a list of dicts
|
||||
;; True only when data is a list and every element is a dict — the shape
;; required before treating a :data result as multi-stream chunks.
(define validate-stream-data
|
||||
(fn (data)
|
||||
(and (= (type-of data) "list")
|
||||
(every? (fn (item) (= (type-of item) "dict")) data))))
|
||||
|
||||
@@ -664,6 +664,15 @@
|
||||
(has-io (and io-deps (not (empty? io-deps)))))
|
||||
;; Ensure IO deps are registered as proxied primitives
|
||||
(when has-io (register-io-deps io-deps))
|
||||
(if (get match "stream")
|
||||
;; Streaming page: fetch with streaming body reader.
|
||||
;; First chunk = OOB SX swap (shell with skeletons),
|
||||
;; subsequent chunks = resolve scripts filling slots.
|
||||
(do (log-info (str "sx:route streaming " pathname))
|
||||
(fetch-streaming target pathname
|
||||
(build-request-headers target
|
||||
(loaded-component-names) _css-hash))
|
||||
true)
|
||||
(if (get match "has-data")
|
||||
;; Data page: check cache, else resolve asynchronously
|
||||
(let ((cache-key (page-data-cache-key page-name params))
|
||||
@@ -727,7 +736,7 @@
|
||||
(do (log-info (str "sx:route server (eval failed) " pathname)) false)
|
||||
(do
|
||||
(swap-rendered-content target rendered pathname)
|
||||
true)))))))))))))))))
|
||||
true))))))))))))))))))
|
||||
|
||||
|
||||
(define bind-client-route-link
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# WARNING: special-forms.sx declares forms not in eval.sx: reset, shift
|
||||
"""
|
||||
sx_ref.py -- Generated from reference SX evaluator specification.
|
||||
|
||||
@@ -191,11 +192,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -736,7 +741,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -752,6 +757,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -888,6 +894,8 @@ trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
|
||||
|
||||
# =========================================================================
|
||||
@@ -1128,6 +1136,21 @@ sf_defaction = lambda args, env: (lambda name_sym: (lambda params_raw: (lambda n
|
||||
# sf-defpage
|
||||
sf_defpage = lambda args, env: (lambda name_sym: (lambda name: (lambda slots: _sx_begin((lambda i: (lambda max_i: for_each(lambda idx: ((_sx_dict_set(slots, keyword_name(nth(args, idx)), nth(args, (idx + 1))) if sx_truthy(((idx + 1) < max_i)) else NIL) if sx_truthy(((idx < max_i) if not sx_truthy((idx < max_i)) else (type_of(nth(args, idx)) == 'keyword'))) else NIL), range(1, max_i, 2)))(len(args)))(1), (lambda pdef: _sx_begin(_sx_dict_set(env, sx_str('page:', name), pdef), pdef))(make_page_def(name, slots, env))))({}))(symbol_name(name_sym)))(first(args))
|
||||
|
||||
# stream-chunk-id
|
||||
# Transpiled from the spec's stream helpers. Returns the chunk's target
# suspense slot id, defaulting to 'stream-content' when the key is absent.
stream_chunk_id = lambda chunk: (get(chunk, 'stream-id') if sx_truthy(has_key_p(chunk, 'stream-id')) else 'stream-content')
|
||||
|
||||
# stream-chunk-bindings
|
||||
# Drops the routing key, leaving only the env bindings of the chunk.
stream_chunk_bindings = lambda chunk: dissoc(chunk, 'stream-id')
|
||||
|
||||
# normalize-binding-key
|
||||
# Underscore -> hyphen, matching SX's kebab-case binding convention.
normalize_binding_key = lambda key: replace(key, '_', '-')
|
||||
|
||||
# bind-stream-chunk
|
||||
# Copies base_env into a fresh env and sets each normalized chunk key;
# base_env itself is not mutated (per-chunk isolation).
bind_stream_chunk = lambda chunk, base_env: (lambda env: (lambda bindings: _sx_begin(for_each(lambda key: _sx_dict_set(env, normalize_binding_key(key), get(bindings, key)), keys(bindings)), env))(stream_chunk_bindings(chunk)))(merge({}, base_env))
|
||||
|
||||
# validate-stream-data
|
||||
# True only when data is a list whose every element is a dict.
validate_stream_data = lambda data: ((type_of(data) == 'list') if not sx_truthy((type_of(data) == 'list')) else every_p(lambda item: (type_of(item) == 'dict'), data))
|
||||
|
||||
|
||||
# === Transpiled from render (core) ===
|
||||
|
||||
@@ -1261,6 +1284,169 @@ compute_all_io_refs = lambda env, io_names: for_each(lambda name: (lambda val: (
|
||||
component_pure_p = lambda name, env, io_names: empty_p(transitive_io_refs(name, env, io_names))
|
||||
|
||||
|
||||
# === Transpiled from engine (fetch/swap/trigger pure logic) ===
|
||||
|
||||
# ENGINE_VERBS
|
||||
# HTTP verbs recognized as sx-<verb> element attributes (see get_verb_info).
ENGINE_VERBS = ['get', 'post', 'put', 'delete', 'patch']
|
||||
|
||||
# DEFAULT_SWAP
|
||||
# Swap style used when an element has no explicit sx-swap attribute.
DEFAULT_SWAP = 'outerHTML'
|
||||
|
||||
# parse-time
|
||||
# Parse a duration string into milliseconds: nil -> 0, a 's'-suffixed value
# is scaled *1000, anything else is parsed as ms (default 0 on failure).
# NOTE(review): the 'ms' branch feeds the raw string (suffix included) to
# parse-int, and the 's' branch strips every 's' character — this relies on
# parse-int tolerating/ignoring trailing units; confirm against its impl.
parse_time = lambda s: (0 if sx_truthy(is_nil(s)) else (parse_int(s, 0) if sx_truthy(ends_with_p(s, 'ms')) else ((parse_int(replace(s, 's', ''), 0) * 1000) if sx_truthy(ends_with_p(s, 's')) else parse_int(s, 0))))
|
||||
|
||||
# parse-trigger-spec
|
||||
parse_trigger_spec = lambda spec: (NIL if sx_truthy(is_nil(spec)) else (lambda raw_parts: filter(lambda x: (not sx_truthy(is_nil(x))), map(lambda part: (lambda tokens: (NIL if sx_truthy(empty_p(tokens)) else ({'event': 'every', 'modifiers': {'interval': parse_time(nth(tokens, 1))}} if sx_truthy(((first(tokens) == 'every') if not sx_truthy((first(tokens) == 'every')) else (len(tokens) >= 2))) else (lambda mods: _sx_begin(for_each(lambda tok: (_sx_dict_set(mods, 'once', True) if sx_truthy((tok == 'once')) else (_sx_dict_set(mods, 'changed', True) if sx_truthy((tok == 'changed')) else (_sx_dict_set(mods, 'delay', parse_time(slice(tok, 6))) if sx_truthy(starts_with_p(tok, 'delay:')) else (_sx_dict_set(mods, 'from', slice(tok, 5)) if sx_truthy(starts_with_p(tok, 'from:')) else NIL)))), rest(tokens)), {'event': first(tokens), 'modifiers': mods}))({}))))(split(trim(part), ' ')), raw_parts)))(split(spec, ',')))
|
||||
|
||||
# default-trigger
|
||||
# Default trigger spec per tag: FORM -> submit, INPUT/SELECT/TEXTAREA ->
# change, everything else -> click (each with empty modifiers).
default_trigger = lambda tag_name: ([{'event': 'submit', 'modifiers': {}}] if sx_truthy((tag_name == 'FORM')) else ([{'event': 'change', 'modifiers': {}}] if sx_truthy(((tag_name == 'INPUT') if sx_truthy((tag_name == 'INPUT')) else ((tag_name == 'SELECT') if sx_truthy((tag_name == 'SELECT')) else (tag_name == 'TEXTAREA')))) else [{'event': 'click', 'modifiers': {}}]))
|
||||
|
||||
# get-verb-info
|
||||
# Scan ENGINE_VERBS in order; the first sx-<verb> attribute present on el
# wins, yielding {'method': VERB, 'url': value}. Falls through to some()'s
# miss value when no verb attribute is set (presumably nil — confirm some()).
get_verb_info = lambda el: some(lambda verb: (lambda url: ({'method': upper(verb), 'url': url} if sx_truthy(url) else NIL))(dom_get_attr(el, sx_str('sx-', verb))), ENGINE_VERBS)
|
||||
|
||||
# build-request-headers
|
||||
build_request_headers = lambda el, loaded_components, css_hash: (lambda headers: _sx_begin((lambda target_sel: (_sx_dict_set(headers, 'SX-Target', target_sel) if sx_truthy(target_sel) else NIL))(dom_get_attr(el, 'sx-target')), (_sx_dict_set(headers, 'SX-Components', join(',', loaded_components)) if sx_truthy((not sx_truthy(empty_p(loaded_components)))) else NIL), (_sx_dict_set(headers, 'SX-Css', css_hash) if sx_truthy(css_hash) else NIL), (lambda extra_h: ((lambda parsed: (for_each(lambda key: _sx_dict_set(headers, key, sx_str(get(parsed, key))), keys(parsed)) if sx_truthy(parsed) else NIL))(parse_header_value(extra_h)) if sx_truthy(extra_h) else NIL))(dom_get_attr(el, 'sx-headers')), headers))({'SX-Request': 'true', 'SX-Current-URL': browser_location_href()})
|
||||
|
||||
# process-response-headers
|
||||
# Snapshot all SX-* control headers (plus Content-Type) into a plain dict;
# get_header is a callable mapping a header name to its value (or nil).
process_response_headers = lambda get_header: {'redirect': get_header('SX-Redirect'), 'refresh': get_header('SX-Refresh'), 'trigger': get_header('SX-Trigger'), 'retarget': get_header('SX-Retarget'), 'reswap': get_header('SX-Reswap'), 'location': get_header('SX-Location'), 'replace-url': get_header('SX-Replace-Url'), 'css-hash': get_header('SX-Css-Hash'), 'trigger-swap': get_header('SX-Trigger-After-Swap'), 'trigger-settle': get_header('SX-Trigger-After-Settle'), 'content-type': get_header('Content-Type')}
|
||||
|
||||
# parse-swap-spec
def parse_swap_spec(raw_swap, global_transitions_p):
    """Parse an sx-swap attribute into {'style': ..., 'transition': ...}.

    The first space-separated token is the swap style (DEFAULT_SWAP when
    raw_swap is falsy); any 'transition:true' / 'transition:false' token
    among the remaining ones overrides the global transitions flag.
    """
    tokens = split(raw_swap if sx_truthy(raw_swap) else DEFAULT_SWAP, ' ')
    swap_style = first(tokens)
    use_transition = global_transitions_p
    for token in rest(tokens):
        if sx_truthy(token == 'transition:true'):
            use_transition = True
        elif sx_truthy(token == 'transition:false'):
            use_transition = False
    return {'style': swap_style, 'transition': use_transition}
|
||||
|
||||
# parse-retry-spec
|
||||
# Parse an 'strategy:start:cap' retry attribute into a dict; the start and
# cap fields default to 1000 ms and 30000 ms, and a nil attribute yields NIL.
parse_retry_spec = lambda retry_attr: (NIL if sx_truthy(is_nil(retry_attr)) else (lambda parts: {'strategy': first(parts), 'start-ms': parse_int(nth(parts, 1), 1000), 'cap-ms': parse_int(nth(parts, 2), 30000)})(split(retry_attr, ':')))
|
||||
|
||||
# next-retry-ms
def next_retry_ms(current_ms, cap_ms):
    """Exponential-backoff step: double current_ms, clamped to cap_ms."""
    return min(cap_ms, current_ms * 2)
|
||||
|
||||
# filter-params
|
||||
filter_params = lambda params_spec, all_params: (all_params if sx_truthy(is_nil(params_spec)) else ([] if sx_truthy((params_spec == 'none')) else (all_params if sx_truthy((params_spec == '*')) else ((lambda excluded: filter(lambda p: (not sx_truthy(contains_p(excluded, first(p)))), all_params))(map(trim, split(slice(params_spec, 4), ','))) if sx_truthy(starts_with_p(params_spec, 'not ')) else (lambda allowed: filter(lambda p: contains_p(allowed, first(p)), all_params))(map(trim, split(params_spec, ',')))))))
|
||||
|
||||
# resolve-target
|
||||
# Resolve an element's sx-target attribute: nil or 'this' -> the element
# itself, 'closest' -> its parent, anything else -> a document query.
resolve_target = lambda el: (lambda sel: (el if sx_truthy((is_nil(sel) if sx_truthy(is_nil(sel)) else (sel == 'this'))) else (dom_parent(el) if sx_truthy((sel == 'closest')) else dom_query(sel))))(dom_get_attr(el, 'sx-target'))
|
||||
|
||||
# apply-optimistic
|
||||
apply_optimistic = lambda el: (lambda directive: (NIL if sx_truthy(is_nil(directive)) else (lambda target: (lambda state: _sx_begin((_sx_begin(_sx_dict_set(state, 'opacity', dom_get_style(target, 'opacity')), dom_set_style(target, 'opacity', '0'), dom_set_style(target, 'pointer-events', 'none')) if sx_truthy((directive == 'remove')) else (_sx_begin(_sx_dict_set(state, 'disabled', dom_get_prop(target, 'disabled')), dom_set_prop(target, 'disabled', True)) if sx_truthy((directive == 'disable')) else ((lambda cls: _sx_begin(_sx_dict_set(state, 'add-class', cls), dom_add_class(target, cls)))(slice(directive, 10)) if sx_truthy(starts_with_p(directive, 'add-class:')) else NIL))), state))({'target': target, 'directive': directive}))((resolve_target(el) if sx_truthy(resolve_target(el)) else el))))(dom_get_attr(el, 'sx-optimistic'))
|
||||
|
||||
# revert-optimistic
|
||||
revert_optimistic = lambda state: ((lambda target: (lambda directive: (_sx_begin(dom_set_style(target, 'opacity', (get(state, 'opacity') if sx_truthy(get(state, 'opacity')) else '')), dom_set_style(target, 'pointer-events', '')) if sx_truthy((directive == 'remove')) else (dom_set_prop(target, 'disabled', (get(state, 'disabled') if sx_truthy(get(state, 'disabled')) else False)) if sx_truthy((directive == 'disable')) else (dom_remove_class(target, get(state, 'add-class')) if sx_truthy(get(state, 'add-class')) else NIL))))(get(state, 'directive')))(get(state, 'target')) if sx_truthy(state) else NIL)
|
||||
|
||||
# find-oob-swaps
|
||||
find_oob_swaps = lambda container: (lambda results: _sx_begin(for_each(lambda attr: (lambda oob_els: for_each(lambda oob: (lambda swap_type: (lambda target_id: _sx_begin(dom_remove_attr(oob, attr), (_sx_append(results, {'element': oob, 'swap-type': swap_type, 'target-id': target_id}) if sx_truthy(target_id) else NIL)))(dom_id(oob)))((dom_get_attr(oob, attr) if sx_truthy(dom_get_attr(oob, attr)) else 'outerHTML')), oob_els))(dom_query_all(container, sx_str('[', attr, ']'))), ['sx-swap-oob', 'hx-swap-oob']), results))([])
|
||||
|
||||
# morph-node
|
||||
morph_node = lambda old_node, new_node: (NIL if sx_truthy((dom_has_attr_p(old_node, 'sx-preserve') if sx_truthy(dom_has_attr_p(old_node, 'sx-preserve')) else dom_has_attr_p(old_node, 'sx-ignore'))) else (dom_replace_child(dom_parent(old_node), dom_clone(new_node), old_node) if sx_truthy(((not sx_truthy((dom_node_type(old_node) == dom_node_type(new_node)))) if sx_truthy((not sx_truthy((dom_node_type(old_node) == dom_node_type(new_node))))) else (not sx_truthy((dom_node_name(old_node) == dom_node_name(new_node)))))) else ((dom_set_text_content(old_node, dom_text_content(new_node)) if sx_truthy((not sx_truthy((dom_text_content(old_node) == dom_text_content(new_node))))) else NIL) if sx_truthy(((dom_node_type(old_node) == 3) if sx_truthy((dom_node_type(old_node) == 3)) else (dom_node_type(old_node) == 8))) else (_sx_begin(sync_attrs(old_node, new_node), (morph_children(old_node, new_node) if sx_truthy((not sx_truthy((dom_is_active_element_p(old_node) if not sx_truthy(dom_is_active_element_p(old_node)) else dom_is_input_element_p(old_node))))) else NIL)) if sx_truthy((dom_node_type(old_node) == 1)) else NIL))))
|
||||
|
||||
# sync-attrs
|
||||
# Copy every attribute of new_el onto old_el (skipping values already
# equal), then remove attributes of old_el that new_el no longer has.
# The trailing (...)[-1] returns the second for_each's value.
sync_attrs = _sx_fn(lambda old_el, new_el: (
|
||||
for_each(lambda attr: (lambda name: (lambda val: (dom_set_attr(old_el, name, val) if sx_truthy((not sx_truthy((dom_get_attr(old_el, name) == val)))) else NIL))(nth(attr, 1)))(first(attr)), dom_attr_list(new_el)),
|
||||
for_each(lambda attr: (dom_remove_attr(old_el, first(attr)) if sx_truthy((not sx_truthy(dom_has_attr_p(new_el, first(attr))))) else NIL), dom_attr_list(old_el))
|
||||
)[-1])
|
||||
|
||||
# morph-children
|
||||
# Morph the children of old_parent in place to match new_parent's children.
# Keyed (id-bearing) old children are reused and repositioned; unkeyed old
# children at the cursor are morphed; otherwise new content is cloned in.
# Leftover old children past the cursor are removed unless marked
# sx-preserve / sx-ignore.  `_cells['oi']` is the cursor into old_kids
# (transpiled mutable cell).
def morph_children(old_parent, new_parent):
|
||||
_cells = {}
|
||||
old_kids = dom_child_list(old_parent)
|
||||
new_kids = dom_child_list(new_parent)
|
||||
# Index old children that carry an id, for keyed matching below.
old_by_id = reduce(lambda acc, kid: (lambda id: (_sx_begin(_sx_dict_set(acc, id, kid), acc) if sx_truthy(id) else acc))(dom_id(kid)), {}, old_kids)
|
||||
_cells['oi'] = 0
|
||||
for new_child in new_kids:
|
||||
match_id = dom_id(new_child)
|
||||
match_by_id = (dict_get(old_by_id, match_id) if sx_truthy(match_id) else NIL)
|
||||
# Case 1: a keyed old child matches the new child's id — reuse it.
if sx_truthy((match_by_id if not sx_truthy(match_by_id) else (not sx_truthy(is_nil(match_by_id))))):
|
||||
# Move the keyed node to the cursor position unless already there.
if sx_truthy(((_cells['oi'] < len(old_kids)) if not sx_truthy((_cells['oi'] < len(old_kids))) else (not sx_truthy((match_by_id == nth(old_kids, _cells['oi'])))))):
|
||||
dom_insert_before(old_parent, match_by_id, (nth(old_kids, _cells['oi']) if sx_truthy((_cells['oi'] < len(old_kids))) else NIL))
|
||||
morph_node(match_by_id, new_child)
|
||||
_cells['oi'] = (_cells['oi'] + 1)
|
||||
# Case 2: old children remain at the cursor — morph or insert before.
elif sx_truthy((_cells['oi'] < len(old_kids))):
|
||||
old_child = nth(old_kids, _cells['oi'])
|
||||
# Keyed old child vs unkeyed new child: keep the keyed node for a later
# match and place the new content in front of it instead of morphing.
if sx_truthy((dom_id(old_child) if not sx_truthy(dom_id(old_child)) else (not sx_truthy(match_id)))):
|
||||
dom_insert_before(old_parent, dom_clone(new_child), old_child)
|
||||
else:
|
||||
morph_node(old_child, new_child)
|
||||
_cells['oi'] = (_cells['oi'] + 1)
|
||||
# Case 3: old children exhausted — append a clone of the new child.
else:
|
||||
dom_append(old_parent, dom_clone(new_child))
|
||||
# Remove unconsumed old children still attached to old_parent, honouring
# sx-preserve / sx-ignore opt-outs.
return for_each(lambda i: ((lambda leftover: (dom_remove_child(old_parent, leftover) if sx_truthy((dom_is_child_of_p(leftover, old_parent) if not sx_truthy(dom_is_child_of_p(leftover, old_parent)) else ((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve')))) else (not sx_truthy(dom_has_attr_p(leftover, 'sx-ignore')))))) else NIL))(nth(old_kids, i)) if sx_truthy((i >= _cells['oi'])) else NIL), range(_cells['oi'], len(old_kids)))
|
||||
|
||||
# swap-dom-nodes
|
||||
# Morph-based swap of `new_nodes` into/around `target`, keyed on `strategy`:
#   innerHTML (and the None default) — morph target's children to match;
#     a non-fragment node is first wrapped in a temporary <div>.
#   outerHTML — morph target itself; for a fragment, morph its first child
#     into target and insert the remaining siblings after it (an empty
#     fragment removes target). Returns target's parent.
#   afterend / beforeend / afterbegin / beforebegin — positional insertion.
#   delete — remove target from its parent.  none — no-op.
swap_dom_nodes = lambda target, new_nodes, strategy: _sx_case(strategy, [('innerHTML', lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL)))), ('outerHTML', lambda: (lambda parent: _sx_begin(((lambda fc: (_sx_begin(morph_node(target, fc), (lambda sib: insert_remaining_siblings(parent, target, sib))(dom_next_sibling(fc))) if sx_truthy(fc) else dom_remove_child(parent, target)))(dom_first_child(new_nodes)) if sx_truthy(dom_is_fragment_p(new_nodes)) else morph_node(target, new_nodes)), parent))(dom_parent(target))), ('afterend', lambda: dom_insert_after(target, new_nodes)), ('beforeend', lambda: dom_append(target, new_nodes)), ('afterbegin', lambda: dom_prepend(target, new_nodes)), ('beforebegin', lambda: dom_insert_before(dom_parent(target), new_nodes, target)), ('delete', lambda: dom_remove_child(dom_parent(target), target)), ('none', lambda: NIL), (None, lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL))))])
|
||||
|
||||
# insert-remaining-siblings
|
||||
def insert_remaining_siblings(parent, ref_node, sib):
    # Walk the sibling chain starting at `sib`, re-inserting each node
    # directly after the previously placed one so original order is kept.
    # `parent` is threaded through for interface compatibility; insertion
    # itself is positional via dom_insert_after. Returns NIL.
    anchor = ref_node
    cursor = sib
    while sx_truthy(cursor):
        # Capture the next sibling BEFORE insertion detaches the cursor.
        upcoming = dom_next_sibling(cursor)
        dom_insert_after(anchor, cursor)
        anchor = cursor
        cursor = upcoming
    return NIL
|
||||
|
||||
# swap-html-string
|
||||
# Raw-HTML counterpart of swap_dom_nodes: applies an HTML string to `target`
# using the same strategy names, but via innerHTML / insertAdjacentHTML
# instead of DOM morphing. outerHTML inserts after target then removes it,
# returning the parent. Unknown/None strategy falls back to innerHTML.
swap_html_string = lambda target, html, strategy: _sx_case(strategy, [('innerHTML', lambda: dom_set_inner_html(target, html)), ('outerHTML', lambda: (lambda parent: _sx_begin(dom_insert_adjacent_html(target, 'afterend', html), dom_remove_child(parent, target), parent))(dom_parent(target))), ('afterend', lambda: dom_insert_adjacent_html(target, 'afterend', html)), ('beforeend', lambda: dom_insert_adjacent_html(target, 'beforeend', html)), ('afterbegin', lambda: dom_insert_adjacent_html(target, 'afterbegin', html)), ('beforebegin', lambda: dom_insert_adjacent_html(target, 'beforebegin', html)), ('delete', lambda: dom_remove_child(dom_parent(target), target)), ('none', lambda: NIL), (None, lambda: dom_set_inner_html(target, html))])
|
||||
|
||||
# handle-history
|
||||
# Update browser history after a swap. Precedence, highest first:
#   1. server 'replace-url' response header -> replaceState(header value);
#   2. sx-push-url attribute -> pushState ("true" means use the request url,
#      "false" disables, any other value is pushed as-is);
#   3. sx-replace-url attribute -> replaceState (same "true"/"false" rules).
# Does nothing when none of the three are set.
handle_history = lambda el, url, resp_headers: (lambda push_url: (lambda replace_url: (lambda hdr_replace: (browser_replace_state(hdr_replace) if sx_truthy(hdr_replace) else (browser_push_state((url if sx_truthy((push_url == 'true')) else push_url)) if sx_truthy((push_url if not sx_truthy(push_url) else (not sx_truthy((push_url == 'false'))))) else (browser_replace_state((url if sx_truthy((replace_url == 'true')) else replace_url)) if sx_truthy((replace_url if not sx_truthy(replace_url) else (not sx_truthy((replace_url == 'false'))))) else NIL))))(get(resp_headers, 'replace-url')))(dom_get_attr(el, 'sx-replace-url')))(dom_get_attr(el, 'sx-push-url'))
|
||||
|
||||
# PRELOAD_TTL
|
||||
PRELOAD_TTL = 30000  # preload cache entry lifetime in milliseconds (30s)
|
||||
|
||||
# preload-cache-get
|
||||
# Fetch-and-consume a preload cache entry for `url`.
# The entry is deleted from the cache on EVERY hit (single-use semantics);
# entries older than PRELOAD_TTL ms are treated as expired and yield NIL,
# fresh entries are returned after removal. Missing url -> NIL.
preload_cache_get = lambda cache, url: (lambda entry: (NIL if sx_truthy(is_nil(entry)) else (_sx_begin(dict_delete(cache, url), NIL) if sx_truthy(((now_ms() - get(entry, 'timestamp')) > PRELOAD_TTL)) else _sx_begin(dict_delete(cache, url), entry))))(dict_get(cache, url))
|
||||
|
||||
# preload-cache-set
|
||||
def preload_cache_set(cache, url, text, content_type):
    # Record a preloaded response body for `url`, timestamped so that
    # preload_cache_get can expire it after PRELOAD_TTL milliseconds.
    entry = {'text': text, 'content-type': content_type, 'timestamp': now_ms()}
    return _sx_dict_set(cache, url, entry)
|
||||
|
||||
# classify-trigger
|
||||
def classify_trigger(trigger):
    # Categorise a parsed trigger spec by its 'event' field:
    # 'every' -> 'poll'; 'intersect', 'load' and 'revealed' map to
    # themselves; anything else is treated as a plain DOM 'event'.
    event = get(trigger, 'event')
    if sx_truthy(event == 'every'):
        return 'poll'
    if sx_truthy(event == 'intersect'):
        return 'intersect'
    if sx_truthy(event == 'load'):
        return 'load'
    if sx_truthy(event == 'revealed'):
        return 'revealed'
    return 'event'
|
||||
|
||||
# should-boost-link?
|
||||
# A link is "boosted" (intercepted for SX navigation) only when ALL hold:
# it has an href; the href is not a fragment ("#..."), "javascript:" or
# "mailto:" url; it is same-origin; and the link carries none of
# sx-get / sx-post / sx-disable (which take over or opt out explicitly).
# Transpiled (and ...) chain: `x if not sx_truthy(x) else y` — a missing
# href short-circuits and is returned as the (falsy) result.
should_boost_link_p = lambda link: (lambda href: (href if not sx_truthy(href) else ((not sx_truthy(starts_with_p(href, '#'))) if not sx_truthy((not sx_truthy(starts_with_p(href, '#')))) else ((not sx_truthy(starts_with_p(href, 'javascript:'))) if not sx_truthy((not sx_truthy(starts_with_p(href, 'javascript:')))) else ((not sx_truthy(starts_with_p(href, 'mailto:'))) if not sx_truthy((not sx_truthy(starts_with_p(href, 'mailto:')))) else (browser_same_origin_p(href) if not sx_truthy(browser_same_origin_p(href)) else ((not sx_truthy(dom_has_attr_p(link, 'sx-get'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(link, 'sx-get')))) else ((not sx_truthy(dom_has_attr_p(link, 'sx-post'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(link, 'sx-post')))) else (not sx_truthy(dom_has_attr_p(link, 'sx-disable')))))))))))(dom_get_attr(link, 'href'))
|
||||
|
||||
# should-boost-form?
|
||||
def should_boost_form_p(form):
    # A form is boosted only when it opts into none of the explicit SX
    # attributes: sx-get / sx-post take over submission themselves, and
    # sx-disable opts out of boosting entirely.
    for attr in ('sx-get', 'sx-post', 'sx-disable'):
        if sx_truthy(dom_has_attr_p(form, attr)):
            return False
    return True
|
||||
|
||||
# parse-sse-swap
|
||||
def parse_sse_swap(el):
    # SSE event name this element listens for: its sx-sse-swap attribute,
    # falling back to the default 'message' event when absent/empty.
    event_name = dom_get_attr(el, 'sx-sse-swap')
    if sx_truthy(event_name):
        return event_name
    return 'message'
|
||||
|
||||
|
||||
# === Transpiled from router (client-side route matching) ===
|
||||
|
||||
# split-path-segments
|
||||
# Split a url path into segments: drop one leading "/", drop one trailing
# "/" (so "/docs/" and "/docs" are equivalent), and return [] for the root
# path rather than [""].
split_path_segments = lambda path: (lambda trimmed: (lambda trimmed2: ([] if sx_truthy(empty_p(trimmed2)) else split(trimmed2, '/')))((slice(trimmed, 0, (len(trimmed) - 1)) if sx_truthy(((not sx_truthy(empty_p(trimmed))) if not sx_truthy((not sx_truthy(empty_p(trimmed)))) else ends_with_p(trimmed, '/'))) else trimmed)))((slice(path, 1) if sx_truthy(starts_with_p(path, '/')) else path))
|
||||
|
||||
# make-route-segment
|
||||
# Compile one pattern segment: "<name>" (must both start with '<' and end
# with '>') becomes {type: "param", value: name} with the angle brackets
# stripped; any other segment is a {type: "literal", value: seg} match.
make_route_segment = lambda seg: ((lambda param_name: (lambda d: _sx_begin(_sx_dict_set(d, 'type', 'param'), _sx_dict_set(d, 'value', param_name), d))({}))(slice(seg, 1, (len(seg) - 1))) if sx_truthy((starts_with_p(seg, '<') if not sx_truthy(starts_with_p(seg, '<')) else ends_with_p(seg, '>'))) else (lambda d: _sx_begin(_sx_dict_set(d, 'type', 'literal'), _sx_dict_set(d, 'value', seg), d))({}))
|
||||
|
||||
# parse-route-pattern
|
||||
def parse_route_pattern(pattern):
    # Compile a route pattern string (e.g. "/stream/<id>") into the list of
    # segment descriptors consumed by match_route_segments.
    return map(make_route_segment, split_path_segments(pattern))
|
||||
|
||||
# match-route-segments
|
||||
# Match concrete path segments against parsed pattern segments.
# Returns a dict of extracted param bindings on success, NIL on mismatch.
# Lengths must agree exactly; 'literal' segments compare by equality,
# 'param' segments bind the path segment under the parameter name, and an
# unknown segment type marks the route unmatched. `_cells['matched']`
# short-circuits the remaining iterations once any segment fails.
def match_route_segments(path_segs, parsed_segs):
|
||||
_cells = {}
|
||||
return (NIL if sx_truthy((not sx_truthy((len(path_segs) == len(parsed_segs))))) else (lambda params: _sx_begin(_sx_cell_set(_cells, 'matched', True), _sx_begin(for_each_indexed(lambda i, parsed_seg: ((lambda path_seg: (lambda seg_type: ((_sx_cell_set(_cells, 'matched', False) if sx_truthy((not sx_truthy((path_seg == get(parsed_seg, 'value'))))) else NIL) if sx_truthy((seg_type == 'literal')) else (_sx_dict_set(params, get(parsed_seg, 'value'), path_seg) if sx_truthy((seg_type == 'param')) else _sx_cell_set(_cells, 'matched', False))))(get(parsed_seg, 'type')))(nth(path_segs, i)) if sx_truthy(_cells['matched']) else NIL), parsed_segs), (params if sx_truthy(_cells['matched']) else NIL))))({}))
|
||||
|
||||
# match-route
|
||||
def match_route(path, pattern):
    # Convenience wrapper: compile `pattern` and match `path` against it.
    # Returns the extracted params dict on success, NIL otherwise.
    segments = split_path_segments(path)
    compiled = parse_route_pattern(pattern)
    return match_route_segments(segments, compiled)
|
||||
|
||||
# find-matching-route
|
||||
def find_matching_route(path, routes):
    # Return the first route whose parsed pattern matches `path`, as a copy
    # of the route dict extended with a 'params' key holding the extracted
    # path parameters. Returns NIL when no route matches.
    #
    # Fix: return as soon as the first match is found. The original kept
    # iterating over the remaining routes with an is_nil(result) guard on
    # every step — a wasted O(n) scan that could never change the result.
    path_segs = split_path_segments(path)
    for route in routes:
        params = match_route_segments(path_segs, get(route, 'parsed'))
        if not sx_truthy(is_nil(params)):
            # Copy the route so the registered route table is never mutated.
            matched = merge(route, {})
            matched['params'] = params
            return matched
    return NIL
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Fixups -- wire up render adapter dispatch
|
||||
# =========================================================================
|
||||
|
||||
@@ -492,3 +492,177 @@
|
||||
(assert-equal 0 (reduce (fn (acc x) (+ acc x)) 0 (list)))
|
||||
(assert-equal 0 (len (list)))
|
||||
(assert-equal "" (str))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; defpage
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "defpage"
|
||||
(deftest "basic defpage returns page-def"
|
||||
(let ((p (defpage test-basic :path "/test" :auth :public :content (div "hello"))))
|
||||
(assert-true (not (nil? p)))
|
||||
(assert-equal "test-basic" (get p "name"))
|
||||
(assert-equal "/test" (get p "path"))
|
||||
(assert-equal "public" (get p "auth"))))
|
||||
|
||||
(deftest "defpage content expr is unevaluated AST"
|
||||
(let ((p (defpage test-content :path "/c" :auth :public :content (~my-comp :title "hi"))))
|
||||
(assert-true (not (nil? (get p "content"))))))
|
||||
|
||||
(deftest "defpage with :stream"
|
||||
(let ((p (defpage test-stream :path "/s" :auth :public :stream true :content (div "x"))))
|
||||
(assert-equal true (get p "stream"))))
|
||||
|
||||
(deftest "defpage with :shell"
|
||||
(let ((p (defpage test-shell :path "/sh" :auth :public :stream true
|
||||
:shell (~my-layout (~suspense :id "data" :fallback (div "loading...")))
|
||||
:content (~my-streamed :data data-val))))
|
||||
(assert-true (not (nil? (get p "shell"))))
|
||||
(assert-true (not (nil? (get p "content"))))))
|
||||
|
||||
(deftest "defpage with :fallback"
|
||||
(let ((p (defpage test-fallback :path "/f" :auth :public :stream true
|
||||
:fallback (div :class "skeleton" "loading")
|
||||
:content (div "done"))))
|
||||
(assert-true (not (nil? (get p "fallback"))))))
|
||||
|
||||
(deftest "defpage with :data"
|
||||
(let ((p (defpage test-data :path "/d" :auth :public
|
||||
:data (fetch-items)
|
||||
:content (~items-list :items items))))
|
||||
(assert-true (not (nil? (get p "data"))))))
|
||||
|
||||
(deftest "defpage missing fields are nil"
|
||||
(let ((p (defpage test-minimal :path "/m" :auth :public :content (div "x"))))
|
||||
(assert-nil (get p "data"))
|
||||
(assert-nil (get p "filter"))
|
||||
(assert-nil (get p "aside"))
|
||||
(assert-nil (get p "menu"))
|
||||
(assert-nil (get p "shell"))
|
||||
(assert-nil (get p "fallback"))
|
||||
(assert-equal false (get p "stream")))))
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream data protocol (from forms.sx)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "stream-chunk-id"
|
||||
(deftest "extracts stream-id from chunk"
|
||||
(assert-equal "my-slot" (stream-chunk-id {"stream-id" "my-slot" "x" 1})))
|
||||
|
||||
(deftest "defaults to stream-content when missing"
|
||||
(assert-equal "stream-content" (stream-chunk-id {"x" 1 "y" 2}))))
|
||||
|
||||
(defsuite "stream-chunk-bindings"
|
||||
(deftest "removes stream-id from chunk"
|
||||
(let ((bindings (stream-chunk-bindings {"stream-id" "slot" "name" "alice" "age" 30})))
|
||||
(assert-equal "alice" (get bindings "name"))
|
||||
(assert-equal 30 (get bindings "age"))
|
||||
(assert-nil (get bindings "stream-id"))))
|
||||
|
||||
(deftest "returns all keys when no stream-id"
|
||||
(let ((bindings (stream-chunk-bindings {"a" 1 "b" 2})))
|
||||
(assert-equal 1 (get bindings "a"))
|
||||
(assert-equal 2 (get bindings "b")))))
|
||||
|
||||
(defsuite "normalize-binding-key"
|
||||
(deftest "converts underscores to hyphens"
|
||||
(assert-equal "my-key" (normalize-binding-key "my_key")))
|
||||
|
||||
(deftest "leaves hyphens unchanged"
|
||||
(assert-equal "my-key" (normalize-binding-key "my-key")))
|
||||
|
||||
(deftest "handles multiple underscores"
|
||||
(assert-equal "a-b-c" (normalize-binding-key "a_b_c"))))
|
||||
|
||||
(defsuite "bind-stream-chunk"
|
||||
(deftest "creates fresh env with bindings"
|
||||
(let ((base {"existing" 42})
|
||||
(chunk {"stream-id" "slot" "user-name" "bob" "count" 5})
|
||||
(env (bind-stream-chunk chunk base)))
|
||||
;; Base env bindings are preserved
|
||||
(assert-equal 42 (get env "existing"))
|
||||
;; Chunk bindings are added (stream-id removed)
|
||||
(assert-equal "bob" (get env "user-name"))
|
||||
(assert-equal 5 (get env "count"))
|
||||
;; stream-id is not in env
|
||||
(assert-nil (get env "stream-id"))))
|
||||
|
||||
(deftest "isolates env from base — bindings don't leak to base"
|
||||
(let ((base {"x" 1})
|
||||
(chunk {"stream-id" "s" "y" 2})
|
||||
(env (bind-stream-chunk chunk base)))
|
||||
;; Chunk bindings should not appear in base
|
||||
(assert-nil (get base "y"))
|
||||
;; Base bindings should be in derived env
|
||||
(assert-equal 1 (get env "x")))))
|
||||
|
||||
(defsuite "validate-stream-data"
|
||||
(deftest "valid: list of dicts"
|
||||
(assert-true (validate-stream-data
|
||||
(list {"stream-id" "a" "x" 1}
|
||||
{"stream-id" "b" "y" 2}))))
|
||||
|
||||
(deftest "valid: empty list"
|
||||
(assert-true (validate-stream-data (list))))
|
||||
|
||||
(deftest "invalid: single dict (not a list)"
|
||||
(assert-equal false (validate-stream-data {"x" 1})))
|
||||
|
||||
(deftest "invalid: list containing non-dict"
|
||||
(assert-equal false (validate-stream-data (list {"x" 1} "oops" {"y" 2})))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream end-to-end scenarios
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "multi-stream routing"
|
||||
(deftest "stream-chunk-id routes different chunks to different slots"
|
||||
(let ((chunks (list
|
||||
{"stream-id" "stream-fast" "msg" "quick"}
|
||||
{"stream-id" "stream-medium" "msg" "steady"}
|
||||
{"stream-id" "stream-slow" "msg" "slow"}))
|
||||
(ids (map stream-chunk-id chunks)))
|
||||
(assert-equal "stream-fast" (nth ids 0))
|
||||
(assert-equal "stream-medium" (nth ids 1))
|
||||
(assert-equal "stream-slow" (nth ids 2))))
|
||||
|
||||
(deftest "bind-stream-chunk creates isolated envs per chunk"
|
||||
(let ((base {"layout" "main"})
|
||||
(chunk-a {"stream-id" "a" "title" "First" "count" 1})
|
||||
(chunk-b {"stream-id" "b" "title" "Second" "count" 2})
|
||||
(env-a (bind-stream-chunk chunk-a base))
|
||||
(env-b (bind-stream-chunk chunk-b base)))
|
||||
;; Each env has its own bindings
|
||||
(assert-equal "First" (get env-a "title"))
|
||||
(assert-equal "Second" (get env-b "title"))
|
||||
(assert-equal 1 (get env-a "count"))
|
||||
(assert-equal 2 (get env-b "count"))
|
||||
;; Both share base
|
||||
(assert-equal "main" (get env-a "layout"))
|
||||
(assert-equal "main" (get env-b "layout"))
|
||||
;; Neither leaks into base
|
||||
(assert-nil (get base "title"))))
|
||||
|
||||
(deftest "normalize-binding-key applied to chunk keys"
|
||||
(let ((chunk {"stream-id" "s" "user_name" "alice" "item_count" 3})
|
||||
(bindings (stream-chunk-bindings chunk)))
|
||||
;; Keys with underscores need normalizing for SX env
|
||||
(assert-equal "alice" (get bindings "user_name"))
|
||||
;; normalize-binding-key converts them
|
||||
(assert-equal "user-name" (normalize-binding-key "user_name"))
|
||||
(assert-equal "item-count" (normalize-binding-key "item_count"))))
|
||||
|
||||
(deftest "defpage stream flag defaults to false"
|
||||
(let ((p (defpage test-no-stream :path "/ns" :auth :public :content (div "x"))))
|
||||
(assert-equal false (get p "stream"))))
|
||||
|
||||
(deftest "defpage stream true recorded in page-def"
|
||||
(let ((p (defpage test-with-stream :path "/ws" :auth :public
|
||||
:stream true
|
||||
:shell (~layout (~suspense :id "data"))
|
||||
:content (~chunk :val val))))
|
||||
(assert-equal true (get p "stream"))
|
||||
(assert-true (not (nil? (get p "shell")))))))
|
||||
|
||||
@@ -122,4 +122,40 @@
|
||||
;; /docs/ should match docs-index, not docs-page
|
||||
(let ((result (find-matching-route "/docs/" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal "docs-index" (get result "name")))))
|
||||
|
||||
(deftest "propagates stream flag from route"
|
||||
(let ((routes (list
|
||||
{:pattern "/demo/streaming"
|
||||
:parsed (parse-route-pattern "/demo/streaming")
|
||||
:name "streaming-demo"
|
||||
:stream true
|
||||
:has-data true})))
|
||||
(let ((result (find-matching-route "/demo/streaming" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal true (get result "stream"))
|
||||
(assert-equal true (get result "has-data")))))
|
||||
|
||||
(deftest "non-streaming route has no stream flag"
|
||||
(let ((routes (list
|
||||
{:pattern "/about"
|
||||
:parsed (parse-route-pattern "/about")
|
||||
:name "about"
|
||||
:has-data false})))
|
||||
(let ((result (find-matching-route "/about" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-nil (get result "stream")))))
|
||||
|
||||
(deftest "streaming route with params propagates all properties"
|
||||
(let ((routes (list
|
||||
{:pattern "/stream/<id>"
|
||||
:parsed (parse-route-pattern "/stream/<id>")
|
||||
:name "stream-page"
|
||||
:stream true
|
||||
:has-data true
|
||||
:content "expr"})))
|
||||
(let ((result (find-matching-route "/stream/fast" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal true (get result "stream"))
|
||||
(assert-equal "fast" (get (get result "params") "id"))
|
||||
(assert-equal "expr" (get result "content"))))))
|
||||
|
||||
@@ -327,6 +327,25 @@ def _load_engine_from_bootstrap(env):
|
||||
eval_file("engine.sx", env)
|
||||
|
||||
|
||||
def _load_forms_from_bootstrap(env):
|
||||
"""Load forms functions (including streaming protocol) from sx_ref.py."""
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
stream_chunk_id,
|
||||
stream_chunk_bindings,
|
||||
normalize_binding_key,
|
||||
bind_stream_chunk,
|
||||
validate_stream_data,
|
||||
)
|
||||
env["stream-chunk-id"] = stream_chunk_id
|
||||
env["stream-chunk-bindings"] = stream_chunk_bindings
|
||||
env["normalize-binding-key"] = normalize_binding_key
|
||||
env["bind-stream-chunk"] = bind_stream_chunk
|
||||
env["validate-stream-data"] = validate_stream_data
|
||||
except ImportError:
|
||||
eval_file("forms.sx", env)
|
||||
|
||||
|
||||
def main():
|
||||
global passed, failed, test_num
|
||||
|
||||
@@ -362,6 +381,8 @@ def main():
|
||||
continue
|
||||
|
||||
# Load prerequisite spec modules
|
||||
if spec_name == "eval":
|
||||
_load_forms_from_bootstrap(env)
|
||||
if spec_name == "router":
|
||||
_load_router_from_bootstrap(env)
|
||||
if spec_name == "deps":
|
||||
|
||||
@@ -6,7 +6,7 @@ against the bootstrap-compiled evaluator to verify correctness.
|
||||
|
||||
import pytest
|
||||
from shared.sx.parser import parse
|
||||
from shared.sx.types import Symbol, Keyword, NIL, Lambda, Component, Macro, PageDef
|
||||
from shared.sx.ref import sx_ref
|
||||
|
||||
|
||||
|
||||
@@ -243,8 +243,24 @@ class PageDef:
|
||||
menu_expr: Any
|
||||
stream: bool = False # enable streaming response
|
||||
fallback_expr: Any = None # fallback content while streaming
|
||||
shell_expr: Any = None # immediate shell content (wraps suspense)
|
||||
closure: dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
_FIELD_MAP = {
|
||||
"name": "name", "path": "path", "auth": "auth",
|
||||
"layout": "layout", "cache": "cache",
|
||||
"data": "data_expr", "content": "content_expr",
|
||||
"filter": "filter_expr", "aside": "aside_expr",
|
||||
"menu": "menu_expr", "stream": "stream",
|
||||
"fallback": "fallback_expr", "shell": "shell_expr",
|
||||
}
|
||||
|
||||
def get(self, key, default=None):
|
||||
attr = self._FIELD_MAP.get(key)
|
||||
if attr is not None:
|
||||
return getattr(self, attr)
|
||||
return default
|
||||
|
||||
def __repr__(self):
|
||||
return f"<page:{self.name} path={self.path!r}>"
|
||||
|
||||
|
||||
@@ -72,5 +72,5 @@
|
||||
|
||||
(define-page-helper "streaming-demo-data"
|
||||
:params ()
|
||||
:returns "async-generator<dict>"
|
||||
:service "sx")
|
||||
|
||||
@@ -147,7 +147,9 @@
|
||||
(dict :label "SX CI Pipeline" :href "/plans/sx-ci"
|
||||
:summary "Build, test, and deploy in s-expressions — CI pipelines as SX components.")
|
||||
(dict :label "CSSX Components" :href "/plans/cssx-components"
|
||||
:summary "Styling as components — replace the style dictionary with regular defcomps that apply classes, respond to data, and compose naturally.")
|
||||
(dict :label "Live Streaming" :href "/plans/live-streaming"
|
||||
:summary "SSE and WebSocket transports for re-resolving suspense slots after initial page load — live data, real-time collaboration.")))
|
||||
|
||||
(define bootstrappers-nav-items (list
|
||||
(dict :label "Overview" :href "/bootstrappers/")
|
||||
|
||||
112
sx/sx/plans.sx
112
sx/sx/plans.sx
@@ -2314,3 +2314,115 @@
|
||||
(~doc-section :title "Philosophy" :id "philosophy"
|
||||
(p "The web has spent two decades building increasingly complex CSS tooling: preprocessors, CSS-in-JS, atomic CSS, utility frameworks, design tokens, style dictionaries. Each solves a real problem but adds a new system with its own caching, bundling, and mental model.")
|
||||
(p "CSSX components collapse all of this back to the simplest possible thing: " (strong "a function that takes data and returns markup with classes.") " That's what a component already is. There is no separate styling system because there doesn't need to be."))))
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
;; Live Streaming — SSE & WebSocket
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~plan-live-streaming-content ()
|
||||
(~doc-page :title "Live Streaming"
|
||||
|
||||
(~doc-section :title "Context" :id "context"
|
||||
(p "SX streaming currently uses chunked transfer encoding: the server sends an HTML shell with "
|
||||
(code "~suspense") " placeholders, then resolves each one via inline "
|
||||
(code "<script>__sxResolve(id, sx)</script>") " chunks as IO completes. "
|
||||
"Once the response finishes, the connection closes. Each slot resolves exactly once.")
|
||||
(p "This is powerful for initial page load but doesn't support live updates "
|
||||
"— dashboard metrics, chat messages, collaborative editing, real-time notifications. "
|
||||
"For that we need a persistent transport: " (strong "SSE") " (Server-Sent Events) or " (strong "WebSockets") ".")
|
||||
(p "The key insight: the client already has " (code "Sx.resolveSuspense(id, sxSource)") " which replaces "
|
||||
"DOM content by suspense ID. A persistent connection just needs to keep calling it."))
|
||||
|
||||
(~doc-section :title "Design" :id "design"
|
||||
|
||||
(~doc-subsection :title "Transport Hierarchy"
|
||||
(p "Three tiers, progressively more capable:")
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li (strong "Chunked streaming") " (done) — single HTTP response, each suspense resolves once. "
|
||||
"Best for: initial page load with slow IO.")
|
||||
(li (strong "SSE") " — persistent one-way connection, server pushes resolve events. "
|
||||
"Best for: dashboards, notifications, progress bars, any read-only live data.")
|
||||
(li (strong "WebSocket") " — bidirectional, client can send events back. "
|
||||
"Best for: chat, collaborative editing, interactive applications.")))
|
||||
|
||||
(~doc-subsection :title "SSE Protocol"
|
||||
(p "A " (code "~live") " component declares a persistent connection to an SSE endpoint:")
|
||||
(~doc-code :code (highlight "(~live :src \"/api/stream/dashboard\"\n (~suspense :id \"cpu\" :fallback (span \"Loading...\"))\n (~suspense :id \"memory\" :fallback (span \"Loading...\"))\n (~suspense :id \"requests\" :fallback (span \"Loading...\")))" "lisp"))
|
||||
(p "The server SSE endpoint yields SX resolve events:")
|
||||
(~doc-code :code (highlight "async def dashboard_stream():\n while True:\n stats = await get_system_stats()\n yield sx_sse_event(\"cpu\", f'(~stat-badge :value \"{stats.cpu}%\")')\n yield sx_sse_event(\"memory\", f'(~stat-badge :value \"{stats.mem}%\")')\n await asyncio.sleep(1)" "python"))
|
||||
(p "SSE wire format — each event is a suspense resolve:")
|
||||
(~doc-code :code (highlight "event: sx-resolve\ndata: {\"id\": \"cpu\", \"sx\": \"(~stat-badge :value \\\"42%\\\")\"}\n\nevent: sx-resolve\ndata: {\"id\": \"memory\", \"sx\": \"(~stat-badge :value \\\"68%\\\")\"}" "text")))
|
||||
|
||||
(~doc-subsection :title "WebSocket Protocol"
|
||||
(p "A " (code "~ws") " component establishes a bidirectional channel:")
|
||||
(~doc-code :code (highlight "(~ws :src \"/ws/chat\"\n :on-message handle-chat-message\n (~suspense :id \"messages\" :fallback (div \"Connecting...\"))\n (~suspense :id \"typing\" :fallback (span)))" "lisp"))
|
||||
(p "Client can send SX expressions back:")
|
||||
(~doc-code :code (highlight ";; Client sends:\n(sx-send ws-conn '(chat-message :text \"hello\" :user \"alice\"))\n\n;; Server receives, broadcasts to all connected clients:\n;; event: sx-resolve for \"messages\" suspense" "lisp")))
|
||||
|
||||
(~doc-subsection :title "Shared Resolution Mechanism"
|
||||
(p "All three transports use the same client-side resolution:")
|
||||
(ul :class "list-disc list-inside space-y-1 text-stone-600 text-sm"
|
||||
(li (code "Sx.resolveSuspense(id, sxSource)") " — already exists, parses SX and renders to DOM")
|
||||
(li "SSE: " (code "EventSource") " → " (code "onmessage") " → " (code "resolveSuspense()"))
|
||||
(li "WS: " (code "WebSocket") " → " (code "onmessage") " → " (code "resolveSuspense()"))
|
||||
(li "The component env (defs needed for rendering) can be sent once on connection open")
|
||||
(li "Subsequent events only need the SX expression — lightweight wire format"))))
|
||||
|
||||
(~doc-section :title "Implementation" :id "implementation"
|
||||
|
||||
(~doc-subsection :title "Phase 1: SSE Infrastructure"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Add " (code "~live") " component to " (code "shared/sx/templates/") " — renders child suspense placeholders, "
|
||||
"emits " (code "data-sx-live") " attribute with SSE endpoint URL")
|
||||
(li "Add " (code "sx-live.js") " client module — on boot, finds " (code "[data-sx-live]") " elements, "
|
||||
"opens EventSource, routes events to " (code "resolveSuspense()"))
|
||||
(li "Add " (code "sx_sse_event(id, sx)") " helper for Python SSE endpoints — formats SSE wire protocol")
|
||||
(li "Add " (code "sse_stream()") " Quart helper — returns async generator Response with correct headers")))
|
||||
|
||||
(~doc-subsection :title "Phase 2: Defpage Integration"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "New " (code ":live") " defpage slot — declares SSE endpoint + suspense bindings")
|
||||
(li "Auto-mount SSE endpoint alongside the page route")
|
||||
(li "Component defs sent as first SSE event on connection open")
|
||||
(li "Automatic reconnection with exponential backoff")))
|
||||
|
||||
(~doc-subsection :title "Phase 3: WebSocket"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Add " (code "~ws") " component — bidirectional channel with send/receive")
|
||||
(li "Add " (code "sx-ws.js") " client module — WebSocket management, message routing")
|
||||
(li "Server-side: Quart WebSocket handlers that receive and broadcast SX events")
|
||||
(li "Client-side: " (code "sx-send") " primitive for sending SX expressions to server")))
|
||||
|
||||
(~doc-subsection :title "Phase 4: Spec & Boundary"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Spec " (code "~live") " and " (code "~ws") " in " (code "render.sx") " (how they render in each mode)")
|
||||
(li "Add SSE/WS IO primitives to " (code "boundary.sx"))
|
||||
(li "Bootstrap SSE/WS connection management into " (code "sx-ref.js"))
|
||||
(li "Spec-level tests for resolve, reconnection, and message routing"))))
|
||||
|
||||
(~doc-section :title "Files" :id "files"
|
||||
(table :class "w-full text-left border-collapse"
|
||||
(thead
|
||||
(tr :class "border-b border-stone-200"
|
||||
(th :class "px-3 py-2 font-medium text-stone-600" "File")
|
||||
(th :class "px-3 py-2 font-medium text-stone-600" "Purpose")))
|
||||
(tbody
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/templates/live.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "~live component definition"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/static/scripts/sx-live.js")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE client — EventSource → resolveSuspense"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/sse.py")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE helpers — event formatting, stream response"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/static/scripts/sx-ws.js")
|
||||
(td :class "px-3 py-2 text-stone-700" "WebSocket client — bidirectional SX channel"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/ref/render.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "Spec: ~live and ~ws rendering in all modes"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/ref/boundary.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE/WS IO primitive declarations")))))))
|
||||
|
||||
|
||||
@@ -2,30 +2,57 @@
|
||||
;;
|
||||
;; This page uses :stream true to enable chunked transfer encoding.
|
||||
;; The browser receives the HTML shell immediately with loading skeletons,
|
||||
;; then the content fills in when the (deliberately slow) data resolves.
|
||||
;; then content fills in as each IO resolves at staggered intervals.
|
||||
;;
|
||||
;; The :data expression simulates 1.5s IO delay. Without streaming, the
|
||||
;; browser would wait the full 1.5s before seeing anything. With streaming,
|
||||
;; the page skeleton appears instantly.
|
||||
;; The :data expression is an async generator that yields three chunks
|
||||
;; at 1s, 3s, and 5s. Each chunk resolves a different ~suspense slot.
|
||||
|
||||
(defcomp ~streaming-demo-content (&key streamed-at message items)
|
||||
;; Color map for stream chunk styling (all string keys for get compatibility)
|
||||
(define stream-colors
|
||||
{"green" {"border" "border-green-200" "bg" "bg-green-50" "title" "text-green-900"
|
||||
"text" "text-green-800" "sub" "text-green-700" "code" "bg-green-100"
|
||||
"dot" "bg-green-400"}
|
||||
"blue" {"border" "border-blue-200" "bg" "bg-blue-50" "title" "text-blue-900"
|
||||
"text" "text-blue-800" "sub" "text-blue-700" "code" "bg-blue-100"
|
||||
"dot" "bg-blue-400"}
|
||||
"amber" {"border" "border-amber-200" "bg" "bg-amber-50" "title" "text-amber-900"
|
||||
"text" "text-amber-800" "sub" "text-amber-700" "code" "bg-amber-100"
|
||||
"dot" "bg-amber-400"}})
|
||||
|
||||
;; Generic streamed content chunk — rendered once per yield from the
|
||||
;; async generator. The :content expression receives different bindings
|
||||
;; each time, and the _stream_id determines which ~suspense slot it fills.
|
||||
(defcomp ~streaming-demo-chunk (&key stream-label stream-color stream-message stream-time)
|
||||
(let ((colors (get stream-colors stream-color)))
|
||||
(div :class (str "rounded-lg border p-5 space-y-3 " (get colors "border") " " (get colors "bg"))
|
||||
(div :class "flex items-center gap-2"
|
||||
(div :class (str "w-3 h-3 rounded-full " (get colors "dot")))
|
||||
(h2 :class (str "text-lg font-semibold " (get colors "title")) stream-label))
|
||||
(p :class (get colors "text") stream-message)
|
||||
(p :class (str "text-sm " (get colors "sub"))
|
||||
"Resolved at: " (code :class (str "px-1 rounded " (get colors "code")) stream-time)))))
|
||||
|
||||
;; Skeleton placeholder for a stream slot
|
||||
(defcomp ~stream-skeleton ()
|
||||
(div :class "rounded-lg border border-stone-200 bg-stone-50 p-5 space-y-3 animate-pulse"
|
||||
(div :class "flex items-center gap-2"
|
||||
(div :class "w-3 h-3 rounded-full bg-stone-300")
|
||||
(div :class "h-6 bg-stone-200 rounded w-1/3"))
|
||||
(div :class "h-4 bg-stone-200 rounded w-2/3")
|
||||
(div :class "h-4 bg-stone-200 rounded w-1/2")))
|
||||
|
||||
;; Static layout — takes &rest children where the three suspense slots go.
|
||||
(defcomp ~streaming-demo-layout (&rest children)
|
||||
(div :class "space-y-8"
|
||||
(div :class "border-b border-stone-200 pb-6"
|
||||
(h1 :class "text-2xl font-bold text-stone-900" "Streaming & Suspense Demo")
|
||||
(p :class "mt-2 text-stone-600"
|
||||
"This page uses " (code :class "bg-stone-100 px-1 rounded text-violet-700" ":stream true")
|
||||
" in its defpage declaration. The browser receives the page skeleton instantly, "
|
||||
"then content fills in as IO resolves."))
|
||||
"then three IO sources resolve at staggered intervals (1s, 3s, 5s)."))
|
||||
|
||||
;; Timestamp proves this was streamed
|
||||
(div :class "rounded-lg border border-green-200 bg-green-50 p-5 space-y-3"
|
||||
(h2 :class "text-lg font-semibold text-green-900" "Streamed Content")
|
||||
(p :class "text-green-800" message)
|
||||
(p :class "text-green-700 text-sm"
|
||||
"Data resolved at: " (code :class "bg-green-100 px-1 rounded" streamed-at))
|
||||
(p :class "text-green-700 text-sm"
|
||||
"This content arrived via a " (code :class "bg-green-100 px-1 rounded" "<script>__sxResolve(...)</script>")
|
||||
" chunk streamed after the initial HTML shell."))
|
||||
;; Slot: suspense placeholders (or resolved content)
|
||||
(div :class "grid gap-4" children)
|
||||
|
||||
;; Flow diagram
|
||||
(div :class "space-y-4"
|
||||
@@ -36,25 +63,31 @@
|
||||
(div :class "flex-shrink-0 w-8 h-8 rounded-full bg-violet-100 flex items-center justify-center text-violet-700 font-bold text-sm"
|
||||
(get item "label"))
|
||||
(p :class "text-stone-700 text-sm pt-1" (get item "detail"))))
|
||||
items)))
|
||||
(list
|
||||
{:label "Shell" :detail "HTML shell with three suspense placeholders sent immediately"}
|
||||
{:label "Boot" :detail "sx-browser.js loads, renders fallback skeletons"}
|
||||
{:label "1s" :detail "Fast API responds — first skeleton replaced with green box"}
|
||||
{:label "3s" :detail "Database query completes — second skeleton replaced with blue box"}
|
||||
{:label "5s" :detail "ML inference finishes — third skeleton replaced with amber box"}))))
|
||||
|
||||
;; How it works
|
||||
(div :class "rounded-lg border border-blue-200 bg-blue-50 p-5 space-y-3"
|
||||
(h2 :class "text-lg font-semibold text-blue-900" "How Streaming Works")
|
||||
(ol :class "list-decimal list-inside text-blue-800 space-y-2 text-sm"
|
||||
(li "Server starts data fetch and header fetch " (em "concurrently"))
|
||||
(li "HTML shell with " (code "~suspense") " placeholders is sent immediately")
|
||||
(li "Browser loads sx-browser.js, renders the page with loading skeletons")
|
||||
(li "Data IO completes — server sends " (code "<script>__sxResolve(\"stream-content\", ...)</script>"))
|
||||
(li "sx.js calls " (code "Sx.resolveSuspense()") " — replaces skeleton with real content")
|
||||
(li "Header IO completes — same process for header area")))
|
||||
(div :class "rounded-lg border border-violet-200 bg-violet-50 p-5 space-y-3"
|
||||
(h2 :class "text-lg font-semibold text-violet-900" "How Multi-Stream Works")
|
||||
(ol :class "list-decimal list-inside text-violet-800 space-y-2 text-sm"
|
||||
(li "Server evaluates " (code ":data") " — gets an " (em "async generator"))
|
||||
(li "HTML shell with three " (code "~suspense") " placeholders sent immediately")
|
||||
(li "Generator yields first chunk after 1s — server sends " (code "__sxResolve(\"stream-fast\", ...)"))
|
||||
(li "Generator yields second chunk after 3s — " (code "__sxResolve(\"stream-medium\", ...)"))
|
||||
(li "Generator yields third chunk after 5s — " (code "__sxResolve(\"stream-slow\", ...)"))
|
||||
(li "Each resolve replaces its skeleton independently")))
|
||||
|
||||
;; Technical details
|
||||
(div :class "rounded-lg border border-amber-200 bg-amber-50 p-4 text-sm space-y-2"
|
||||
(p :class "font-semibold text-amber-800" "Implementation details")
|
||||
(ul :class "list-disc list-inside text-amber-700 space-y-1"
|
||||
(li (code "defpage :stream true") " — opts the page into streaming response")
|
||||
(li (code "~suspense :id \"...\" :fallback (...)") " — renders loading skeleton until resolved")
|
||||
(li "Quart async generator response — yields chunks as they become available")
|
||||
(li "Resolution via " (code "__sxResolve(id, sx)") " inline scripts in the stream")
|
||||
(li "Falls back to standard (non-streaming) response for SX/HTMX requests")))))
|
||||
(div :class "rounded-lg border border-stone-200 bg-stone-50 p-4 text-sm space-y-2"
|
||||
(p :class "font-semibold text-stone-800" "Implementation details")
|
||||
(ul :class "list-disc list-inside text-stone-600 space-y-1"
|
||||
(li (code "defpage :stream true") " — opts the page into chunked transfer encoding")
|
||||
(li (code ":data") " helper is an async generator — each " (code "yield") " resolves a different suspense slot")
|
||||
(li "Each yield includes " (code "_stream_id") " matching a " (code "~suspense :id") " in the shell")
|
||||
(li (code ":content") " expression is re-evaluated with each yield's bindings")
|
||||
(li "Headers stream concurrently — independent of the data generator")
|
||||
(li "Future: SSE/WebSocket for re-resolving slots after initial page load")))))
|
||||
|
||||
@@ -468,15 +468,16 @@
|
||||
:sub-href "/isomorphism/"
|
||||
:sub-nav (~section-nav :items isomorphism-nav-items :current "Streaming")
|
||||
:selected "Streaming")
|
||||
:fallback (div :class "p-8 space-y-4 animate-pulse"
|
||||
(div :class "h-8 bg-stone-200 rounded w-1/3")
|
||||
(div :class "h-4 bg-stone-200 rounded w-2/3")
|
||||
(div :class "h-64 bg-stone-200 rounded"))
|
||||
:shell (~streaming-demo-layout
|
||||
(~suspense :id "stream-fast" :fallback (~stream-skeleton))
|
||||
(~suspense :id "stream-medium" :fallback (~stream-skeleton))
|
||||
(~suspense :id "stream-slow" :fallback (~stream-skeleton)))
|
||||
:data (streaming-demo-data)
|
||||
:content (~streaming-demo-content
|
||||
:streamed-at streamed-at
|
||||
:message message
|
||||
:items items))
|
||||
:content (~streaming-demo-chunk
|
||||
:stream-label stream-label
|
||||
:stream-color stream-color
|
||||
:stream-message stream-message
|
||||
:stream-time stream-time))
|
||||
|
||||
;; Wildcard must come AFTER specific routes (first-match routing)
|
||||
(defpage isomorphism-page
|
||||
@@ -534,6 +535,7 @@
|
||||
"social-sharing" (~plan-social-sharing-content)
|
||||
"sx-ci" (~plan-sx-ci-content)
|
||||
"cssx-components" (~plan-cssx-components-content)
|
||||
"live-streaming" (~plan-live-streaming-content)
|
||||
:else (~plans-index-content)))
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
@@ -838,19 +838,40 @@ def _data_test_data() -> dict:
|
||||
}
|
||||
|
||||
|
||||
async def _streaming_demo_data() -> dict:
|
||||
"""Simulate slow IO for streaming demo — 1.5s delay."""
|
||||
async def _streaming_demo_data():
|
||||
"""Multi-stream demo — yields three chunks at staggered intervals.
|
||||
|
||||
Each yield is a dict with _stream_id (matching a ~suspense :id in the
|
||||
shell) plus bindings for the :content expression. The streaming
|
||||
infrastructure detects the async generator and resolves each suspense
|
||||
placeholder as each chunk arrives.
|
||||
"""
|
||||
import asyncio
|
||||
await asyncio.sleep(1.5)
|
||||
from datetime import datetime, timezone
|
||||
return {
|
||||
"streamed-at": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
"message": "This content was streamed after a 1.5 second delay.",
|
||||
"items": [
|
||||
{"label": "Shell", "detail": "HTML shell with suspense placeholders sent immediately"},
|
||||
{"label": "Bootstrap", "detail": "sx-browser.js loads, renders fallback skeletons"},
|
||||
{"label": "IO Start", "detail": "Data fetch and header fetch run concurrently"},
|
||||
{"label": "Resolve", "detail": "As each IO completes, <script> chunk replaces placeholder"},
|
||||
{"label": "Done", "detail": "Page fully rendered — all suspense resolved"},
|
||||
],
|
||||
|
||||
await asyncio.sleep(1)
|
||||
yield {
|
||||
"stream-id": "stream-fast",
|
||||
"stream-label": "Fast API",
|
||||
"stream-color": "green",
|
||||
"stream-message": "Responded in ~1 second",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
await asyncio.sleep(2) # 3s total
|
||||
yield {
|
||||
"stream-id": "stream-medium",
|
||||
"stream-label": "Database Query",
|
||||
"stream-color": "blue",
|
||||
"stream-message": "Query completed in ~3 seconds",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
await asyncio.sleep(2) # 5s total
|
||||
yield {
|
||||
"stream-id": "stream-slow",
|
||||
"stream-label": "ML Inference",
|
||||
"stream-color": "amber",
|
||||
"stream-message": "Model inference completed in ~5 seconds",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user