Compare commits
37 Commits
1fe258e3f7
...
179631130c
| Author | SHA1 | Date | |
|---|---|---|---|
| 179631130c | |||
| 5a4a0c0e1c | |||
| 621c0bbf42 | |||
| 5a68046bd8 | |||
| df1aa4e1d1 | |||
| 41c3b9f3b8 | |||
| f5e47678d5 | |||
| 6596fac758 | |||
| 299de98ea8 | |||
| e7a511d40a | |||
| aeac3c0b13 | |||
| 25edc7d64a | |||
| 5cca22ae6d | |||
| 260475a4da | |||
| 2c9d7c95a2 | |||
| fd03eeb0fe | |||
| 47448a6d37 | |||
| cdd775c999 | |||
| 7294f07f5b | |||
| dd774efc18 | |||
| 668a46bec0 | |||
| 9d70599416 | |||
| 309579aec7 | |||
| ca0ea69ca1 | |||
| 44095c0a04 | |||
| 5991a5b397 | |||
| b9b315c86f | |||
| ccf9a155ad | |||
| fa70c5f297 | |||
| 3574f7e163 | |||
| 6312eb66a2 | |||
| 917a487195 | |||
| 605aafa2eb | |||
| 7f466f0fd6 | |||
| 6421a23223 | |||
| 342da2bd44 | |||
| a05d642461 |
@@ -14,7 +14,7 @@
|
||||
// =========================================================================
|
||||
|
||||
var NIL = Object.freeze({ _nil: true, toString: function() { return "nil"; } });
|
||||
var SX_VERSION = "2026-03-07T09:51:42Z";
|
||||
var SX_VERSION = "2026-03-07T21:45:27Z";
|
||||
|
||||
function isNil(x) { return x === NIL || x === null || x === undefined; }
|
||||
function isSxTruthy(x) { return x !== false && !isNil(x); }
|
||||
@@ -2007,6 +2007,13 @@ return domAppendToHead(link); }, domQueryAll(container, "link[rel=\"stylesheet\"
|
||||
// page-data-cache-set
|
||||
var pageDataCacheSet = function(cacheKey, data) { return dictSet(_pageDataCache, cacheKey, {"data": data, "ts": nowMs()}); };
|
||||
|
||||
// current-page-layout
|
||||
var currentPageLayout = function() { return (function() {
|
||||
var pathname = urlPathname(browserLocationHref());
|
||||
var match = findMatchingRoute(pathname, _pageRoutes);
|
||||
return (isSxTruthy(isNil(match)) ? "" : sxOr(get(match, "layout"), ""));
|
||||
})(); };
|
||||
|
||||
// swap-rendered-content
|
||||
var swapRenderedContent = function(target, rendered, pathname) { return (domSetTextContent(target, ""), domAppend(target, rendered), hoistHeadElementsFull(target), processElements(target), sxHydrateElements(target), domDispatch(target, "sx:clientRoute", {["pathname"]: pathname}), logInfo((String("sx:route client ") + String(pathname)))); };
|
||||
|
||||
@@ -2024,6 +2031,9 @@ return domAppendToHead(link); }, domQueryAll(container, "link[rel=\"stylesheet\"
|
||||
var tryClientRoute = function(pathname, targetSel) { return (function() {
|
||||
var match = findMatchingRoute(pathname, _pageRoutes);
|
||||
return (isSxTruthy(isNil(match)) ? (logInfo((String("sx:route no match (") + String(len(_pageRoutes)) + String(" routes) ") + String(pathname))), false) : (function() {
|
||||
var targetLayout = sxOr(get(match, "layout"), "");
|
||||
var curLayout = currentPageLayout();
|
||||
return (isSxTruthy(!isSxTruthy((targetLayout == curLayout))) ? (logInfo((String("sx:route server (layout: ") + String(curLayout) + String(" -> ") + String(targetLayout) + String(") ") + String(pathname))), false) : (function() {
|
||||
var contentSrc = get(match, "content");
|
||||
var closure = sxOr(get(match, "closure"), {});
|
||||
var params = get(match, "params");
|
||||
@@ -2036,7 +2046,7 @@ return domAppendToHead(link); }, domQueryAll(container, "link[rel=\"stylesheet\"
|
||||
if (isSxTruthy(hasIo)) {
|
||||
registerIoDeps(ioDeps);
|
||||
}
|
||||
return (isSxTruthy(get(match, "has-data")) ? (function() {
|
||||
return (isSxTruthy(get(match, "stream")) ? (logInfo((String("sx:route streaming ") + String(pathname))), fetchStreaming(target, pathname, buildRequestHeaders(target, loadedComponentNames(), _cssHash)), true) : (isSxTruthy(get(match, "has-data")) ? (function() {
|
||||
var cacheKey = pageDataCacheKey(pageName, params);
|
||||
var cached = pageDataCacheGet(cacheKey);
|
||||
return (isSxTruthy(cached) ? (function() {
|
||||
@@ -2057,8 +2067,9 @@ return (function() {
|
||||
var env = merge(closure, params);
|
||||
var rendered = tryEvalContent(contentSrc, env);
|
||||
return (isSxTruthy(isNil(rendered)) ? (logInfo((String("sx:route server (eval failed) ") + String(pathname))), false) : (swapRenderedContent(target, rendered, pathname), true));
|
||||
})())));
|
||||
})()));
|
||||
})()));
|
||||
})());
|
||||
})());
|
||||
})());
|
||||
})(); };
|
||||
@@ -2367,6 +2378,21 @@ allKf = concat(allKf, styleValueKeyframes_(sv)); } }
|
||||
processElements(el);
|
||||
return sxHydrateElements(el);
|
||||
})() : NIL);
|
||||
})(); };
|
||||
|
||||
// resolve-suspense
|
||||
var resolveSuspense = function(id, sx) { processSxScripts(NIL);
|
||||
return (function() {
|
||||
var el = domQuery((String("[data-suspense=\"") + String(id) + String("\"]")));
|
||||
return (isSxTruthy(el) ? (function() {
|
||||
var exprs = parse(sx);
|
||||
var env = getRenderEnv(NIL);
|
||||
domSetTextContent(el, "");
|
||||
{ var _c = exprs; for (var _i = 0; _i < _c.length; _i++) { var expr = _c[_i]; domAppend(el, renderToDom(expr, env, NIL)); } }
|
||||
processElements(el);
|
||||
sxHydrateElements(el);
|
||||
return domDispatch(el, "sx:resolved", {"id": id});
|
||||
})() : logWarn((String("resolveSuspense: no element for id=") + String(id))));
|
||||
})(); };
|
||||
|
||||
// sx-hydrate-elements
|
||||
@@ -2567,7 +2593,7 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
|
||||
function domCreateElement(tag, ns) {
|
||||
if (!_hasDom) return null;
|
||||
if (ns) return document.createElementNS(ns, tag);
|
||||
if (ns && ns !== NIL) return document.createElementNS(ns, tag);
|
||||
return document.createElement(tag);
|
||||
}
|
||||
|
||||
@@ -3007,6 +3033,134 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
}).catch(function() { location.reload(); });
|
||||
}
|
||||
|
||||
function fetchStreaming(target, url, headers) {
|
||||
// Streaming fetch for multi-stream pages.
|
||||
// First chunk = OOB SX swap (shell with skeletons).
|
||||
// Subsequent chunks = __sxResolve script tags filling suspense slots.
|
||||
var opts = { headers: headers };
|
||||
try {
|
||||
var h = new URL(url, location.href).hostname;
|
||||
if (h !== location.hostname &&
|
||||
(h.indexOf(".rose-ash.com") >= 0 || h.indexOf(".localhost") >= 0)) {
|
||||
opts.credentials = "include";
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
fetch(url, opts).then(function(resp) {
|
||||
if (!resp.ok || !resp.body) {
|
||||
// Fallback: non-streaming
|
||||
return resp.text().then(function(text) {
|
||||
text = stripComponentScripts(text);
|
||||
text = extractResponseCss(text);
|
||||
text = text.trim();
|
||||
if (text.charAt(0) === "(") {
|
||||
var dom = sxRender(text);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var reader = resp.body.getReader();
|
||||
var decoder = new TextDecoder();
|
||||
var buffer = "";
|
||||
var initialSwapDone = false;
|
||||
// Regex to match __sxResolve script tags
|
||||
var RESOLVE_START = "<script>window.__sxResolve&&window.__sxResolve(";
|
||||
var RESOLVE_END = ")</script>";
|
||||
|
||||
function processResolveScripts() {
|
||||
// Strip and load any extra component defs before resolve scripts
|
||||
buffer = stripSxScripts(buffer);
|
||||
var idx;
|
||||
while ((idx = buffer.indexOf(RESOLVE_START)) >= 0) {
|
||||
var endIdx = buffer.indexOf(RESOLVE_END, idx);
|
||||
if (endIdx < 0) break; // incomplete, wait for more data
|
||||
var argsStr = buffer.substring(idx + RESOLVE_START.length, endIdx);
|
||||
buffer = buffer.substring(endIdx + RESOLVE_END.length);
|
||||
// argsStr is: "stream-id","sx source"
|
||||
var commaIdx = argsStr.indexOf(",");
|
||||
if (commaIdx >= 0) {
|
||||
try {
|
||||
var id = JSON.parse(argsStr.substring(0, commaIdx));
|
||||
var sx = JSON.parse(argsStr.substring(commaIdx + 1));
|
||||
if (typeof Sx !== "undefined" && Sx.resolveSuspense) {
|
||||
Sx.resolveSuspense(id, sx);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[sx-ref] resolve parse error:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function pump() {
|
||||
return reader.read().then(function(result) {
|
||||
buffer += decoder.decode(result.value || new Uint8Array(), { stream: !result.done });
|
||||
|
||||
if (!initialSwapDone) {
|
||||
// Look for the first resolve script — everything before it is OOB content
|
||||
var scriptIdx = buffer.indexOf("<script>window.__sxResolve");
|
||||
// If we found a script tag, or the stream is done, process OOB
|
||||
var oobEnd = scriptIdx >= 0 ? scriptIdx : (result.done ? buffer.length : -1);
|
||||
if (oobEnd >= 0) {
|
||||
var oobContent = buffer.substring(0, oobEnd);
|
||||
buffer = buffer.substring(oobEnd);
|
||||
initialSwapDone = true;
|
||||
|
||||
// Process OOB SX content (same as fetchAndRestore)
|
||||
oobContent = stripComponentScripts(oobContent);
|
||||
// Also strip bare <script type="text/sx"> (extra defs from resolve chunks)
|
||||
oobContent = stripSxScripts(oobContent);
|
||||
oobContent = extractResponseCss(oobContent);
|
||||
oobContent = oobContent.trim();
|
||||
if (oobContent.charAt(0) === "(") {
|
||||
try {
|
||||
var dom = sxRender(oobContent);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
// Dispatch clientRoute so nav links update active state
|
||||
domDispatch(target, "sx:clientRoute",
|
||||
{ pathname: new URL(url, location.href).pathname });
|
||||
} catch (err) {
|
||||
console.error("[sx-ref] streaming OOB swap error:", err);
|
||||
}
|
||||
}
|
||||
// Process any resolve scripts already in buffer
|
||||
processResolveScripts();
|
||||
}
|
||||
} else {
|
||||
// Process resolve scripts as they arrive
|
||||
processResolveScripts();
|
||||
}
|
||||
|
||||
if (!result.done) return pump();
|
||||
});
|
||||
}
|
||||
|
||||
return pump();
|
||||
}).catch(function(err) {
|
||||
console.error("[sx-ref] streaming fetch error:", err);
|
||||
location.reload();
|
||||
});
|
||||
}
|
||||
|
||||
function fetchPreload(url, headers, cache) {
|
||||
fetch(url, { headers: headers }).then(function(resp) {
|
||||
if (!resp.ok) return;
|
||||
@@ -3471,6 +3625,14 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function stripSxScripts(text) {
|
||||
// Strip <script type="text/sx">...</script> (without data-components).
|
||||
// These contain extra component defs from streaming resolve chunks.
|
||||
var SxObj = typeof Sx !== "undefined" ? Sx : null;
|
||||
return text.replace(/<script[^>]*type="text\/sx"[^>]*>([\s\S]*?)<\/script>/gi,
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function extractResponseCss(text) {
|
||||
if (!_hasDom) return text;
|
||||
var target = document.getElementById("sx-css");
|
||||
@@ -4480,6 +4642,12 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
renderToSx: function(expr, env) { return renderToSx(expr, env || merge(componentEnv)); },
|
||||
renderToDom: _hasDom ? function(expr, env, ns) { return renderToDom(expr, env || merge(componentEnv), ns || null); } : null,
|
||||
parseTriggerSpec: typeof parseTriggerSpec === "function" ? parseTriggerSpec : null,
|
||||
parseTime: typeof parseTime === "function" ? parseTime : null,
|
||||
defaultTrigger: typeof defaultTrigger === "function" ? defaultTrigger : null,
|
||||
parseSwapSpec: typeof parseSwapSpec === "function" ? parseSwapSpec : null,
|
||||
parseRetrySpec: typeof parseRetrySpec === "function" ? parseRetrySpec : null,
|
||||
nextRetryMs: typeof nextRetryMs === "function" ? nextRetryMs : null,
|
||||
filterParams: typeof filterParams === "function" ? filterParams : null,
|
||||
morphNode: typeof morphNode === "function" ? morphNode : null,
|
||||
morphChildren: typeof morphChildren === "function" ? morphChildren : null,
|
||||
swapDomNodes: typeof swapDomNodes === "function" ? swapDomNodes : null,
|
||||
@@ -4492,6 +4660,7 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
update: typeof sxUpdateElement === "function" ? sxUpdateElement : null,
|
||||
renderComponent: typeof sxRenderComponent === "function" ? sxRenderComponent : null,
|
||||
getEnv: function() { return componentEnv; },
|
||||
resolveSuspense: typeof resolveSuspense === "function" ? resolveSuspense : null,
|
||||
init: typeof bootInit === "function" ? bootInit : null,
|
||||
splitPathSegments: splitPathSegments,
|
||||
parseRoutePattern: parseRoutePattern,
|
||||
@@ -4514,7 +4683,18 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
|
||||
// --- Auto-init ---
|
||||
if (typeof document !== "undefined") {
|
||||
var _sxInit = function() { bootInit(); };
|
||||
var _sxInit = function() {
|
||||
bootInit();
|
||||
// Process any suspense resolutions that arrived before init
|
||||
if (global.__sxPending) {
|
||||
for (var pi = 0; pi < global.__sxPending.length; pi++) {
|
||||
resolveSuspense(global.__sxPending[pi].id, global.__sxPending[pi].sx);
|
||||
}
|
||||
global.__sxPending = null;
|
||||
}
|
||||
// Set up direct resolution for future chunks
|
||||
global.__sxResolve = function(id, sx) { resolveSuspense(id, sx); };
|
||||
};
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", _sxInit);
|
||||
} else {
|
||||
|
||||
@@ -131,6 +131,35 @@
|
||||
}
|
||||
}
|
||||
|
||||
function loadDepsFromBootstrap(env) {
|
||||
if (Sx.scanRefs) {
|
||||
env["scan-refs"] = Sx.scanRefs;
|
||||
env["scan-components-from-source"] = Sx.scanComponentsFromSource;
|
||||
env["transitive-deps"] = Sx.transitiveDeps;
|
||||
env["compute-all-deps"] = Sx.computeAllDeps;
|
||||
env["components-needed"] = Sx.componentsNeeded;
|
||||
env["page-component-bundle"] = Sx.pageComponentBundle;
|
||||
env["page-css-classes"] = Sx.pageCssClasses;
|
||||
env["scan-io-refs"] = Sx.scanIoRefs;
|
||||
env["transitive-io-refs"] = Sx.transitiveIoRefs;
|
||||
env["compute-all-io-refs"] = Sx.computeAllIoRefs;
|
||||
env["component-pure?"] = Sx.componentPure_p;
|
||||
env["test-env"] = function() { return env; };
|
||||
}
|
||||
}
|
||||
|
||||
function loadEngineFromBootstrap(env) {
|
||||
if (Sx.parseTime) {
|
||||
env["parse-time"] = Sx.parseTime;
|
||||
env["parse-trigger-spec"] = Sx.parseTriggerSpec;
|
||||
env["default-trigger"] = Sx.defaultTrigger;
|
||||
env["parse-swap-spec"] = Sx.parseSwapSpec;
|
||||
env["parse-retry-spec"] = Sx.parseRetrySpec;
|
||||
env["next-retry-ms"] = function(cur, cap) { return Math.min(cur * 2, cap); };
|
||||
env["filter-params"] = Sx.filterParams;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Legacy runner (monolithic test.sx) ---
|
||||
window.sxRunTests = function(srcId, outId, btnId) {
|
||||
var src = document.getElementById(srcId).textContent;
|
||||
@@ -169,6 +198,8 @@
|
||||
"parser": { needs: ["sx-parse"] },
|
||||
"router": { needs: [] },
|
||||
"render": { needs: ["render-html"] },
|
||||
"deps": { needs: [] },
|
||||
"engine": { needs: [] },
|
||||
};
|
||||
|
||||
window.sxRunModularTests = function(specName, outId, btnId) {
|
||||
@@ -190,8 +221,10 @@
|
||||
var sn = specs[si];
|
||||
if (!SPECS[sn]) continue;
|
||||
|
||||
// Load router from bootstrap if needed
|
||||
// Load module functions from bootstrap
|
||||
if (sn === "router") loadRouterFromBootstrap(ctx.env);
|
||||
if (sn === "deps") loadDepsFromBootstrap(ctx.env);
|
||||
if (sn === "engine") loadEngineFromBootstrap(ctx.env);
|
||||
|
||||
// Find spec source — either per-spec textarea or embedded in overview
|
||||
var specEl = document.getElementById("test-spec-" + sn);
|
||||
|
||||
@@ -1588,6 +1588,34 @@
|
||||
isTruthy: isSxTruthy,
|
||||
isNil: isNil,
|
||||
|
||||
/**
|
||||
* Resolve a streaming suspense placeholder.
|
||||
* Called by inline <script> tags that arrive during chunked transfer:
|
||||
* __sxResolve("content", "(~article :title \"Hello\")")
|
||||
*
|
||||
* Finds the suspense wrapper by data-suspense attribute, renders the
|
||||
* new SX content, and replaces the wrapper's children.
|
||||
*/
|
||||
resolveSuspense: function (id, sx) {
|
||||
// Process any new <script type="text/sx"> tags (streaming extras)
|
||||
Sx.processScripts();
|
||||
var el = document.querySelector('[data-suspense="' + id + '"]');
|
||||
if (!el) {
|
||||
console.warn("[sx] resolveSuspense: no element for id=" + id);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
var node = Sx.render(sx);
|
||||
el.textContent = "";
|
||||
el.appendChild(node);
|
||||
if (typeof SxEngine !== "undefined") SxEngine.process(el);
|
||||
Sx.hydrate(el);
|
||||
el.dispatchEvent(new CustomEvent("sx:resolved", { bubbles: true, detail: { id: id } }));
|
||||
} catch (e) {
|
||||
console.error("[sx] resolveSuspense error for id=" + id, e);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Mount a sx expression into a DOM element, replacing its contents.
|
||||
* Sx.mount(el, '(~card :title "Hi")')
|
||||
@@ -3164,6 +3192,15 @@
|
||||
Sx.processScripts();
|
||||
Sx.hydrate();
|
||||
SxEngine.process();
|
||||
// Process any streaming suspense resolutions that arrived before init
|
||||
if (global.__sxPending) {
|
||||
for (var pi = 0; pi < global.__sxPending.length; pi++) {
|
||||
Sx.resolveSuspense(global.__sxPending[pi].id, global.__sxPending[pi].sx);
|
||||
}
|
||||
global.__sxPending = null;
|
||||
}
|
||||
// Replace bootstrap resolver with direct calls
|
||||
global.__sxResolve = function (id, sx) { Sx.resolveSuspense(id, sx); };
|
||||
};
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", init);
|
||||
|
||||
@@ -5,6 +5,10 @@ module.exports = {
|
||||
// Levels 1–4 produce shades 400–100 (level 5+ yields 0 or negative = no match)
|
||||
{ pattern: /^bg-sky-(100|200|300|400|500)$/ },
|
||||
{ pattern: /^bg-violet-(100|200|300|400|500)$/ },
|
||||
// Streaming demo: ~streaming-demo-chunk builds classes dynamically via (str ...)
|
||||
// from a color map — Tailwind scanner can't detect these in string literals.
|
||||
{ pattern: /^(bg|text|border)-(green|blue|amber)-(50|100|200|300|400|500|600|700|800|900)$/ },
|
||||
'w-3', 'h-3', 'w-1/3', 'w-2/3', 'w-1/2', 'w-3/4', 'animate-pulse',
|
||||
],
|
||||
content: [
|
||||
'/root/rose-ash/shared/sx/templates/**/*.sx',
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -983,6 +983,13 @@ def _sf_defpage(expr: list, env: dict) -> PageDef:
|
||||
if isinstance(cache_result, dict):
|
||||
cache = cache_result
|
||||
|
||||
# Stream — evaluate (it's a static boolean)
|
||||
stream_val = slots.get("stream")
|
||||
stream = False
|
||||
if stream_val is not None:
|
||||
stream_result = _trampoline(_eval(stream_val, env))
|
||||
stream = bool(stream_result)
|
||||
|
||||
page = PageDef(
|
||||
name=name_sym.name,
|
||||
path=path,
|
||||
@@ -994,6 +1001,9 @@ def _sf_defpage(expr: list, env: dict) -> PageDef:
|
||||
filter_expr=slots.get("filter"),
|
||||
aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"),
|
||||
stream=stream,
|
||||
fallback_expr=slots.get("fallback"),
|
||||
shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
env[f"page:{name_sym.name}"] = page
|
||||
|
||||
@@ -704,6 +704,31 @@ def _build_pages_sx(service: str) -> str:
|
||||
if io_deps else "()"
|
||||
)
|
||||
|
||||
# Extract layout identity for client-side routing.
|
||||
# When layout changes between pages, client routing falls through
|
||||
# to server so OOB header updates are applied.
|
||||
layout_id = ""
|
||||
if isinstance(page_def.layout, str):
|
||||
layout_id = page_def.layout
|
||||
elif isinstance(page_def.layout, list):
|
||||
from .types import Keyword as _Kw, Symbol as _Sym
|
||||
first = page_def.layout[0]
|
||||
if isinstance(first, _Kw):
|
||||
layout_id = first.name
|
||||
elif isinstance(first, _Sym):
|
||||
layout_id = first.name
|
||||
else:
|
||||
layout_id = str(first)
|
||||
# Append section kwarg to distinguish same-layout-type
|
||||
# with different sections (e.g. sx-section+Docs vs sx-section+Testing)
|
||||
raw_layout = page_def.layout
|
||||
for li in range(1, len(raw_layout) - 1):
|
||||
if isinstance(raw_layout[li], _Kw) and raw_layout[li].name == "section":
|
||||
val = raw_layout[li + 1]
|
||||
if val is not None:
|
||||
layout_id = f"{layout_id}:{val}"
|
||||
break
|
||||
|
||||
# Build closure as SX dict
|
||||
closure_parts: list[str] = []
|
||||
for k, v in page_def.closure.items():
|
||||
@@ -711,11 +736,15 @@ def _build_pages_sx(service: str) -> str:
|
||||
closure_parts.append(f":{k} {_sx_literal(v)}")
|
||||
closure_sx = "{" + " ".join(closure_parts) + "}"
|
||||
|
||||
stream = "true" if page_def.stream else "false"
|
||||
|
||||
entry = (
|
||||
"{:name " + _sx_literal(page_def.name)
|
||||
+ " :path " + _sx_literal(page_def.path)
|
||||
+ " :auth " + _sx_literal(auth)
|
||||
+ " :has-data " + has_data
|
||||
+ " :stream " + stream
|
||||
+ " :layout " + _sx_literal(layout_id)
|
||||
+ " :io-deps " + io_deps_sx
|
||||
+ " :content " + _sx_literal(content_src)
|
||||
+ " :deps " + deps_sx
|
||||
@@ -826,6 +855,139 @@ def sx_page(ctx: dict, page_sx: str, *,
|
||||
)
|
||||
|
||||
|
||||
_SX_STREAMING_RESOLVE = """\
|
||||
<script>window.__sxResolve&&window.__sxResolve({id},{sx})</script>"""
|
||||
|
||||
_SX_STREAMING_BOOTSTRAP = """\
|
||||
<script>window.__sxPending=[];window.__sxResolve=function(i,s){\
|
||||
if(window.Sx&&Sx.resolveSuspense){Sx.resolveSuspense(i,s)}\
|
||||
else{window.__sxPending.push({id:i,sx:s})}}</script>"""
|
||||
|
||||
|
||||
def sx_page_streaming_parts(ctx: dict, page_html: str, *,
|
||||
page_sx: str = "",
|
||||
meta_html: str = "") -> tuple[str, str]:
|
||||
"""Split the page into shell (before scripts) and tail (scripts).
|
||||
|
||||
For streaming, the initial page is rendered to **HTML** server-side so
|
||||
``[data-suspense]`` elements are in the DOM immediately — no client-side
|
||||
SX rendering needed for the shell. Resolution scripts can find and
|
||||
replace suspense placeholders without waiting for sx-browser.js to boot.
|
||||
|
||||
Args:
|
||||
page_html: Server-rendered HTML for the page body (with suspense
|
||||
placeholders already as real HTML elements).
|
||||
page_sx: SX source scanned for component deps (may differ from
|
||||
page_html when components were expanded server-side).
|
||||
"""
|
||||
from .jinja_bridge import components_for_page, css_classes_for_page
|
||||
from .css_registry import lookup_rules, get_preamble, registry_loaded, store_css_hash
|
||||
|
||||
from quart import current_app as _ca
|
||||
|
||||
# Scan the SX source for component deps (needed for resolution scripts
|
||||
# that may contain component calls the client must render)
|
||||
scan_source = page_sx or page_html
|
||||
component_defs, component_hash = components_for_page(scan_source, service=_ca.name)
|
||||
|
||||
client_hash = _get_sx_comp_cookie()
|
||||
if not _is_dev_mode() and client_hash and client_hash == component_hash:
|
||||
component_defs = ""
|
||||
|
||||
sx_css = ""
|
||||
sx_css_classes = ""
|
||||
if registry_loaded():
|
||||
classes = css_classes_for_page(scan_source, service=_ca.name)
|
||||
classes.update(["bg-stone-50", "text-stone-900"])
|
||||
rules = lookup_rules(classes)
|
||||
sx_css = get_preamble() + rules
|
||||
sx_css_classes = store_css_hash(classes)
|
||||
|
||||
asset_url = get_asset_url(ctx)
|
||||
title = ctx.get("base_title", "Rose Ash")
|
||||
csrf = _get_csrf_token()
|
||||
|
||||
styles_hash = _get_style_dict_hash()
|
||||
client_styles_hash = _get_sx_styles_cookie()
|
||||
styles_json = "" if (not _is_dev_mode() and client_styles_hash == styles_hash) else _build_style_dict_json()
|
||||
|
||||
import logging
|
||||
from quart import current_app
|
||||
pages_sx = _build_pages_sx(current_app.name)
|
||||
|
||||
sx_js_hash = _script_hash("sx-browser.js")
|
||||
body_js_hash = _script_hash("body.js")
|
||||
|
||||
# Shell: head + body with server-rendered HTML (not SX mount script)
|
||||
shell = (
|
||||
'<!doctype html>\n<html lang="en">\n<head>\n'
|
||||
'<meta charset="utf-8">\n'
|
||||
'<meta name="viewport" content="width=device-width, initial-scale=1">\n'
|
||||
'<meta name="robots" content="index,follow">\n'
|
||||
'<meta name="theme-color" content="#ffffff">\n'
|
||||
f'<title>{_html_escape(title)}</title>\n'
|
||||
f'{meta_html}'
|
||||
'<style>@media (min-width: 768px) { .js-mobile-sentinel { display:none !important; } }</style>\n'
|
||||
f'<meta name="csrf-token" content="{_html_escape(csrf)}">\n'
|
||||
f'<style id="sx-css">{sx_css}</style>\n'
|
||||
f'<meta name="sx-css-classes" content="{sx_css_classes}">\n'
|
||||
'<script src="https://unpkg.com/prismjs/prism.js"></script>\n'
|
||||
'<script src="https://unpkg.com/prismjs/components/prism-javascript.min.js"></script>\n'
|
||||
'<script src="https://unpkg.com/prismjs/components/prism-python.min.js"></script>\n'
|
||||
'<script src="https://unpkg.com/prismjs/components/prism-bash.min.js"></script>\n'
|
||||
'<script src="https://cdn.jsdelivr.net/npm/sweetalert2@11"></script>\n'
|
||||
"<script>if(matchMedia('(hover:hover) and (pointer:fine)').matches){document.documentElement.classList.add('hover-capable')}</script>\n"
|
||||
"<script>document.addEventListener('click',function(e){var t=e.target.closest('[data-close-details]');if(!t)return;var d=t.closest('details');if(d)d.removeAttribute('open')})</script>\n"
|
||||
'<style>\n'
|
||||
'details[data-toggle-group="mobile-panels"]>summary{list-style:none}\n'
|
||||
'details[data-toggle-group="mobile-panels"]>summary::-webkit-details-marker{display:none}\n'
|
||||
'@media(min-width:768px){.nav-group:focus-within .submenu,.nav-group:hover .submenu{display:block}}\n'
|
||||
'img{max-width:100%;height:auto}\n'
|
||||
'.clamp-2{display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;overflow:hidden}\n'
|
||||
'.clamp-3{display:-webkit-box;-webkit-line-clamp:3;-webkit-box-orient:vertical;overflow:hidden}\n'
|
||||
'.no-scrollbar::-webkit-scrollbar{display:none}.no-scrollbar{-ms-overflow-style:none;scrollbar-width:none}\n'
|
||||
'details.group{overflow:hidden}details.group>summary{list-style:none}details.group>summary::-webkit-details-marker{display:none}\n'
|
||||
'.sx-indicator{display:none}.sx-request .sx-indicator{display:inline-flex}\n'
|
||||
'.sx-error .sx-indicator{display:none}.sx-loading .sx-indicator{display:inline-flex}\n'
|
||||
'.js-wrap.open .js-pop{display:block}.js-wrap.open .js-backdrop{display:block}\n'
|
||||
'</style>\n'
|
||||
'</head>\n'
|
||||
'<body class="bg-stone-50 text-stone-900">\n'
|
||||
f'<script type="text/sx-styles" data-hash="{styles_hash}">{styles_json}</script>\n'
|
||||
f'<script type="text/sx" data-components data-hash="{component_hash}">{component_defs}</script>\n'
|
||||
f'<script type="text/sx-pages">{pages_sx}</script>\n'
|
||||
# Server-rendered HTML — suspense placeholders are real DOM elements
|
||||
f'{page_html}\n'
|
||||
)
|
||||
|
||||
# Tail: bootstrap suspense resolver + scripts + close
|
||||
tail = (
|
||||
_SX_STREAMING_BOOTSTRAP + '\n'
|
||||
f'<script src="{asset_url}/scripts/sx-browser.js?v={sx_js_hash}"></script>\n'
|
||||
f'<script src="{asset_url}/scripts/body.js?v={body_js_hash}"></script>\n'
|
||||
)
|
||||
|
||||
return shell, tail
|
||||
|
||||
|
||||
def sx_streaming_resolve_script(suspension_id: str, sx_source: str,
|
||||
extra_components: str = "") -> str:
|
||||
"""Build a <script> tag that resolves a streaming suspense placeholder.
|
||||
|
||||
If *extra_components* is non-empty, a ``<script type="text/sx">`` block
|
||||
is prepended so the client loads those component defs before resolving.
|
||||
"""
|
||||
import json
|
||||
parts = []
|
||||
if extra_components:
|
||||
parts.append(f'<script type="text/sx">{extra_components}</script>')
|
||||
parts.append(_SX_STREAMING_RESOLVE.format(
|
||||
id=json.dumps(suspension_id),
|
||||
sx=json.dumps(sx_source),
|
||||
))
|
||||
return "\n".join(parts)
|
||||
|
||||
|
||||
_SCRIPT_HASH_CACHE: dict[str, str] = {}
|
||||
_STYLE_DICT_JSON: str = ""
|
||||
_STYLE_DICT_HASH: str = ""
|
||||
|
||||
@@ -22,7 +22,7 @@ from typing import Any, Callable, Awaitable
|
||||
class Layout:
|
||||
"""A named layout that generates header rows for full and OOB rendering."""
|
||||
|
||||
__slots__ = ("name", "_full_fn", "_oob_fn", "_mobile_fn")
|
||||
__slots__ = ("name", "_full_fn", "_oob_fn", "_mobile_fn", "component_names")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -30,11 +30,13 @@ class Layout:
|
||||
full_fn: Callable[..., str | Awaitable[str]],
|
||||
oob_fn: Callable[..., str | Awaitable[str]],
|
||||
mobile_fn: Callable[..., str | Awaitable[str]] | None = None,
|
||||
component_names: list[str] | None = None,
|
||||
):
|
||||
self.name = name
|
||||
self._full_fn = full_fn
|
||||
self._oob_fn = oob_fn
|
||||
self._mobile_fn = mobile_fn
|
||||
self.component_names = component_names or []
|
||||
|
||||
async def full_headers(self, ctx: dict, **kwargs: Any) -> str:
|
||||
result = self._full_fn(ctx, **kwargs)
|
||||
@@ -109,12 +111,14 @@ def register_sx_layout(name: str, full_defcomp: str, oob_defcomp: str,
|
||||
return await _render_to_sx_with_env(oob_defcomp, env)
|
||||
|
||||
mobile_fn = None
|
||||
comp_names = [f"~{full_defcomp}", f"~{oob_defcomp}"]
|
||||
if mobile_defcomp:
|
||||
async def mobile_fn(ctx: dict, **kw: Any) -> str:
|
||||
env = {k.replace("_", "-"): v for k, v in kw.items()}
|
||||
return await _render_to_sx_with_env(mobile_defcomp, env)
|
||||
comp_names.append(f"~{mobile_defcomp}")
|
||||
|
||||
register_layout(Layout(name, full_fn, oob_fn, mobile_fn))
|
||||
register_layout(Layout(name, full_fn, oob_fn, mobile_fn, comp_names))
|
||||
|
||||
|
||||
# Register built-in layouts via .sx defcomps
|
||||
|
||||
@@ -18,6 +18,7 @@ Usage::
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
@@ -86,10 +87,15 @@ def register_page_helpers(service: str, helpers: dict[str, Any]) -> None:
|
||||
for name in helpers:
|
||||
validate_helper(service, name)
|
||||
|
||||
# Wrap helpers to validate return values at the boundary
|
||||
# Wrap helpers to validate return values at the boundary.
|
||||
# Async generators pass through unwrapped — their yields are validated
|
||||
# by the streaming infrastructure, not at the helper boundary.
|
||||
wrapped: dict[str, Any] = {}
|
||||
for name, fn in helpers.items():
|
||||
if asyncio.iscoroutinefunction(fn):
|
||||
if inspect.isasyncgenfunction(fn):
|
||||
# Async generator: pass through (streaming infra validates yields)
|
||||
wrapped[name] = fn
|
||||
elif asyncio.iscoroutinefunction(fn):
|
||||
@functools.wraps(fn)
|
||||
async def _async_wrap(*a, _fn=fn, _name=name, **kw):
|
||||
result = await _fn(*a, **kw)
|
||||
@@ -168,6 +174,44 @@ async def _eval_slot(expr: Any, env: dict, ctx: Any) -> str:
|
||||
return await async_eval_slot_to_sx(expr, env, ctx)
|
||||
|
||||
|
||||
def _replace_suspense_sexp(sx: str, stream_id: str, replacement: str) -> str:
|
||||
"""Replace a rendered ~suspense div in SX source with replacement content.
|
||||
|
||||
After _eval_slot, ~suspense expands to:
|
||||
(div :id "sx-suspense-{id}" :data-suspense "{id}" :style "display:contents" ...)
|
||||
This finds the balanced s-expression containing :data-suspense "{id}" and
|
||||
replaces it with the given replacement string.
|
||||
"""
|
||||
marker = f':data-suspense "{stream_id}"'
|
||||
idx = sx.find(marker)
|
||||
if idx < 0:
|
||||
return sx
|
||||
# Walk backwards to find the opening paren of the containing (div ...)
|
||||
start = sx.rfind("(", 0, idx)
|
||||
if start < 0:
|
||||
return sx
|
||||
# Walk forward from start to find matching close paren (balanced)
|
||||
depth = 0
|
||||
i = start
|
||||
while i < len(sx):
|
||||
ch = sx[i]
|
||||
if ch == "(":
|
||||
depth += 1
|
||||
elif ch == ")":
|
||||
depth -= 1
|
||||
if depth == 0:
|
||||
return sx[:start] + replacement + sx[i + 1:]
|
||||
elif ch == '"':
|
||||
# Skip string contents
|
||||
i += 1
|
||||
while i < len(sx) and sx[i] != '"':
|
||||
if sx[i] == "\\":
|
||||
i += 1 # skip escaped char
|
||||
i += 1
|
||||
i += 1
|
||||
return sx
|
||||
|
||||
|
||||
async def execute_page(
|
||||
page_def: PageDef,
|
||||
service_name: str,
|
||||
@@ -207,15 +251,47 @@ async def execute_page(
|
||||
ctx = _get_request_context()
|
||||
|
||||
# Evaluate :data expression if present
|
||||
_multi_stream_content = None
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
if isinstance(data_result, dict):
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
# Multi-stream: consume generator, eval :content per chunk,
|
||||
# combine into shell with resolved suspense slots.
|
||||
chunks = []
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk)
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
chunk_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
chunks.append((stream_id, chunk_sx))
|
||||
# Build content: if :shell exists, render it and inline resolved chunks
|
||||
if page_def.shell_expr is not None:
|
||||
shell_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
# Replace each rendered suspense div with resolved content.
|
||||
# _eval_slot expands ~suspense into:
|
||||
# (div :id "sx-suspense-X" :data-suspense "X" :style "display:contents" ...)
|
||||
# We find the balanced s-expr containing :data-suspense "X" and replace it.
|
||||
for stream_id, chunk_sx in chunks:
|
||||
shell_sx = _replace_suspense_sexp(shell_sx, stream_id, chunk_sx)
|
||||
_multi_stream_content = shell_sx
|
||||
else:
|
||||
# No shell: just concatenate all chunks in a fragment
|
||||
parts = " ".join(sx for _, sx in chunks)
|
||||
_multi_stream_content = f"(<> {parts})"
|
||||
elif isinstance(data_result, dict):
|
||||
# Merge with kebab-case keys so SX symbols can reference them
|
||||
for k, v in data_result.items():
|
||||
env[k.replace("_", "-")] = v
|
||||
|
||||
# Render content slot (required)
|
||||
content_sx = await _eval_slot(page_def.content_expr, env, ctx)
|
||||
if _multi_stream_content is not None:
|
||||
content_sx = _multi_stream_content
|
||||
else:
|
||||
content_sx = await _eval_slot(page_def.content_expr, env, ctx)
|
||||
|
||||
# Render optional slots
|
||||
filter_sx = ""
|
||||
@@ -309,6 +385,457 @@ async def execute_page(
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Streaming page execution (Phase 6: Streaming & Suspense)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def execute_page_streaming(
|
||||
page_def: PageDef,
|
||||
service_name: str,
|
||||
url_params: dict[str, Any] | None = None,
|
||||
):
|
||||
"""Execute a page with streaming response.
|
||||
|
||||
All context-dependent setup (g, request, current_app access) runs in
|
||||
this regular async function — called while the request context is live.
|
||||
Returns an async generator that yields pre-computed HTML chunks and
|
||||
awaits already-created tasks (no further context access needed).
|
||||
"""
|
||||
import asyncio
|
||||
from .jinja_bridge import get_component_env, _get_request_context
|
||||
from .async_eval import async_eval
|
||||
from .page import get_template_context
|
||||
from .helpers import (
|
||||
render_to_html as _helpers_render_to_html,
|
||||
sx_page_streaming_parts,
|
||||
sx_streaming_resolve_script,
|
||||
)
|
||||
from .parser import SxExpr, serialize as sx_serialize
|
||||
from .layouts import get_layout
|
||||
|
||||
if url_params is None:
|
||||
url_params = {}
|
||||
|
||||
env = dict(get_component_env())
|
||||
env.update(get_page_helpers(service_name))
|
||||
env.update(page_def.closure)
|
||||
for key, val in url_params.items():
|
||||
kebab = key.replace("_", "-")
|
||||
env[kebab] = val
|
||||
env[key] = val
|
||||
|
||||
ctx = _get_request_context()
|
||||
tctx = await get_template_context()
|
||||
|
||||
# Build fallback expressions
|
||||
if page_def.fallback_expr is not None:
|
||||
fallback_sx = sx_serialize(page_def.fallback_expr)
|
||||
else:
|
||||
fallback_sx = (
|
||||
'(div :class "p-8 animate-pulse"'
|
||||
' (div :class "h-8 bg-stone-200 rounded mb-4 w-1/3")'
|
||||
' (div :class "h-64 bg-stone-200 rounded"))'
|
||||
)
|
||||
header_fallback = '(div :class "h-12 bg-stone-200 animate-pulse")'
|
||||
|
||||
# Resolve layout
|
||||
layout = None
|
||||
layout_kwargs: dict[str, Any] = {}
|
||||
if page_def.layout is not None:
|
||||
if isinstance(page_def.layout, str):
|
||||
layout_name = page_def.layout
|
||||
elif isinstance(page_def.layout, list):
|
||||
from .types import Keyword as SxKeyword, Symbol as SxSymbol
|
||||
raw = page_def.layout
|
||||
first = raw[0]
|
||||
layout_name = (
|
||||
first.name if isinstance(first, (SxKeyword, SxSymbol))
|
||||
else str(first)
|
||||
)
|
||||
i = 1
|
||||
while i < len(raw):
|
||||
k = raw[i]
|
||||
if isinstance(k, SxKeyword) and i + 1 < len(raw):
|
||||
resolved = await async_eval(raw[i + 1], env, ctx)
|
||||
layout_kwargs[k.name.replace("-", "_")] = resolved
|
||||
i += 2
|
||||
else:
|
||||
i += 1
|
||||
else:
|
||||
layout_name = str(page_def.layout)
|
||||
layout = get_layout(layout_name)
|
||||
|
||||
# --- Launch concurrent IO tasks (inherit context via create_task) ---
|
||||
|
||||
_stream_queue: asyncio.Queue = asyncio.Queue()
|
||||
_multi_stream = False
|
||||
|
||||
async def _eval_data_and_content():
|
||||
"""Evaluate :data then :content.
|
||||
|
||||
If :data returns an async generator (multi-stream mode), iterate it
|
||||
and push each (stream_id, content_sx) to _stream_queue incrementally.
|
||||
The main stream loop drains the queue and sends resolve scripts as
|
||||
items arrive — giving true staggered streaming.
|
||||
"""
|
||||
nonlocal _multi_stream
|
||||
try:
|
||||
data_env = dict(env)
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, data_env, ctx)
|
||||
# Async generator: multi-stream mode
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
_multi_stream = True
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk) # copy so pop doesn't mutate
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
await _stream_queue.put(("data", stream_id, content_sx))
|
||||
await _stream_queue.put(("data-done",))
|
||||
return
|
||||
if isinstance(data_result, dict):
|
||||
for k, v in data_result.items():
|
||||
data_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, data_env, ctx) if page_def.content_expr else ""
|
||||
filter_sx = await _eval_slot(page_def.filter_expr, data_env, ctx) if page_def.filter_expr else ""
|
||||
aside_sx = await _eval_slot(page_def.aside_expr, data_env, ctx) if page_def.aside_expr else ""
|
||||
menu_sx = await _eval_slot(page_def.menu_expr, data_env, ctx) if page_def.menu_expr else ""
|
||||
await _stream_queue.put(("data-single", content_sx, filter_sx, aside_sx, menu_sx))
|
||||
except Exception as e:
|
||||
logger.error("Streaming data task failed: %s", e)
|
||||
await _stream_queue.put(("data-done",))
|
||||
|
||||
async def _eval_headers():
|
||||
try:
|
||||
if layout is None:
|
||||
await _stream_queue.put(("headers", "", ""))
|
||||
return
|
||||
rows = await layout.full_headers(tctx, **layout_kwargs)
|
||||
menu = await layout.mobile_menu(tctx, **layout_kwargs)
|
||||
await _stream_queue.put(("headers", rows, menu))
|
||||
except Exception as e:
|
||||
logger.error("Streaming headers task failed: %s", e)
|
||||
await _stream_queue.put(("headers", "", ""))
|
||||
|
||||
data_task = asyncio.create_task(_eval_data_and_content())
|
||||
header_task = asyncio.create_task(_eval_headers())
|
||||
|
||||
# --- Build initial shell as HTML (still in request context) ---
|
||||
# Render to HTML so [data-suspense] elements are real DOM immediately.
|
||||
# No dependency on sx-browser.js boot timing for the initial shell.
|
||||
|
||||
suspense_header_sx = f'(~suspense :id "stream-headers" :fallback {header_fallback})'
|
||||
|
||||
# When :shell is provided, it renders directly as the content slot
|
||||
# (it contains its own ~suspense for the data-dependent part).
|
||||
# Otherwise, wrap the entire :content in a single suspense.
|
||||
if page_def.shell_expr is not None:
|
||||
shell_content_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
suspense_content_sx = shell_content_sx
|
||||
else:
|
||||
suspense_content_sx = f'(~suspense :id "stream-content" :fallback {fallback_sx})'
|
||||
|
||||
initial_page_html = await _helpers_render_to_html("app-body",
|
||||
header_rows=SxExpr(suspense_header_sx),
|
||||
content=SxExpr(suspense_content_sx),
|
||||
)
|
||||
|
||||
# Include layout component refs + page content so the scan picks up
|
||||
# their transitive deps (e.g. ~cart-mini, ~auth-menu in headers).
|
||||
layout_refs = ""
|
||||
if layout is not None and hasattr(layout, "component_names"):
|
||||
layout_refs = " ".join(f"({n})" for n in layout.component_names)
|
||||
content_ref = ""
|
||||
if page_def.content_expr is not None:
|
||||
content_ref = sx_serialize(page_def.content_expr)
|
||||
shell_ref = ""
|
||||
if page_def.shell_expr is not None:
|
||||
shell_ref = sx_serialize(page_def.shell_expr)
|
||||
page_sx_for_scan = f'(<> {layout_refs} {content_ref} {shell_ref} (~app-body :header-rows {suspense_header_sx} :content {suspense_content_sx}))'
|
||||
shell, tail = sx_page_streaming_parts(
|
||||
tctx, initial_page_html, page_sx=page_sx_for_scan,
|
||||
)
|
||||
|
||||
# Capture component env + extras scanner while we still have context.
|
||||
# Resolved SX may reference components not in the initial scan
|
||||
# (e.g. ~cart-mini from IO-generated header content).
|
||||
from .jinja_bridge import components_for_page as _comp_scan
|
||||
from quart import current_app as _ca
|
||||
_service = _ca.name
|
||||
# Track which components were already sent in the shell
|
||||
_shell_scan = page_sx_for_scan
|
||||
|
||||
def _extra_defs(sx_source: str) -> str:
|
||||
"""Return component defs needed by sx_source but not in shell."""
|
||||
from .deps import components_needed
|
||||
comp_env = dict(get_component_env())
|
||||
shell_needed = components_needed(_shell_scan, comp_env)
|
||||
resolve_needed = components_needed(sx_source, comp_env)
|
||||
extra = resolve_needed - shell_needed
|
||||
if not extra:
|
||||
return ""
|
||||
from .parser import serialize
|
||||
from .types import Component
|
||||
parts = []
|
||||
for key, val in comp_env.items():
|
||||
if isinstance(val, Component) and (f"~{val.name}" in extra or key in extra):
|
||||
param_strs = ["&key"] + list(val.params)
|
||||
if val.has_children:
|
||||
param_strs.extend(["&rest", "children"])
|
||||
params_sx = "(" + " ".join(param_strs) + ")"
|
||||
body_sx = serialize(val.body, pretty=True)
|
||||
parts.append(f"(defcomp ~{val.name} {params_sx} {body_sx})")
|
||||
return "\n".join(parts)
|
||||
|
||||
# --- Return async generator that yields chunks ---
|
||||
# No context access needed below — just awaiting tasks and yielding strings.
|
||||
|
||||
async def _stream_chunks():
|
||||
yield shell + tail
|
||||
|
||||
# Both tasks push tagged items onto _stream_queue. We drain until
|
||||
# both are done. Items: ("headers", rows, menu), ("data-single", ...),
|
||||
# ("data", stream_id, sx), ("data-done",).
|
||||
remaining = 2 # waiting for: headers + data
|
||||
while remaining > 0:
|
||||
item = await _stream_queue.get()
|
||||
kind = item[0]
|
||||
try:
|
||||
if kind == "headers":
|
||||
_, header_rows, header_menu = item
|
||||
remaining -= 1
|
||||
if header_rows:
|
||||
extras = _extra_defs(header_rows)
|
||||
yield sx_streaming_resolve_script("stream-headers", header_rows, extras)
|
||||
elif kind == "data-single":
|
||||
_, content_sx, filter_sx, aside_sx, menu_sx = item
|
||||
remaining -= 1
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script("stream-content", content_sx, extras)
|
||||
elif kind == "data":
|
||||
_, stream_id, content_sx = item
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script(stream_id, content_sx, extras)
|
||||
elif kind == "data-done":
|
||||
remaining -= 1
|
||||
except Exception as e:
|
||||
logger.error("Streaming resolve failed for %s: %s", kind, e)
|
||||
|
||||
yield "\n</body>\n</html>"
|
||||
|
||||
return _stream_chunks()
|
||||
|
||||
|
||||
async def execute_page_streaming_oob(
|
||||
page_def: PageDef,
|
||||
service_name: str,
|
||||
url_params: dict[str, Any] | None = None,
|
||||
):
|
||||
"""Execute a streaming page for HTMX/SX requests.
|
||||
|
||||
Like execute_page_streaming but yields OOB SX swap format instead of a
|
||||
full HTML document:
|
||||
1. First yield: OOB SX with shell content (suspense skeletons) + CSS + defs
|
||||
2. Subsequent yields: __sxResolve script tags as data resolves
|
||||
|
||||
The client uses streaming fetch (ReadableStream) to process the OOB swap
|
||||
immediately and then execute resolve scripts as they arrive.
|
||||
"""
|
||||
import asyncio
|
||||
from .jinja_bridge import get_component_env, _get_request_context
|
||||
from .async_eval import async_eval
|
||||
from .page import get_template_context
|
||||
from .helpers import (
|
||||
oob_page_sx,
|
||||
sx_streaming_resolve_script,
|
||||
components_for_request,
|
||||
SxExpr,
|
||||
)
|
||||
from .parser import serialize as sx_serialize
|
||||
from .layouts import get_layout
|
||||
|
||||
if url_params is None:
|
||||
url_params = {}
|
||||
|
||||
env = dict(get_component_env())
|
||||
env.update(get_page_helpers(service_name))
|
||||
env.update(page_def.closure)
|
||||
for key, val in url_params.items():
|
||||
kebab = key.replace("_", "-")
|
||||
env[kebab] = val
|
||||
env[key] = val
|
||||
|
||||
ctx = _get_request_context()
|
||||
|
||||
# Evaluate shell with suspense skeletons (no data yet)
|
||||
shell_sx = ""
|
||||
if page_def.shell_expr is not None:
|
||||
shell_sx = await _eval_slot(page_def.shell_expr, env, ctx)
|
||||
|
||||
# Build initial OOB response with shell as content
|
||||
tctx = await get_template_context()
|
||||
|
||||
# Resolve layout for OOB headers
|
||||
layout = None
|
||||
layout_kwargs: dict[str, Any] = {}
|
||||
if page_def.layout is not None:
|
||||
if isinstance(page_def.layout, str):
|
||||
layout_name = page_def.layout
|
||||
elif isinstance(page_def.layout, list):
|
||||
from .types import Keyword as SxKeyword, Symbol as SxSymbol
|
||||
raw = page_def.layout
|
||||
first = raw[0]
|
||||
layout_name = (
|
||||
first.name if isinstance(first, (SxKeyword, SxSymbol))
|
||||
else str(first)
|
||||
)
|
||||
i = 1
|
||||
while i < len(raw):
|
||||
k = raw[i]
|
||||
if isinstance(k, SxKeyword) and i + 1 < len(raw):
|
||||
resolved = await async_eval(raw[i + 1], env, ctx)
|
||||
layout_kwargs[k.name.replace("-", "_")] = resolved
|
||||
i += 2
|
||||
else:
|
||||
i += 1
|
||||
else:
|
||||
layout_name = str(page_def.layout)
|
||||
layout = get_layout(layout_name)
|
||||
|
||||
# Launch concurrent tasks
|
||||
_stream_queue: asyncio.Queue = asyncio.Queue()
|
||||
|
||||
async def _eval_data():
|
||||
try:
|
||||
if page_def.data_expr is not None:
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
async for chunk in data_result:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
chunk = dict(chunk)
|
||||
stream_id = chunk.pop("stream-id", "stream-content")
|
||||
chunk_env = dict(env)
|
||||
for k, v in chunk.items():
|
||||
chunk_env[k.replace("_", "-")] = v
|
||||
content_sx = await _eval_slot(page_def.content_expr, chunk_env, ctx) if page_def.content_expr else ""
|
||||
await _stream_queue.put(("data", stream_id, content_sx))
|
||||
await _stream_queue.put(("data-done",))
|
||||
return
|
||||
await _stream_queue.put(("data-done",))
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB data task failed: %s", e)
|
||||
await _stream_queue.put(("data-done",))
|
||||
|
||||
async def _eval_oob_headers():
|
||||
try:
|
||||
if layout is not None:
|
||||
oob_headers = await layout.oob_headers(tctx, **layout_kwargs)
|
||||
await _stream_queue.put(("headers", oob_headers))
|
||||
else:
|
||||
await _stream_queue.put(("headers", ""))
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB headers task failed: %s", e)
|
||||
await _stream_queue.put(("headers", ""))
|
||||
|
||||
data_task = asyncio.create_task(_eval_data())
|
||||
header_task = asyncio.create_task(_eval_oob_headers())
|
||||
|
||||
# Build initial OOB body with shell content (skeletons in place)
|
||||
oob_body = await oob_page_sx(
|
||||
oobs="", # headers will arrive via resolve script
|
||||
content=shell_sx,
|
||||
)
|
||||
|
||||
# Prepend component definitions + CSS (like sx_response does)
|
||||
from quart import request
|
||||
comp_defs = components_for_request(oob_body)
|
||||
body = oob_body
|
||||
if comp_defs:
|
||||
body = (f'<script type="text/sx" data-components>'
|
||||
f'{comp_defs}</script>\n{body}')
|
||||
|
||||
from .css_registry import scan_classes_from_sx, lookup_rules, registry_loaded
|
||||
if registry_loaded():
|
||||
new_classes = scan_classes_from_sx(oob_body)
|
||||
if comp_defs:
|
||||
new_classes.update(scan_classes_from_sx(comp_defs))
|
||||
known_raw = request.headers.get("SX-Css", "")
|
||||
if known_raw:
|
||||
from .css_registry import lookup_css_hash
|
||||
if len(known_raw) <= 16:
|
||||
looked_up = lookup_css_hash(known_raw)
|
||||
known_classes = looked_up if looked_up is not None else set()
|
||||
else:
|
||||
known_classes = set(known_raw.split(","))
|
||||
new_classes -= known_classes
|
||||
if new_classes:
|
||||
new_rules = lookup_rules(new_classes)
|
||||
if new_rules:
|
||||
body = f'<style data-sx-css>{new_rules}</style>\n{body}'
|
||||
|
||||
# Capture component env for extra defs in resolve chunks
|
||||
from .jinja_bridge import components_for_page as _comp_scan
|
||||
_base_scan = oob_body
|
||||
|
||||
def _extra_defs(sx_source: str) -> str:
|
||||
from .deps import components_needed
|
||||
comp_env = dict(get_component_env())
|
||||
base_needed = components_needed(_base_scan, comp_env)
|
||||
resolve_needed = components_needed(sx_source, comp_env)
|
||||
extra = resolve_needed - base_needed
|
||||
if not extra:
|
||||
return ""
|
||||
from .parser import serialize
|
||||
from .types import Component
|
||||
parts = []
|
||||
for key, val in comp_env.items():
|
||||
if isinstance(val, Component) and (f"~{val.name}" in extra or key in extra):
|
||||
param_strs = ["&key"] + list(val.params)
|
||||
if val.has_children:
|
||||
param_strs.extend(["&rest", "children"])
|
||||
params_sx = "(" + " ".join(param_strs) + ")"
|
||||
body_sx = serialize(val.body, pretty=True)
|
||||
parts.append(f"(defcomp ~{val.name} {params_sx} {body_sx})")
|
||||
return "\n".join(parts)
|
||||
|
||||
# Yield chunks
|
||||
async def _stream_oob_chunks():
|
||||
# First chunk: OOB swap with skeletons
|
||||
yield body
|
||||
|
||||
# Drain queue for resolve scripts
|
||||
remaining = 2 # headers + data
|
||||
while remaining > 0:
|
||||
item = await _stream_queue.get()
|
||||
kind = item[0]
|
||||
try:
|
||||
if kind == "headers":
|
||||
_, oob_hdr = item
|
||||
remaining -= 1
|
||||
# Headers don't need resolve scripts for OOB — they're
|
||||
# handled by OOB swap attributes in the SX content itself.
|
||||
# But if we have header content, send a resolve for it.
|
||||
if oob_hdr:
|
||||
extras = _extra_defs(oob_hdr)
|
||||
yield sx_streaming_resolve_script("stream-headers", oob_hdr, extras)
|
||||
elif kind == "data":
|
||||
_, stream_id, content_sx = item
|
||||
extras = _extra_defs(content_sx)
|
||||
yield sx_streaming_resolve_script(stream_id, content_sx, extras)
|
||||
elif kind == "data-done":
|
||||
remaining -= 1
|
||||
except Exception as e:
|
||||
logger.error("Streaming OOB resolve failed for %s: %s", kind, e)
|
||||
|
||||
return _stream_oob_chunks()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Blueprint mounting
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -356,17 +883,32 @@ def mount_pages(bp: Any, service_name: str,
|
||||
|
||||
def _mount_one_page(bp: Any, service_name: str, page_def: PageDef) -> None:
|
||||
"""Mount a single PageDef as a GET route on the blueprint."""
|
||||
from quart import make_response
|
||||
from quart import make_response, Response
|
||||
|
||||
# Build the view function
|
||||
async def page_view(**kwargs: Any) -> Any:
|
||||
# Re-fetch the page from registry to support hot-reload of content
|
||||
current = get_page(service_name, page_def.name) or page_def
|
||||
result = await execute_page(current, service_name, url_params=kwargs)
|
||||
# If result is already a Response (from sx_response), return it
|
||||
if hasattr(result, "status_code"):
|
||||
return result
|
||||
return await make_response(result, 200)
|
||||
if page_def.stream:
|
||||
# Streaming response: yields chunks as IO resolves
|
||||
async def page_view(**kwargs: Any) -> Any:
|
||||
from shared.browser.app.utils.htmx import is_htmx_request
|
||||
current = get_page(service_name, page_def.name) or page_def
|
||||
if is_htmx_request():
|
||||
# Streaming OOB: shell with skeletons first, then resolve scripts
|
||||
gen = await execute_page_streaming_oob(
|
||||
current, service_name, url_params=kwargs,
|
||||
)
|
||||
return Response(gen, content_type="text/sx; charset=utf-8")
|
||||
# Full page streaming: HTML document with inline resolve scripts
|
||||
gen = await execute_page_streaming(
|
||||
current, service_name, url_params=kwargs,
|
||||
)
|
||||
return Response(gen, content_type="text/html; charset=utf-8")
|
||||
else:
|
||||
# Standard non-streaming response
|
||||
async def page_view(**kwargs: Any) -> Any:
|
||||
current = get_page(service_name, page_def.name) or page_def
|
||||
result = await execute_page(current, service_name, url_params=kwargs)
|
||||
if hasattr(result, "status_code"):
|
||||
return result
|
||||
return await make_response(result, 200)
|
||||
|
||||
# Give the view function a unique name for Quart's routing
|
||||
page_view.__name__ = f"defpage_{page_def.name.replace('-', '_')}"
|
||||
@@ -482,7 +1024,14 @@ async def evaluate_page_data(
|
||||
|
||||
data_result = await async_eval(page_def.data_expr, env, ctx)
|
||||
|
||||
# Kebab-case dict keys (matching execute_page line 214-215)
|
||||
# Multi-stream: async generator can't be serialized as a single dict.
|
||||
# Return nil to signal the client to fall back to server-side rendering.
|
||||
if hasattr(data_result, '__aiter__'):
|
||||
# Close the generator cleanly
|
||||
await data_result.aclose()
|
||||
return "nil"
|
||||
|
||||
# Kebab-case dict keys (matching execute_page)
|
||||
if isinstance(data_result, dict):
|
||||
data_result = {
|
||||
k.replace("_", "-"): v for k, v in data_result.items()
|
||||
|
||||
@@ -62,6 +62,24 @@ class SxExpr(str):
|
||||
# Errors
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_ESCAPE_MAP = {"n": "\n", "t": "\t", '"': '"', "\\": "\\", "/": "/"}
|
||||
|
||||
|
||||
def _unescape_string(s: str) -> str:
|
||||
"""Process escape sequences in a parsed string, character by character."""
|
||||
out: list[str] = []
|
||||
i = 0
|
||||
while i < len(s):
|
||||
if s[i] == "\\" and i + 1 < len(s):
|
||||
nxt = s[i + 1]
|
||||
out.append(_ESCAPE_MAP.get(nxt, nxt))
|
||||
i += 2
|
||||
else:
|
||||
out.append(s[i])
|
||||
i += 1
|
||||
return "".join(out)
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
"""Error during s-expression parsing."""
|
||||
|
||||
@@ -141,11 +159,7 @@ class Tokenizer:
|
||||
raise ParseError("Unterminated string", self.pos, self.line, self.col)
|
||||
self._advance(m.end() - self.pos)
|
||||
content = m.group()[1:-1]
|
||||
content = content.replace("\\n", "\n")
|
||||
content = content.replace("\\t", "\t")
|
||||
content = content.replace('\\"', '"')
|
||||
content = content.replace("\\/", "/")
|
||||
content = content.replace("\\\\", "\\")
|
||||
content = _unescape_string(content)
|
||||
return content
|
||||
|
||||
# Keyword
|
||||
|
||||
@@ -374,6 +374,8 @@ def prim_get(coll: Any, key: Any, default: Any = None) -> Any:
|
||||
return default
|
||||
if isinstance(coll, list):
|
||||
return coll[key] if 0 <= key < len(coll) else default
|
||||
if hasattr(coll, "get"):
|
||||
return coll.get(key, default)
|
||||
return default
|
||||
|
||||
@register_primitive("len")
|
||||
|
||||
@@ -87,9 +87,19 @@ async def _async_eval(expr, env, ctx):
|
||||
args, kwargs = await _parse_io_args(expr[1:], env, ctx)
|
||||
return await execute_io(head.name, args, kwargs, ctx)
|
||||
|
||||
# Check if this is a render expression (HTML tag, component, fragment)
|
||||
# so we can wrap the result in _RawHTML to prevent double-escaping.
|
||||
# The sync evaluator returns plain strings from render_list_to_html;
|
||||
# the async renderer would HTML-escape those without this wrapper.
|
||||
is_render = isinstance(expr, list) and sx_ref.is_render_expr(expr)
|
||||
|
||||
# For everything else, use the sync transpiled evaluator
|
||||
result = sx_ref.eval_expr(expr, env)
|
||||
return sx_ref.trampoline(result)
|
||||
result = sx_ref.trampoline(result)
|
||||
|
||||
if is_render and isinstance(result, str):
|
||||
return _RawHTML(result)
|
||||
return result
|
||||
|
||||
|
||||
async def _parse_io_args(exprs, env, ctx):
|
||||
@@ -124,6 +134,9 @@ async def _arender(expr, env, ctx):
|
||||
return ""
|
||||
if isinstance(expr, _RawHTML):
|
||||
return expr.html
|
||||
# Also handle sx_ref._RawHTML from the sync evaluator
|
||||
if isinstance(expr, sx_ref._RawHTML):
|
||||
return expr.html
|
||||
if isinstance(expr, str):
|
||||
return escape_text(expr)
|
||||
if isinstance(expr, (int, float)):
|
||||
|
||||
@@ -91,6 +91,38 @@
|
||||
(sx-hydrate-elements el))))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Resolve Suspense — replace streaming placeholder with resolved content
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; Called by inline <script> tags that arrive during chunked transfer:
|
||||
;; __sxResolve("content", "(~article :title \"Hello\")")
|
||||
;;
|
||||
;; Finds the suspense wrapper by data-suspense attribute, renders the
|
||||
;; new SX content, and replaces the wrapper's children.
|
||||
|
||||
(define resolve-suspense
|
||||
(fn (id sx)
|
||||
;; Process any new <script type="text/sx"> tags that arrived via
|
||||
;; streaming (e.g. extra component defs) before resolving.
|
||||
(process-sx-scripts nil)
|
||||
(let ((el (dom-query (str "[data-suspense=\"" id "\"]"))))
|
||||
(if el
|
||||
(do
|
||||
;; parse returns a list of expressions — render each individually
|
||||
;; (mirroring the public render() API).
|
||||
(let ((exprs (parse sx))
|
||||
(env (get-render-env nil)))
|
||||
(dom-set-text-content el "")
|
||||
(for-each (fn (expr)
|
||||
(dom-append el (render-to-dom expr env nil)))
|
||||
exprs)
|
||||
(process-elements el)
|
||||
(sx-hydrate-elements el)
|
||||
(dom-dispatch el "sx:resolved" {:id id})))
|
||||
(log-warn (str "resolveSuspense: no element for id=" id))))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Hydrate — render all [data-sx] elements
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
@@ -361,6 +361,7 @@ class JSEmitter:
|
||||
"fetch-request": "fetchRequest",
|
||||
"fetch-location": "fetchLocation",
|
||||
"fetch-and-restore": "fetchAndRestore",
|
||||
"fetch-streaming": "fetchStreaming",
|
||||
"fetch-preload": "fetchPreload",
|
||||
"dom-query-by-id": "domQueryById",
|
||||
"dom-matches?": "domMatches",
|
||||
@@ -480,6 +481,7 @@ class JSEmitter:
|
||||
"init-style-dict": "initStyleDict",
|
||||
"SX_VERSION": "SX_VERSION",
|
||||
"boot-init": "bootInit",
|
||||
"resolve-suspense": "resolveSuspense",
|
||||
"resolve-mount-target": "resolveMountTarget",
|
||||
"sx-render-with-env": "sxRenderWithEnv",
|
||||
"get-render-env": "getRenderEnv",
|
||||
@@ -2680,7 +2682,7 @@ PLATFORM_DOM_JS = """
|
||||
|
||||
function domCreateElement(tag, ns) {
|
||||
if (!_hasDom) return null;
|
||||
if (ns) return document.createElementNS(ns, tag);
|
||||
if (ns && ns !== NIL) return document.createElementNS(ns, tag);
|
||||
return document.createElement(tag);
|
||||
}
|
||||
|
||||
@@ -3122,6 +3124,134 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
}).catch(function() { location.reload(); });
|
||||
}
|
||||
|
||||
function fetchStreaming(target, url, headers) {
|
||||
// Streaming fetch for multi-stream pages.
|
||||
// First chunk = OOB SX swap (shell with skeletons).
|
||||
// Subsequent chunks = __sxResolve script tags filling suspense slots.
|
||||
var opts = { headers: headers };
|
||||
try {
|
||||
var h = new URL(url, location.href).hostname;
|
||||
if (h !== location.hostname &&
|
||||
(h.indexOf(".rose-ash.com") >= 0 || h.indexOf(".localhost") >= 0)) {
|
||||
opts.credentials = "include";
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
fetch(url, opts).then(function(resp) {
|
||||
if (!resp.ok || !resp.body) {
|
||||
// Fallback: non-streaming
|
||||
return resp.text().then(function(text) {
|
||||
text = stripComponentScripts(text);
|
||||
text = extractResponseCss(text);
|
||||
text = text.trim();
|
||||
if (text.charAt(0) === "(") {
|
||||
var dom = sxRender(text);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var reader = resp.body.getReader();
|
||||
var decoder = new TextDecoder();
|
||||
var buffer = "";
|
||||
var initialSwapDone = false;
|
||||
// Regex to match __sxResolve script tags
|
||||
var RESOLVE_START = "<script>window.__sxResolve&&window.__sxResolve(";
|
||||
var RESOLVE_END = ")</script>";
|
||||
|
||||
function processResolveScripts() {
|
||||
// Strip and load any extra component defs before resolve scripts
|
||||
buffer = stripSxScripts(buffer);
|
||||
var idx;
|
||||
while ((idx = buffer.indexOf(RESOLVE_START)) >= 0) {
|
||||
var endIdx = buffer.indexOf(RESOLVE_END, idx);
|
||||
if (endIdx < 0) break; // incomplete, wait for more data
|
||||
var argsStr = buffer.substring(idx + RESOLVE_START.length, endIdx);
|
||||
buffer = buffer.substring(endIdx + RESOLVE_END.length);
|
||||
// argsStr is: "stream-id","sx source"
|
||||
var commaIdx = argsStr.indexOf(",");
|
||||
if (commaIdx >= 0) {
|
||||
try {
|
||||
var id = JSON.parse(argsStr.substring(0, commaIdx));
|
||||
var sx = JSON.parse(argsStr.substring(commaIdx + 1));
|
||||
if (typeof Sx !== "undefined" && Sx.resolveSuspense) {
|
||||
Sx.resolveSuspense(id, sx);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[sx-ref] resolve parse error:", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function pump() {
|
||||
return reader.read().then(function(result) {
|
||||
buffer += decoder.decode(result.value || new Uint8Array(), { stream: !result.done });
|
||||
|
||||
if (!initialSwapDone) {
|
||||
// Look for the first resolve script — everything before it is OOB content
|
||||
var scriptIdx = buffer.indexOf("<script>window.__sxResolve");
|
||||
// If we found a script tag, or the stream is done, process OOB
|
||||
var oobEnd = scriptIdx >= 0 ? scriptIdx : (result.done ? buffer.length : -1);
|
||||
if (oobEnd >= 0) {
|
||||
var oobContent = buffer.substring(0, oobEnd);
|
||||
buffer = buffer.substring(oobEnd);
|
||||
initialSwapDone = true;
|
||||
|
||||
// Process OOB SX content (same as fetchAndRestore)
|
||||
oobContent = stripComponentScripts(oobContent);
|
||||
// Also strip bare <script type="text/sx"> (extra defs from resolve chunks)
|
||||
oobContent = stripSxScripts(oobContent);
|
||||
oobContent = extractResponseCss(oobContent);
|
||||
oobContent = oobContent.trim();
|
||||
if (oobContent.charAt(0) === "(") {
|
||||
try {
|
||||
var dom = sxRender(oobContent);
|
||||
var container = document.createElement("div");
|
||||
container.appendChild(dom);
|
||||
processOobSwaps(container, function(t, oob, s) {
|
||||
swapDomNodes(t, oob, s);
|
||||
sxHydrate(t);
|
||||
processElements(t);
|
||||
});
|
||||
var newMain = container.querySelector("#main-panel");
|
||||
morphChildren(target, newMain || container);
|
||||
postSwap(target);
|
||||
// Dispatch clientRoute so nav links update active state
|
||||
domDispatch(target, "sx:clientRoute",
|
||||
{ pathname: new URL(url, location.href).pathname });
|
||||
} catch (err) {
|
||||
console.error("[sx-ref] streaming OOB swap error:", err);
|
||||
}
|
||||
}
|
||||
// Process any resolve scripts already in buffer
|
||||
processResolveScripts();
|
||||
}
|
||||
} else {
|
||||
// Process resolve scripts as they arrive
|
||||
processResolveScripts();
|
||||
}
|
||||
|
||||
if (!result.done) return pump();
|
||||
});
|
||||
}
|
||||
|
||||
return pump();
|
||||
}).catch(function(err) {
|
||||
console.error("[sx-ref] streaming fetch error:", err);
|
||||
location.reload();
|
||||
});
|
||||
}
|
||||
|
||||
function fetchPreload(url, headers, cache) {
|
||||
fetch(url, { headers: headers }).then(function(resp) {
|
||||
if (!resp.ok) return;
|
||||
@@ -3586,6 +3716,14 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function stripSxScripts(text) {
|
||||
// Strip <script type="text/sx">...</script> (without data-components).
|
||||
// These contain extra component defs from streaming resolve chunks.
|
||||
var SxObj = typeof Sx !== "undefined" ? Sx : null;
|
||||
return text.replace(/<script[^>]*type="text\\/sx"[^>]*>([\\s\\S]*?)<\\/script>/gi,
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function extractResponseCss(text) {
|
||||
if (!_hasDom) return text;
|
||||
var target = document.getElementById("sx-css");
|
||||
@@ -4006,6 +4144,12 @@ def public_api_js(has_html, has_sx, has_dom, has_engine, has_orch, has_cssx, has
|
||||
api_lines.append(' renderToDom: _hasDom ? function(expr, env, ns) { return renderToDom(expr, env || merge(componentEnv), ns || null); } : null,')
|
||||
if has_engine:
|
||||
api_lines.append(' parseTriggerSpec: typeof parseTriggerSpec === "function" ? parseTriggerSpec : null,')
|
||||
api_lines.append(' parseTime: typeof parseTime === "function" ? parseTime : null,')
|
||||
api_lines.append(' defaultTrigger: typeof defaultTrigger === "function" ? defaultTrigger : null,')
|
||||
api_lines.append(' parseSwapSpec: typeof parseSwapSpec === "function" ? parseSwapSpec : null,')
|
||||
api_lines.append(' parseRetrySpec: typeof parseRetrySpec === "function" ? parseRetrySpec : null,')
|
||||
api_lines.append(' nextRetryMs: typeof nextRetryMs === "function" ? nextRetryMs : null,')
|
||||
api_lines.append(' filterParams: typeof filterParams === "function" ? filterParams : null,')
|
||||
api_lines.append(' morphNode: typeof morphNode === "function" ? morphNode : null,')
|
||||
api_lines.append(' morphChildren: typeof morphChildren === "function" ? morphChildren : null,')
|
||||
api_lines.append(' swapDomNodes: typeof swapDomNodes === "function" ? swapDomNodes : null,')
|
||||
@@ -4020,11 +4164,13 @@ def public_api_js(has_html, has_sx, has_dom, has_engine, has_orch, has_cssx, has
|
||||
api_lines.append(' update: typeof sxUpdateElement === "function" ? sxUpdateElement : null,')
|
||||
api_lines.append(' renderComponent: typeof sxRenderComponent === "function" ? sxRenderComponent : null,')
|
||||
api_lines.append(' getEnv: function() { return componentEnv; },')
|
||||
api_lines.append(' resolveSuspense: typeof resolveSuspense === "function" ? resolveSuspense : null,')
|
||||
api_lines.append(' init: typeof bootInit === "function" ? bootInit : null,')
|
||||
elif has_orch:
|
||||
api_lines.append(' init: typeof engineInit === "function" ? engineInit : null,')
|
||||
if has_deps:
|
||||
api_lines.append(' scanRefs: scanRefs,')
|
||||
api_lines.append(' scanComponentsFromSource: scanComponentsFromSource,')
|
||||
api_lines.append(' transitiveDeps: transitiveDeps,')
|
||||
api_lines.append(' computeAllDeps: computeAllDeps,')
|
||||
api_lines.append(' componentsNeeded: componentsNeeded,')
|
||||
@@ -4060,7 +4206,18 @@ def public_api_js(has_html, has_sx, has_dom, has_engine, has_orch, has_cssx, has
|
||||
api_lines.append('''
|
||||
// --- Auto-init ---
|
||||
if (typeof document !== "undefined") {
|
||||
var _sxInit = function() { bootInit(); };
|
||||
var _sxInit = function() {
|
||||
bootInit();
|
||||
// Process any suspense resolutions that arrived before init
|
||||
if (global.__sxPending) {
|
||||
for (var pi = 0; pi < global.__sxPending.length; pi++) {
|
||||
resolveSuspense(global.__sxPending[pi].id, global.__sxPending[pi].sx);
|
||||
}
|
||||
global.__sxPending = null;
|
||||
}
|
||||
// Set up direct resolution for future chunks
|
||||
global.__sxResolve = function(id, sx) { resolveSuspense(id, sx); };
|
||||
};
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", _sxInit);
|
||||
} else {
|
||||
|
||||
@@ -854,6 +854,7 @@ ADAPTER_FILES = {
|
||||
SPEC_MODULES = {
|
||||
"deps": ("deps.sx", "deps (component dependency analysis)"),
|
||||
"router": ("router.sx", "router (client-side route matching)"),
|
||||
"engine": ("engine.sx", "engine (fetch/swap/trigger pure logic)"),
|
||||
}
|
||||
|
||||
|
||||
@@ -958,7 +959,7 @@ def compile_ref_to_py(
|
||||
Valid names: continuations.
|
||||
None = no extensions.
|
||||
spec_modules: List of spec module names to include.
|
||||
Valid names: deps.
|
||||
Valid names: deps, engine.
|
||||
None = no spec modules.
|
||||
"""
|
||||
# Determine which primitive modules to include
|
||||
@@ -1254,11 +1255,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -1789,7 +1794,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -1806,6 +1811,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -1832,10 +1838,15 @@ PRIMITIVES["parse-int"] = lambda v, d=0: _sx_parse_int(v, d)
|
||||
PRIMITIVES["parse-datetime"] = lambda s: str(s) if s else NIL
|
||||
|
||||
def _sx_parse_int(v, default=0):
|
||||
try:
|
||||
return _b_int(v)
|
||||
except (ValueError, TypeError):
|
||||
if v is None or v is NIL:
|
||||
return default
|
||||
s = str(v).strip()
|
||||
# Match JS parseInt: extract leading integer portion
|
||||
import re as _re
|
||||
m = _re.match(r'^[+-]?\\d+', s)
|
||||
if m:
|
||||
return _b_int(m.group())
|
||||
return default
|
||||
''',
|
||||
|
||||
"stdlib.text": '''
|
||||
@@ -1976,6 +1987,12 @@ concat = PRIMITIVES["concat"]
|
||||
split = PRIMITIVES["split"]
|
||||
length = PRIMITIVES["len"]
|
||||
merge = PRIMITIVES["merge"]
|
||||
trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
'''
|
||||
|
||||
|
||||
@@ -2189,7 +2206,7 @@ def main():
|
||||
parser.add_argument(
|
||||
"--spec-modules",
|
||||
default=None,
|
||||
help="Comma-separated spec modules (deps). Default: none.",
|
||||
help="Comma-separated spec modules (deps,engine). Default: none.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
adapters = args.adapters.split(",") if args.adapters else None
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
;; ==========================================================================
|
||||
;; boundary-app.sx — Deployment-specific boundary declarations
|
||||
;;
|
||||
;; Layout context I/O primitives for THIS deployment's service architecture.
|
||||
;; I/O primitives specific to THIS deployment's architecture:
|
||||
;; inter-service communication, framework bindings, domain concepts,
|
||||
;; and layout context providers.
|
||||
;;
|
||||
;; These are NOT part of the SX language contract — a different deployment
|
||||
;; would declare different layout contexts here.
|
||||
;; would declare different primitives here.
|
||||
;;
|
||||
;; The core SX I/O contract lives in boundary.sx.
|
||||
;; Per-service page helpers live in {service}/sx/boundary.sx.
|
||||
@@ -11,7 +14,92 @@
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Layout context providers — deployment-specific I/O
|
||||
;; Inter-service communication — microservice architecture
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(define-io-primitive "frag"
|
||||
:params (service frag-type &key)
|
||||
:returns "string"
|
||||
:async true
|
||||
:doc "Fetch cross-service HTML fragment."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "query"
|
||||
:params (service query-name &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Fetch data from another service via internal HTTP."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "action"
|
||||
:params (service action-name &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Call an action on another service via internal HTTP."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "service"
|
||||
:params (service-or-method &rest args &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Call a domain service method. Two-arg: (service svc method). One-arg: (service method) uses bound handler service."
|
||||
:context :request)
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Framework bindings — Quart/Jinja2/HTMX specifics
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(define-io-primitive "htmx-request?"
|
||||
:params ()
|
||||
:returns "boolean"
|
||||
:async true
|
||||
:doc "True if current request has HX-Request header."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "g"
|
||||
:params (key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Read a value from the Quart request-local g object."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "jinja-global"
|
||||
:params (key &rest default)
|
||||
:returns "any"
|
||||
:async false
|
||||
:doc "Read a Jinja environment global."
|
||||
:context :request)
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Domain concepts — navigation, relations
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(define-io-primitive "nav-tree"
|
||||
:params ()
|
||||
:returns "list"
|
||||
:async true
|
||||
:doc "Navigation tree as list of node dicts."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "get-children"
|
||||
:params (&key parent-type parent-id)
|
||||
:returns "list"
|
||||
:async true
|
||||
:doc "Fetch child entities for a parent."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "relations-from"
|
||||
:params (entity-type)
|
||||
:returns "list"
|
||||
:async false
|
||||
:doc "List of RelationDef dicts for an entity type."
|
||||
:context :config)
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Layout context providers — per-service header/page context
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Shared across all services (root layout)
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
;; This is the LANGUAGE contract — not deployment-specific.
|
||||
;;
|
||||
;; Pure primitives (Tier 1) are declared in primitives.sx.
|
||||
;; Deployment-specific I/O (layout contexts) lives in boundary-app.sx.
|
||||
;; Deployment-specific I/O lives in boundary-app.sx.
|
||||
;; Per-service page helpers live in {service}/sx/boundary.sx.
|
||||
;;
|
||||
;; Format:
|
||||
@@ -28,38 +28,11 @@
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Tier 2: Core I/O primitives — async, side-effectful, need host context
|
||||
;;
|
||||
;; These are generic web-platform I/O that any SX web host would provide,
|
||||
;; regardless of deployment architecture.
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Cross-service communication
|
||||
|
||||
(define-io-primitive "frag"
|
||||
:params (service frag-type &key)
|
||||
:returns "string"
|
||||
:async true
|
||||
:doc "Fetch cross-service HTML fragment."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "query"
|
||||
:params (service query-name &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Fetch data from another service via internal HTTP."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "action"
|
||||
:params (service action-name &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Call an action on another service via internal HTTP."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "service"
|
||||
:params (service-or-method &rest args &key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Call a domain service method. Two-arg: (service svc method). One-arg: (service method) uses bound handler service."
|
||||
:context :request)
|
||||
|
||||
;; Request context
|
||||
|
||||
(define-io-primitive "current-user"
|
||||
@@ -69,13 +42,6 @@
|
||||
:doc "Current authenticated user dict, or nil."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "htmx-request?"
|
||||
:params ()
|
||||
:returns "boolean"
|
||||
:async true
|
||||
:doc "True if current request has HX-Request header."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "request-arg"
|
||||
:params (name &rest default)
|
||||
:returns "any"
|
||||
@@ -97,13 +63,6 @@
|
||||
:doc "Read a URL view argument from the current request."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "g"
|
||||
:params (key)
|
||||
:returns "any"
|
||||
:async true
|
||||
:doc "Read a value from the Quart request-local g object."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "csrf-token"
|
||||
:params ()
|
||||
:returns "string"
|
||||
@@ -134,22 +93,6 @@
|
||||
:doc "Service URL prefix for dev/prod routing."
|
||||
:context :request)
|
||||
|
||||
;; Navigation and relations
|
||||
|
||||
(define-io-primitive "nav-tree"
|
||||
:params ()
|
||||
:returns "list"
|
||||
:async true
|
||||
:doc "Navigation tree as list of node dicts."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "get-children"
|
||||
:params (&key parent-type parent-id)
|
||||
:returns "list"
|
||||
:async true
|
||||
:doc "Fetch child entities for a parent."
|
||||
:context :request)
|
||||
|
||||
;; Config and host context (sync — no await needed)
|
||||
|
||||
(define-io-primitive "app-url"
|
||||
@@ -170,21 +113,7 @@
|
||||
:params (key)
|
||||
:returns "any"
|
||||
:async false
|
||||
:doc "Read a value from app-config.yaml."
|
||||
:context :config)
|
||||
|
||||
(define-io-primitive "jinja-global"
|
||||
:params (key &rest default)
|
||||
:returns "any"
|
||||
:async false
|
||||
:doc "Read a Jinja environment global."
|
||||
:context :request)
|
||||
|
||||
(define-io-primitive "relations-from"
|
||||
:params (entity-type)
|
||||
:returns "list"
|
||||
:async false
|
||||
:doc "List of RelationDef dicts for an entity type."
|
||||
:doc "Read a value from host configuration."
|
||||
:context :config)
|
||||
|
||||
|
||||
|
||||
@@ -172,7 +172,7 @@
|
||||
(let ((mac (env-get env name)))
|
||||
(make-thunk (expand-macro mac args env) env))
|
||||
|
||||
;; Render expression — delegate to active adapter
|
||||
;; Render expression — delegate to active adapter.
|
||||
(is-render-expr? expr)
|
||||
(render-expr expr env)
|
||||
|
||||
|
||||
@@ -116,3 +116,114 @@
|
||||
(let ((pdef (make-page-def name slots env)))
|
||||
(env-set! env (str "page:" name) pdef)
|
||||
pdef))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; Page Execution Semantics
|
||||
;; ==========================================================================
|
||||
;;
|
||||
;; A PageDef describes what to render for a route. The host evaluates slots
|
||||
;; at request time. This section specifies the data → content protocol that
|
||||
;; every host must implement identically.
|
||||
;;
|
||||
;; Slots (all unevaluated AST):
|
||||
;; :path — route pattern (string)
|
||||
;; :auth — "public" | "login" | "admin"
|
||||
;; :layout — layout reference + kwargs
|
||||
;; :stream — boolean, opt into chunked transfer
|
||||
;; :shell — immediate content (contains ~suspense placeholders)
|
||||
;; :fallback — loading skeleton for single-stream mode
|
||||
;; :data — IO expression producing bindings
|
||||
;; :content — template expression evaluated with data bindings
|
||||
;; :filter, :aside, :menu — additional content slots
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Data Protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; The :data expression is evaluated at request time. It returns one of:
|
||||
;;
|
||||
;; 1. A dict — single-stream mode (default).
|
||||
;; Each key becomes an env binding (underscores → hyphens).
|
||||
;; Then :content is evaluated once with those bindings.
|
||||
;; Result resolves the "stream-content" suspense slot.
|
||||
;;
|
||||
;; 2. A sequence of dicts — multi-stream mode.
|
||||
;; The host delivers items over time (async generator, channel, etc.).
|
||||
;; Each dict:
|
||||
;; - MUST contain "stream-id" → string matching a ~suspense :id
|
||||
;; - Remaining keys become env bindings (underscores → hyphens)
|
||||
;; - :content is re-evaluated with those bindings
|
||||
;; - Result resolves the ~suspense slot matching "stream-id"
|
||||
;; If "stream-id" is absent, defaults to "stream-content".
|
||||
;;
|
||||
;; The host is free to choose the timing mechanism:
|
||||
;; Python — async generator (yield dicts at intervals)
|
||||
;; Go — channel of dicts
|
||||
;; Haskell — conduit / streaming
|
||||
;; JS — async iterator
|
||||
;;
|
||||
;; The spec requires:
|
||||
;; (a) Each item's bindings are isolated (fresh env per item)
|
||||
;; (b) :content is evaluated independently for each item
|
||||
;; (c) Resolution is incremental — each item resolves as it arrives
|
||||
;; (d) "stream-id" routes to the correct ~suspense slot
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Streaming Execution Order
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; When :stream is true:
|
||||
;;
|
||||
;; 1. Evaluate :shell (if present) → HTML for immediate content slot
|
||||
;; :shell typically contains ~suspense placeholders with :fallback
|
||||
;; 2. Render HTML shell with suspense placeholders → send to client
|
||||
;; 3. Start :data evaluation concurrently with header resolution
|
||||
;; 4. As each data item arrives:
|
||||
;; a. Bind item keys into fresh env
|
||||
;; b. Evaluate :content with those bindings → SX wire format
|
||||
;; c. Send resolve script: __sxResolve(stream-id, sx)
|
||||
;; 5. Close response when all items + headers have resolved
|
||||
;;
|
||||
;; Non-streaming pages evaluate :data then :content sequentially and
|
||||
;; return the complete page in a single response.
|
||||
;;
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Spec helpers for multi-stream data protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Extract stream-id from a data chunk dict, defaulting to "stream-content"
|
||||
(define stream-chunk-id
|
||||
(fn (chunk)
|
||||
(if (has-key? chunk "stream-id")
|
||||
(get chunk "stream-id")
|
||||
"stream-content")))
|
||||
|
||||
;; Remove stream-id from chunk, returning only the bindings
|
||||
(define stream-chunk-bindings
|
||||
(fn (chunk)
|
||||
(dissoc chunk "stream-id")))
|
||||
|
||||
;; Normalize binding keys: underscore → hyphen
|
||||
(define normalize-binding-key
|
||||
(fn (key)
|
||||
(replace key "_" "-")))
|
||||
|
||||
;; Bind a data chunk's keys into a fresh env (isolated per chunk)
|
||||
(define bind-stream-chunk
|
||||
(fn (chunk base-env)
|
||||
(let ((env (merge {} base-env))
|
||||
(bindings (stream-chunk-bindings chunk)))
|
||||
(for-each
|
||||
(fn (key)
|
||||
(env-set! env (normalize-binding-key key)
|
||||
(get bindings key)))
|
||||
(keys bindings))
|
||||
env)))
|
||||
|
||||
;; Validate a multi-stream data result: must be a list of dicts
|
||||
(define validate-stream-data
|
||||
(fn (data)
|
||||
(and (= (type-of data) "list")
|
||||
(every? (fn (item) (= (type-of item) "dict")) data))))
|
||||
|
||||
@@ -593,7 +593,14 @@
|
||||
;; Client-side routing
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; No app-specific nav update here — apps handle sx:clientRoute event.
|
||||
(define current-page-layout
|
||||
(fn ()
|
||||
;; Find the layout name of the currently displayed page by matching
|
||||
;; the browser URL against the page route table.
|
||||
(let ((pathname (url-pathname (browser-location-href)))
|
||||
(match (find-matching-route pathname _page-routes)))
|
||||
(if (nil? match) ""
|
||||
(or (get match "layout") "")))))
|
||||
|
||||
|
||||
(define swap-rendered-content
|
||||
@@ -634,24 +641,38 @@
|
||||
;; Try to render a page client-side. Returns true if successful, false otherwise.
|
||||
;; target-sel is the CSS selector for the swap target (from sx-boost value).
|
||||
;; For pure pages: renders immediately. For :data pages: fetches data then renders.
|
||||
;; Falls through to server when layout changes (needs OOB header update).
|
||||
(let ((match (find-matching-route pathname _page-routes)))
|
||||
(if (nil? match)
|
||||
(do (log-info (str "sx:route no match (" (len _page-routes) " routes) " pathname)) false)
|
||||
(let ((content-src (get match "content"))
|
||||
(closure (or (get match "closure") {}))
|
||||
(params (get match "params"))
|
||||
(page-name (get match "name")))
|
||||
(if (or (nil? content-src) (empty? content-src))
|
||||
(do (log-warn (str "sx:route no content for " pathname)) false)
|
||||
(let ((target (resolve-route-target target-sel)))
|
||||
(if (nil? target)
|
||||
(do (log-warn (str "sx:route target not found: " target-sel)) false)
|
||||
(if (not (deps-satisfied? match))
|
||||
(do (log-info (str "sx:route deps miss for " page-name)) false)
|
||||
(let ((target-layout (or (get match "layout") ""))
|
||||
(cur-layout (current-page-layout)))
|
||||
(if (not (= target-layout cur-layout))
|
||||
(do (log-info (str "sx:route server (layout: " cur-layout " -> " target-layout ") " pathname)) false)
|
||||
(let ((content-src (get match "content"))
|
||||
(closure (or (get match "closure") {}))
|
||||
(params (get match "params"))
|
||||
(page-name (get match "name")))
|
||||
(if (or (nil? content-src) (empty? content-src))
|
||||
(do (log-warn (str "sx:route no content for " pathname)) false)
|
||||
(let ((target (resolve-route-target target-sel)))
|
||||
(if (nil? target)
|
||||
(do (log-warn (str "sx:route target not found: " target-sel)) false)
|
||||
(if (not (deps-satisfied? match))
|
||||
(do (log-info (str "sx:route deps miss for " page-name)) false)
|
||||
(let ((io-deps (get match "io-deps"))
|
||||
(has-io (and io-deps (not (empty? io-deps)))))
|
||||
;; Ensure IO deps are registered as proxied primitives
|
||||
(when has-io (register-io-deps io-deps))
|
||||
(if (get match "stream")
|
||||
;; Streaming page: fetch with streaming body reader.
|
||||
;; First chunk = OOB SX swap (shell with skeletons),
|
||||
;; subsequent chunks = resolve scripts filling slots.
|
||||
(do (log-info (str "sx:route streaming " pathname))
|
||||
(fetch-streaming target pathname
|
||||
(build-request-headers target
|
||||
(loaded-component-names) _css-hash))
|
||||
true)
|
||||
(if (get match "has-data")
|
||||
;; Data page: check cache, else resolve asynchronously
|
||||
(let ((cache-key (page-data-cache-key page-name params))
|
||||
@@ -715,7 +736,7 @@
|
||||
(do (log-info (str "sx:route server (eval failed) " pathname)) false)
|
||||
(do
|
||||
(swap-rendered-content target rendered pathname)
|
||||
true)))))))))))))))
|
||||
true))))))))))))))))))
|
||||
|
||||
|
||||
(define bind-client-route-link
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# WARNING: special-forms.sx declares forms not in eval.sx: reset, shift
|
||||
"""
|
||||
sx_ref.py -- Generated from reference SX evaluator specification.
|
||||
|
||||
@@ -191,11 +192,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -736,7 +741,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -752,6 +757,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -777,10 +783,15 @@ PRIMITIVES["parse-int"] = lambda v, d=0: _sx_parse_int(v, d)
|
||||
PRIMITIVES["parse-datetime"] = lambda s: str(s) if s else NIL
|
||||
|
||||
def _sx_parse_int(v, default=0):
    """Parse v as an integer with JS parseInt-like fallback.

    Tries a direct int() conversion first; when that fails, extracts the
    leading [+-]digits run from str(v) (mirroring JS parseInt), and
    returns `default` for nil or unparseable values.
    """
    try:
        return _b_int(v)
    except (ValueError, TypeError):
        if v is None or v is NIL:
            return default
        import re as _re
        # JS parseInt semantics: use only the leading signed-digit prefix.
        match = _re.match(r'^[+-]?\d+', str(v).strip())
        return _b_int(match.group()) if match else default
|
||||
|
||||
|
||||
# stdlib.text
|
||||
@@ -879,6 +890,12 @@ concat = PRIMITIVES["concat"]
|
||||
split = PRIMITIVES["split"]
|
||||
length = PRIMITIVES["len"]
|
||||
merge = PRIMITIVES["merge"]
|
||||
trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
|
||||
|
||||
# =========================================================================
|
||||
@@ -1119,6 +1136,21 @@ sf_defaction = lambda args, env: (lambda name_sym: (lambda params_raw: (lambda n
|
||||
# sf-defpage
|
||||
# (defpage name :slot val ...) special form. Walks the argument list from
# index 1 collecting keyword/value pairs into `slots`, builds the page
# definition via make_page_def, registers it in env under "page:<name>",
# and returns the definition. Odd trailing keywords without a value are
# ignored (the (idx + 1) < max_i guard).
sf_defpage = lambda args, env: (lambda name_sym: (lambda name: (lambda slots: _sx_begin((lambda i: (lambda max_i: for_each(lambda idx: ((_sx_dict_set(slots, keyword_name(nth(args, idx)), nth(args, (idx + 1))) if sx_truthy(((idx + 1) < max_i)) else NIL) if sx_truthy(((idx < max_i) if not sx_truthy((idx < max_i)) else (type_of(nth(args, idx)) == 'keyword'))) else NIL), range(1, max_i, 2)))(len(args)))(1), (lambda pdef: _sx_begin(_sx_dict_set(env, sx_str('page:', name), pdef), pdef))(make_page_def(name, slots, env))))({}))(symbol_name(name_sym)))(first(args))
|
||||
|
||||
# stream-chunk-id
def stream_chunk_id(chunk):
    """Stream target id for a chunk; defaults to 'stream-content'."""
    if sx_truthy(has_key_p(chunk, 'stream-id')):
        return get(chunk, 'stream-id')
    return 'stream-content'
|
||||
|
||||
# stream-chunk-bindings
def stream_chunk_bindings(chunk):
    """All chunk entries except the routing key 'stream-id'."""
    return dissoc(chunk, 'stream-id')
|
||||
|
||||
# normalize-binding-key
def normalize_binding_key(key):
    """Convert snake_case chunk keys to kebab-case env names."""
    return replace(key, '_', '-')
|
||||
|
||||
# bind-stream-chunk
def bind_stream_chunk(chunk, base_env):
    """Build a child env: copy base_env and overlay the chunk's data
    bindings under normalized (kebab-case) keys. Returns the new env."""
    env = merge({}, base_env)
    bindings = stream_chunk_bindings(chunk)
    for_each(
        lambda key: _sx_dict_set(env, normalize_binding_key(key), get(bindings, key)),
        keys(bindings))
    return env
|
||||
|
||||
# validate-stream-data
def validate_stream_data(data):
    """True iff data is a list whose items are all dicts."""
    if not sx_truthy(type_of(data) == 'list'):
        return False
    return every_p(lambda item: type_of(item) == 'dict', data)
|
||||
|
||||
|
||||
# === Transpiled from render (core) ===
|
||||
|
||||
@@ -1252,13 +1284,142 @@ compute_all_io_refs = lambda env, io_names: for_each(lambda name: (lambda val: (
|
||||
component_pure_p = lambda name, env, io_names: empty_p(transitive_io_refs(name, env, io_names))
|
||||
|
||||
|
||||
# === Transpiled from engine (fetch/swap/trigger pure logic) ===
|
||||
|
||||
# ENGINE_VERBS
|
||||
ENGINE_VERBS = ['get', 'post', 'put', 'delete', 'patch']
|
||||
|
||||
# DEFAULT_SWAP
|
||||
DEFAULT_SWAP = 'outerHTML'
|
||||
|
||||
# parse-time
def parse_time(s):
    """Convert a trigger time string ('500ms', '2s', '100') to integer ms.

    nil yields 0; a bare number string is taken as milliseconds.
    """
    if sx_truthy(is_nil(s)):
        return 0
    if sx_truthy(ends_with_p(s, 'ms')):
        return parse_int(s, 0)
    if sx_truthy(ends_with_p(s, 's')):
        return parse_int(replace(s, 's', ''), 0) * 1000
    return parse_int(s, 0)
|
||||
|
||||
# parse-trigger-spec
def parse_trigger_spec(spec):
    """Parse an sx-trigger attribute into trigger descriptors.

    spec is a comma-separated list of clauses; each clause is an event
    name followed by modifier tokens ('once', 'changed', 'delay:<t>',
    'from:<sel>'). 'every <interval>' produces a polling trigger.
    Returns NIL for nil input; empty clauses are filtered out.
    """
    if sx_truthy(is_nil(spec)):
        return NIL

    def parse_clause(part):
        tokens = split(trim(part), ' ')
        if sx_truthy(empty_p(tokens)):
            return NIL
        # 'every <interval>' is a polling trigger with a fixed interval.
        if sx_truthy(first(tokens) == 'every') and sx_truthy(len(tokens) >= 2):
            return {'event': 'every',
                    'modifiers': {'interval': parse_time(nth(tokens, 1))}}
        mods = {}
        for tok in rest(tokens):
            if sx_truthy(tok == 'once'):
                _sx_dict_set(mods, 'once', True)
            elif sx_truthy(tok == 'changed'):
                _sx_dict_set(mods, 'changed', True)
            elif sx_truthy(starts_with_p(tok, 'delay:')):
                _sx_dict_set(mods, 'delay', parse_time(slice(tok, 6)))
            elif sx_truthy(starts_with_p(tok, 'from:')):
                _sx_dict_set(mods, 'from', slice(tok, 5))
        return {'event': first(tokens), 'modifiers': mods}

    return filter(lambda x: not sx_truthy(is_nil(x)),
                  map(parse_clause, split(spec, ',')))
|
||||
|
||||
# default-trigger
def default_trigger(tag_name):
    """Implicit trigger for an element: submit for forms, change for form
    controls (input/select/textarea), click for everything else."""
    if sx_truthy(tag_name == 'FORM'):
        return [{'event': 'submit', 'modifiers': {}}]
    is_control = (sx_truthy(tag_name == 'INPUT')
                  or sx_truthy(tag_name == 'SELECT')
                  or sx_truthy(tag_name == 'TEXTAREA'))
    if is_control:
        return [{'event': 'change', 'modifiers': {}}]
    return [{'event': 'click', 'modifiers': {}}]
|
||||
|
||||
# get-verb-info
def get_verb_info(el):
    """Scan sx-get/sx-post/... attributes in ENGINE_VERBS order; the first
    verb with a URL wins, yielding {'method': UPPER, 'url': url}."""
    def verb_entry(verb):
        url = dom_get_attr(el, sx_str('sx-', verb))
        return {'method': upper(verb), 'url': url} if sx_truthy(url) else NIL
    return some(verb_entry, ENGINE_VERBS)
|
||||
|
||||
# build-request-headers
|
||||
# Build the SX-* request headers for a fetch issued from element `el`.
# Always sends SX-Request/SX-Current-URL; conditionally adds SX-Target
# (from the sx-target attr), SX-Components (comma-joined list, when
# non-empty), SX-Css (when css_hash is set), then merges user headers
# parsed from the sx-headers attribute (values stringified via sx_str).
build_request_headers = lambda el, loaded_components, css_hash: (lambda headers: _sx_begin((lambda target_sel: (_sx_dict_set(headers, 'SX-Target', target_sel) if sx_truthy(target_sel) else NIL))(dom_get_attr(el, 'sx-target')), (_sx_dict_set(headers, 'SX-Components', join(',', loaded_components)) if sx_truthy((not sx_truthy(empty_p(loaded_components)))) else NIL), (_sx_dict_set(headers, 'SX-Css', css_hash) if sx_truthy(css_hash) else NIL), (lambda extra_h: ((lambda parsed: (for_each(lambda key: _sx_dict_set(headers, key, sx_str(get(parsed, key))), keys(parsed)) if sx_truthy(parsed) else NIL))(parse_header_value(extra_h)) if sx_truthy(extra_h) else NIL))(dom_get_attr(el, 'sx-headers')), headers))({'SX-Request': 'true', 'SX-Current-URL': browser_location_href()})
|
||||
|
||||
# process-response-headers
def process_response_headers(get_header):
    """Extract the SX control headers from a response.

    get_header is a callback (header-name -> value or None). Returns a
    dict keyed by short control names; absent headers keep whatever the
    callback returns for a miss.
    """
    header_names = {
        'redirect': 'SX-Redirect',
        'refresh': 'SX-Refresh',
        'trigger': 'SX-Trigger',
        'retarget': 'SX-Retarget',
        'reswap': 'SX-Reswap',
        'location': 'SX-Location',
        'replace-url': 'SX-Replace-Url',
        'css-hash': 'SX-Css-Hash',
        'trigger-swap': 'SX-Trigger-After-Swap',
        'trigger-settle': 'SX-Trigger-After-Settle',
        'content-type': 'Content-Type',
    }
    return {key: get_header(name) for key, name in header_names.items()}
|
||||
|
||||
# parse-swap-spec
def parse_swap_spec(raw_swap, global_transitions_p):
    """Parse an sx-swap attribute into {'style', 'transition'}.

    The first token is the swap style (DEFAULT_SWAP when the attribute is
    absent); 'transition:true'/'transition:false' tokens override the
    global view-transition default.
    """
    parts = split(raw_swap if sx_truthy(raw_swap) else DEFAULT_SWAP, ' ')
    use_transition = global_transitions_p
    for token in rest(parts):
        if sx_truthy(token == 'transition:true'):
            use_transition = True
        elif sx_truthy(token == 'transition:false'):
            use_transition = False
    return {'style': first(parts), 'transition': use_transition}
|
||||
|
||||
# parse-retry-spec
def parse_retry_spec(retry_attr):
    """Parse a 'strategy:startMs:capMs' retry attribute.

    Missing numeric parts default to 1000/30000 ms; NIL when absent.
    """
    if sx_truthy(is_nil(retry_attr)):
        return NIL
    parts = split(retry_attr, ':')
    return {'strategy': first(parts),
            'start-ms': parse_int(nth(parts, 1), 1000),
            'cap-ms': parse_int(nth(parts, 2), 30000)}
|
||||
|
||||
# next-retry-ms
def next_retry_ms(current_ms, cap_ms):
    """Exponential backoff: double the current delay, clamped to cap_ms."""
    doubled = current_ms * 2
    return doubled if doubled < cap_ms else cap_ms
|
||||
|
||||
# filter-params
def filter_params(params_spec, all_params):
    """Apply an sx-params spec to a list of [name, value] pairs.

    nil and '*' keep everything, 'none' drops everything, 'not a,b'
    excludes the named params, and 'a,b' keeps only the named params.
    """
    if sx_truthy(is_nil(params_spec)):
        return all_params
    if sx_truthy(params_spec == 'none'):
        return []
    if sx_truthy(params_spec == '*'):
        return all_params
    if sx_truthy(starts_with_p(params_spec, 'not ')):
        excluded = map(trim, split(slice(params_spec, 4), ','))
        return filter(lambda p: not sx_truthy(contains_p(excluded, first(p))), all_params)
    allowed = map(trim, split(params_spec, ','))
    return filter(lambda p: contains_p(allowed, first(p)), all_params)
|
||||
|
||||
# resolve-target
def resolve_target(el):
    """Resolve an element's sx-target attribute to a DOM node: absent or
    'this' → the element itself, 'closest' → its parent, otherwise a
    document query for the selector."""
    sel = dom_get_attr(el, 'sx-target')
    if sx_truthy(is_nil(sel)) or sx_truthy(sel == 'this'):
        return el
    if sx_truthy(sel == 'closest'):
        return dom_parent(el)
    return dom_query(sel)
|
||||
|
||||
# apply-optimistic
|
||||
# Apply the sx-optimistic directive to the resolved target before the
# request fires: 'remove' hides it (opacity 0 + pointer-events none,
# saving the old opacity), 'disable' sets the disabled prop (saving the
# old value), 'add-class:<cls>' adds the class (recording it). Returns a
# state dict {'target','directive', + saved values} for revert_optimistic,
# or NIL when no directive is present.
apply_optimistic = lambda el: (lambda directive: (NIL if sx_truthy(is_nil(directive)) else (lambda target: (lambda state: _sx_begin((_sx_begin(_sx_dict_set(state, 'opacity', dom_get_style(target, 'opacity')), dom_set_style(target, 'opacity', '0'), dom_set_style(target, 'pointer-events', 'none')) if sx_truthy((directive == 'remove')) else (_sx_begin(_sx_dict_set(state, 'disabled', dom_get_prop(target, 'disabled')), dom_set_prop(target, 'disabled', True)) if sx_truthy((directive == 'disable')) else ((lambda cls: _sx_begin(_sx_dict_set(state, 'add-class', cls), dom_add_class(target, cls)))(slice(directive, 10)) if sx_truthy(starts_with_p(directive, 'add-class:')) else NIL))), state))({'target': target, 'directive': directive}))((resolve_target(el) if sx_truthy(resolve_target(el)) else el))))(dom_get_attr(el, 'sx-optimistic'))
|
||||
|
||||
# revert-optimistic
|
||||
# Undo apply_optimistic using its saved state dict: restores the saved
# opacity and clears pointer-events ('remove'), restores the saved
# disabled prop ('disable'), or removes the recorded class. No-op when
# state is falsy (no directive was applied).
revert_optimistic = lambda state: ((lambda target: (lambda directive: (_sx_begin(dom_set_style(target, 'opacity', (get(state, 'opacity') if sx_truthy(get(state, 'opacity')) else '')), dom_set_style(target, 'pointer-events', '')) if sx_truthy((directive == 'remove')) else (dom_set_prop(target, 'disabled', (get(state, 'disabled') if sx_truthy(get(state, 'disabled')) else False)) if sx_truthy((directive == 'disable')) else (dom_remove_class(target, get(state, 'add-class')) if sx_truthy(get(state, 'add-class')) else NIL))))(get(state, 'directive')))(get(state, 'target')) if sx_truthy(state) else NIL)
|
||||
|
||||
# find-oob-swaps
def find_oob_swaps(container):
    """Collect out-of-band swap directives ([sx-swap-oob] / [hx-swap-oob]).

    For each marked element: read the swap type (attribute value, default
    'outerHTML'), strip the attribute, and record the element keyed by its
    id. Elements without an id are skipped. Returns a list of
    {'element','swap-type','target-id'} dicts.
    """
    results = []
    for attr in ['sx-swap-oob', 'hx-swap-oob']:
        for oob in dom_query_all(container, sx_str('[', attr, ']')):
            raw = dom_get_attr(oob, attr)
            swap_type = raw if sx_truthy(raw) else 'outerHTML'
            target_id = dom_id(oob)
            dom_remove_attr(oob, attr)
            if sx_truthy(target_id):
                _sx_append(results, {'element': oob,
                                     'swap-type': swap_type,
                                     'target-id': target_id})
    return results
|
||||
|
||||
# morph-node
def morph_node(old_node, new_node):
    """Morph old_node in place to match new_node.

    sx-preserve/sx-ignore nodes are untouched. Mismatched node type or
    tag name replaces the node wholesale. Text/comment nodes (types 3/8)
    sync text content; element nodes (type 1) sync attributes and recurse
    into children — except for focused input elements, whose children are
    left alone to avoid clobbering user input.
    """
    if sx_truthy(dom_has_attr_p(old_node, 'sx-preserve')) or sx_truthy(dom_has_attr_p(old_node, 'sx-ignore')):
        return NIL
    if (not sx_truthy(dom_node_type(old_node) == dom_node_type(new_node))
            or not sx_truthy(dom_node_name(old_node) == dom_node_name(new_node))):
        return dom_replace_child(dom_parent(old_node), dom_clone(new_node), old_node)
    node_type = dom_node_type(old_node)
    if node_type == 3 or node_type == 8:  # text / comment node
        if not sx_truthy(dom_text_content(old_node) == dom_text_content(new_node)):
            return dom_set_text_content(old_node, dom_text_content(new_node))
        return NIL
    if node_type == 1:  # element node
        sync_attrs(old_node, new_node)
        if not (sx_truthy(dom_is_active_element_p(old_node))
                and sx_truthy(dom_is_input_element_p(old_node))):
            return morph_children(old_node, new_node)
        return NIL
    return NIL
|
||||
|
||||
# sync-attrs
def _sync_attrs_impl(old_el, new_el):
    """Make old_el's attributes equal to new_el's: write changed or new
    values, then strip attributes new_el no longer carries."""
    def copy_attr(attr):
        name = first(attr)
        val = nth(attr, 1)
        if not sx_truthy(dom_get_attr(old_el, name) == val):
            dom_set_attr(old_el, name, val)
        return NIL
    def drop_stale(attr):
        if not sx_truthy(dom_has_attr_p(new_el, first(attr))):
            dom_remove_attr(old_el, first(attr))
        return NIL
    for_each(copy_attr, dom_attr_list(new_el))
    return for_each(drop_stale, dom_attr_list(old_el))

sync_attrs = _sx_fn(_sync_attrs_impl)
|
||||
|
||||
# morph-children
|
||||
# Keyed child reconciliation (morphdom-style). Walks new_kids against a
# snapshot of old_kids with cursor oi; id-matched old nodes are moved into
# position and morphed; unkeyed pairs are morphed positionally; surplus
# new children are appended; leftover old children past the cursor are
# removed unless they carry sx-preserve/sx-ignore or already left the
# parent. NOTE(review): original indentation was lost in this rendering —
# the exact scope of some nested branches (e.g. the cursor increment in
# the keyed-old/unkeyed-new case) should be confirmed against the source.
def morph_children(old_parent, new_parent):
|
||||
_cells = {}
|
||||
old_kids = dom_child_list(old_parent)
|
||||
new_kids = dom_child_list(new_parent)
|
||||
# Index old children by id for O(1) keyed-match lookup.
old_by_id = reduce(lambda acc, kid: (lambda id: (_sx_begin(_sx_dict_set(acc, id, kid), acc) if sx_truthy(id) else acc))(dom_id(kid)), {}, old_kids)
|
||||
# oi is the cursor into the old_kids snapshot.
_cells['oi'] = 0
|
||||
for new_child in new_kids:
|
||||
match_id = dom_id(new_child)
|
||||
match_by_id = (dict_get(old_by_id, match_id) if sx_truthy(match_id) else NIL)
|
||||
# Case 1: an old child with the same id exists — move it into place
# if it is out of order, then morph it against the new child.
if sx_truthy((match_by_id if not sx_truthy(match_by_id) else (not sx_truthy(is_nil(match_by_id))))):
|
||||
if sx_truthy(((_cells['oi'] < len(old_kids)) if not sx_truthy((_cells['oi'] < len(old_kids))) else (not sx_truthy((match_by_id == nth(old_kids, _cells['oi'])))))):
|
||||
dom_insert_before(old_parent, match_by_id, (nth(old_kids, _cells['oi']) if sx_truthy((_cells['oi'] < len(old_kids))) else NIL))
|
||||
morph_node(match_by_id, new_child)
|
||||
_cells['oi'] = (_cells['oi'] + 1)
|
||||
# Case 2: no keyed match but old children remain at the cursor.
elif sx_truthy((_cells['oi'] < len(old_kids))):
|
||||
old_child = nth(old_kids, _cells['oi'])
|
||||
# Keyed old vs unkeyed new: insert the new node, keep the keyed old
# node available for a later keyed match.
if sx_truthy((dom_id(old_child) if not sx_truthy(dom_id(old_child)) else (not sx_truthy(match_id)))):
|
||||
dom_insert_before(old_parent, dom_clone(new_child), old_child)
|
||||
else:
|
||||
morph_node(old_child, new_child)
|
||||
_cells['oi'] = (_cells['oi'] + 1)
|
||||
# Case 3: old children exhausted — append the new child.
else:
|
||||
dom_append(old_parent, dom_clone(new_child))
|
||||
# Remove leftover old children past the cursor, skipping nodes that are
# marked sx-preserve/sx-ignore or were already detached from old_parent.
return for_each(lambda i: ((lambda leftover: (dom_remove_child(old_parent, leftover) if sx_truthy((dom_is_child_of_p(leftover, old_parent) if not sx_truthy(dom_is_child_of_p(leftover, old_parent)) else ((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve')))) else (not sx_truthy(dom_has_attr_p(leftover, 'sx-ignore')))))) else NIL))(nth(old_kids, i)) if sx_truthy((i >= _cells['oi'])) else NIL), range(_cells['oi'], len(old_kids)))
|
||||
|
||||
# swap-dom-nodes
|
||||
# Morph-swap parsed DOM nodes into `target` by strategy:
# innerHTML (also the None default) morphs children, wrapping a
# non-fragment node in a temp div first; outerHTML morphs the target
# itself and splices any remaining fragment siblings after it (an empty
# fragment removes the target); afterend/beforeend/afterbegin/beforebegin
# are positional inserts; delete removes the target; none is a no-op.
swap_dom_nodes = lambda target, new_nodes, strategy: _sx_case(strategy, [('innerHTML', lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL)))), ('outerHTML', lambda: (lambda parent: _sx_begin(((lambda fc: (_sx_begin(morph_node(target, fc), (lambda sib: insert_remaining_siblings(parent, target, sib))(dom_next_sibling(fc))) if sx_truthy(fc) else dom_remove_child(parent, target)))(dom_first_child(new_nodes)) if sx_truthy(dom_is_fragment_p(new_nodes)) else morph_node(target, new_nodes)), parent))(dom_parent(target))), ('afterend', lambda: dom_insert_after(target, new_nodes)), ('beforeend', lambda: dom_append(target, new_nodes)), ('afterbegin', lambda: dom_prepend(target, new_nodes)), ('beforebegin', lambda: dom_insert_before(dom_parent(target), new_nodes, target)), ('delete', lambda: dom_remove_child(dom_parent(target), target)), ('none', lambda: NIL), (None, lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL))))])
|
||||
|
||||
# insert-remaining-siblings
def insert_remaining_siblings(parent, ref_node, sib):
    """Recursively move sib and all of its following siblings so they sit
    after ref_node, preserving their order. No-op for a falsy sib."""
    if not sx_truthy(sib):
        return NIL
    # Grab the next sibling BEFORE moving sib, since moving detaches it.
    following = dom_next_sibling(sib)
    dom_insert_after(ref_node, sib)
    return insert_remaining_siblings(parent, sib, following)
|
||||
|
||||
# swap-html-string
def swap_html_string(target, html, strategy):
    """Swap a raw HTML string into/around target per the swap strategy
    (same strategies as swap_dom_nodes; default is innerHTML)."""
    if strategy == 'innerHTML':
        return dom_set_inner_html(target, html)
    if strategy == 'outerHTML':
        parent = dom_parent(target)
        dom_insert_adjacent_html(target, 'afterend', html)
        dom_remove_child(parent, target)
        return parent
    if strategy in ('afterend', 'beforeend', 'afterbegin', 'beforebegin'):
        # Positional strategies map 1:1 onto insertAdjacentHTML positions.
        return dom_insert_adjacent_html(target, strategy, html)
    if strategy == 'delete':
        return dom_remove_child(dom_parent(target), target)
    if strategy == 'none':
        return NIL
    # Unknown strategy: fall back to innerHTML, like the default case.
    return dom_set_inner_html(target, html)
|
||||
|
||||
# handle-history
|
||||
# Update browser history after a swap. Precedence: an SX-Replace-Url
# response header always wins (replaceState); otherwise the sx-push-url
# attribute ('true' pushes the request url, any other non-'false' value
# pushes that value); otherwise the sx-replace-url attribute with the
# same 'true'/value convention, via replaceState.
handle_history = lambda el, url, resp_headers: (lambda push_url: (lambda replace_url: (lambda hdr_replace: (browser_replace_state(hdr_replace) if sx_truthy(hdr_replace) else (browser_push_state((url if sx_truthy((push_url == 'true')) else push_url)) if sx_truthy((push_url if not sx_truthy(push_url) else (not sx_truthy((push_url == 'false'))))) else (browser_replace_state((url if sx_truthy((replace_url == 'true')) else replace_url)) if sx_truthy((replace_url if not sx_truthy(replace_url) else (not sx_truthy((replace_url == 'false'))))) else NIL))))(get(resp_headers, 'replace-url')))(dom_get_attr(el, 'sx-replace-url')))(dom_get_attr(el, 'sx-push-url'))
|
||||
|
||||
# PRELOAD_TTL
|
||||
PRELOAD_TTL = 30000
|
||||
|
||||
# preload-cache-get
def preload_cache_get(cache, url):
    """Fetch-and-consume a preloaded response for url.

    Entries are one-shot: the entry is deleted whether it is returned or
    not; entries older than PRELOAD_TTL ms yield NIL, as does a miss.
    NOTE(review): the hit path also deletes — looks like intentional
    consume-once semantics; confirm against callers.
    """
    entry = dict_get(cache, url)
    if sx_truthy(is_nil(entry)):
        return NIL
    dict_delete(cache, url)
    if sx_truthy((now_ms() - get(entry, 'timestamp')) > PRELOAD_TTL):
        return NIL
    return entry
|
||||
|
||||
# preload-cache-set
def preload_cache_set(cache, url, text, content_type):
    """Cache a preloaded response body for url, timestamped for the TTL
    check in preload_cache_get."""
    entry = {'text': text, 'content-type': content_type, 'timestamp': now_ms()}
    return _sx_dict_set(cache, url, entry)
|
||||
|
||||
# classify-trigger
def classify_trigger(trigger):
    """Bucket a parsed trigger: 'every' is a poll; intersect/load/revealed
    pass through unchanged; anything else is a plain DOM 'event'."""
    event = get(trigger, 'event')
    if sx_truthy(event == 'every'):
        return 'poll'
    if (sx_truthy(event == 'intersect') or sx_truthy(event == 'load')
            or sx_truthy(event == 'revealed')):
        return event
    return 'event'
|
||||
|
||||
# should-boost-link?
def should_boost_link_p(link):
    """Decide whether an <a> element should be boosted (intercepted for
    client routing): requires a same-origin href that is not a fragment,
    javascript: or mailto: link, on an element without explicit sx-get /
    sx-post / sx-disable attributes."""
    href = dom_get_attr(link, 'href')
    if not sx_truthy(href):
        # Short-circuit with the falsy href itself, like the original
        # and-chain did.
        return href
    for prefix in ('#', 'javascript:', 'mailto:'):
        if sx_truthy(starts_with_p(href, prefix)):
            return False
    same_origin = browser_same_origin_p(href)
    if not sx_truthy(same_origin):
        return same_origin
    for attr in ('sx-get', 'sx-post'):
        if sx_truthy(dom_has_attr_p(link, attr)):
            return False
    return not sx_truthy(dom_has_attr_p(link, 'sx-disable'))
|
||||
|
||||
# should-boost-form?
def should_boost_form_p(form):
    """A form is boosted unless it opts out via sx-get, sx-post, or
    sx-disable attributes."""
    for attr in ('sx-get', 'sx-post', 'sx-disable'):
        if sx_truthy(dom_has_attr_p(form, attr)):
            return False
    return True
|
||||
|
||||
# parse-sse-swap
def parse_sse_swap(el):
    """SSE event name to swap on, from sx-sse-swap; default 'message'."""
    name = dom_get_attr(el, 'sx-sse-swap')
    return name if sx_truthy(name) else 'message'
|
||||
|
||||
|
||||
# === Transpiled from router (client-side route matching) ===
|
||||
|
||||
# split-path-segments
def split_path_segments(path):
    """Split a URL path into segments: strip one leading '/' and one
    trailing '/', then split on '/'; an empty path yields [].

    (The superseded duplicate definition from the length→len refactor was
    removed; `length` aliases PRIMITIVES["len"], so behavior is the same.)
    """
    trimmed = slice(path, 1) if sx_truthy(starts_with_p(path, '/')) else path
    if sx_truthy(not sx_truthy(empty_p(trimmed))) and sx_truthy(ends_with_p(trimmed, '/')):
        trimmed = slice(trimmed, 0, len(trimmed) - 1)
    if sx_truthy(empty_p(trimmed)):
        return []
    return split(trimmed, '/')
|
||||
|
||||
# make-route-segment
def make_route_segment(seg):
    """Classify one route-pattern segment: '<name>' becomes a param
    segment capturing `name`; anything else is a literal segment.

    (The superseded duplicate definition from the length→len refactor was
    removed; behavior is unchanged.)
    """
    d = {}
    if sx_truthy(starts_with_p(seg, '<')) and sx_truthy(ends_with_p(seg, '>')):
        _sx_dict_set(d, 'type', 'param')
        _sx_dict_set(d, 'value', slice(seg, 1, len(seg) - 1))
    else:
        _sx_dict_set(d, 'type', 'literal')
        _sx_dict_set(d, 'value', seg)
    return d
|
||||
|
||||
# parse-route-pattern
def parse_route_pattern(pattern):
    """Parse a route pattern string into segment descriptor dicts."""
    return map(make_route_segment, split_path_segments(pattern))
|
||||
@@ -1266,7 +1427,7 @@ parse_route_pattern = lambda pattern: (lambda segments: map(make_route_segment,
|
||||
# match-route-segments
def match_route_segments(path_segs, parsed_segs):
    """Match concrete path segments against parsed pattern segments.

    Returns a dict of captured param values on success, NIL on any
    mismatch (including differing segment counts or an unknown segment
    type). The stale duplicate return statement left by the length→len
    refactor (dead code) was collapsed; behavior is unchanged.
    """
    if not sx_truthy(len(path_segs) == len(parsed_segs)):
        return NIL
    params = {}
    for i, parsed_seg in enumerate(parsed_segs):
        path_seg = nth(path_segs, i)
        seg_type = get(parsed_seg, 'type')
        if seg_type == 'literal':
            if not sx_truthy(path_seg == get(parsed_seg, 'value')):
                return NIL
        elif seg_type == 'param':
            _sx_dict_set(params, get(parsed_seg, 'value'), path_seg)
        else:
            # Unknown segment type fails the match, as before.
            return NIL
    return params
|
||||
|
||||
# match-route
def match_route(path, pattern):
    """Match a concrete path against a pattern; captured params or NIL."""
    return match_route_segments(split_path_segments(path),
                                parse_route_pattern(pattern))
|
||||
@@ -1318,64 +1479,6 @@ def _wrap_aser_outputs():
|
||||
aser_fragment = _aser_fragment_wrapped
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Extension: delimited continuations (shift/reset)
|
||||
# =========================================================================
|
||||
|
||||
_RESET_RESUME = [] # stack of resume values; empty = not resuming
|
||||
|
||||
_SPECIAL_FORM_NAMES = _SPECIAL_FORM_NAMES | frozenset(["reset", "shift"])
|
||||
|
||||
def sf_reset(args, env):
    """(reset body) -- establish a continuation delimiter.

    Evaluates body normally; when a shift escapes (raising _ShiftSignal),
    packages a resumable continuation that re-runs body with the resume
    value pushed on _RESET_RESUME, then evaluates the shift body with
    that continuation bound under the shift's k name.
    """
    body = first(args)
    try:
        return trampoline(eval_expr(body, env))
    except _ShiftSignal as sig:
        def resume(value=NIL):
            # Re-enter the reset body; sf_shift yields `value` instead of
            # raising while the resume stack is non-empty.
            _RESET_RESUME.append(value)
            try:
                return trampoline(eval_expr(body, env))
            finally:
                _RESET_RESUME.pop()
        shift_env = dict(sig.env)
        shift_env[sig.k_name] = Continuation(resume)
        return trampoline(eval_expr(sig.body, shift_env))
|
||||
|
||||
def sf_shift(args, env):
    """(shift k body) -- capture the continuation up to the nearest reset.

    When re-entered via a continuation resume (_RESET_RESUME non-empty),
    simply yields the topmost resume value; otherwise signals the
    enclosing sf_reset with the k name, body, and capturing env.
    """
    if _RESET_RESUME:
        return _RESET_RESUME[-1]
    raise _ShiftSignal(symbol_name(first(args)), nth(args, 1), env)
|
||||
|
||||
# Wrap eval_list so (reset ...) and (shift ...) dispatch to the
# continuation special forms before normal list evaluation.
_base_eval_list = eval_list
def _eval_list_with_continuations(expr, env):
    head = first(expr)
    if type_of(head) == "symbol":
        handler = {"reset": sf_reset, "shift": sf_shift}.get(symbol_name(head))
        if handler is not None:
            return handler(rest(expr), env)
    return _base_eval_list(expr, env)
eval_list = _eval_list_with_continuations
|
||||
|
||||
# Inject shift/reset handling into the aser_special dispatch as well, so
# the continuation forms work through that evaluation path too.
_base_aser_special = aser_special
def _aser_special_with_continuations(name, expr, env):
    handler = {"reset": sf_reset, "shift": sf_shift}.get(name)
    if handler is not None:
        return handler(expr[1:], env)
    return _base_aser_special(name, expr, env)
aser_special = _aser_special_with_continuations
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Public API
|
||||
# =========================================================================
|
||||
|
||||
225
shared/sx/ref/test-deps.sx
Normal file
225
shared/sx/ref/test-deps.sx
Normal file
@@ -0,0 +1,225 @@
|
||||
;; ==========================================================================
|
||||
;; test-deps.sx — Tests for component dependency analysis (deps.sx)
|
||||
;;
|
||||
;; Requires: test-framework.sx loaded first.
|
||||
;; Platform functions: scan-refs, transitive-deps, components-needed,
|
||||
;; component-pure?, scan-io-refs, transitive-io-refs,
|
||||
;; scan-components-from-source, test-env
|
||||
;; (loaded from bootstrapped output by test runners)
|
||||
;; ==========================================================================
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Test component definitions — these exist in the test env for dep analysis
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~dep-leaf ()
|
||||
(span "leaf"))
|
||||
|
||||
(defcomp ~dep-branch ()
|
||||
(div (~dep-leaf)))
|
||||
|
||||
(defcomp ~dep-trunk ()
|
||||
(div (~dep-branch) (~dep-leaf)))
|
||||
|
||||
(defcomp ~dep-conditional (&key show?)
|
||||
(if show?
|
||||
(~dep-leaf)
|
||||
(~dep-branch)))
|
||||
|
||||
(defcomp ~dep-nested-cond (&key mode)
|
||||
(cond
|
||||
(= mode "a") (~dep-leaf)
|
||||
(= mode "b") (~dep-branch)
|
||||
:else (~dep-trunk)))
|
||||
|
||||
(defcomp ~dep-island ()
|
||||
(div "no deps"))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 1. scan-refs — finds component references in AST nodes
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "scan-refs"
|
||||
|
||||
(deftest "empty for string literal"
|
||||
(assert-equal (list) (scan-refs "hello")))
|
||||
|
||||
(deftest "empty for number"
|
||||
(assert-equal (list) (scan-refs 42)))
|
||||
|
||||
(deftest "finds component symbol"
|
||||
(let ((refs (scan-refs (quote (~dep-leaf)))))
|
||||
(assert-contains "~dep-leaf" refs)))
|
||||
|
||||
(deftest "finds in nested list"
|
||||
(let ((refs (scan-refs (quote (div (span (~dep-leaf)))))))
|
||||
(assert-contains "~dep-leaf" refs)))
|
||||
|
||||
(deftest "finds multiple refs"
|
||||
(let ((refs (scan-refs (quote (div (~dep-leaf) (~dep-branch))))))
|
||||
(assert-contains "~dep-leaf" refs)
|
||||
(assert-contains "~dep-branch" refs)))
|
||||
|
||||
(deftest "deduplicates"
|
||||
(let ((refs (scan-refs (quote (div (~dep-leaf) (~dep-leaf))))))
|
||||
(assert-equal 1 (len refs))))
|
||||
|
||||
(deftest "walks if branches"
|
||||
(let ((refs (scan-refs (quote (if true (~dep-leaf) (~dep-branch))))))
|
||||
(assert-contains "~dep-leaf" refs)
|
||||
(assert-contains "~dep-branch" refs)))
|
||||
|
||||
(deftest "walks cond branches"
|
||||
(let ((refs (scan-refs (quote (cond (= x 1) (~dep-leaf) :else (~dep-trunk))))))
|
||||
(assert-contains "~dep-leaf" refs)
|
||||
(assert-contains "~dep-trunk" refs)))
|
||||
|
||||
(deftest "ignores non-component symbols"
|
||||
(let ((refs (scan-refs (quote (div class "foo")))))
|
||||
(assert-equal 0 (len refs)))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 2. scan-components-from-source — regex-based source string scanning
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "scan-components-from-source"
|
||||
|
||||
(deftest "finds single component"
|
||||
(let ((refs (scan-components-from-source "(~dep-leaf)")))
|
||||
(assert-contains "~dep-leaf" refs)))
|
||||
|
||||
(deftest "finds multiple components"
|
||||
(let ((refs (scan-components-from-source "(div (~dep-leaf) (~dep-branch))")))
|
||||
(assert-contains "~dep-leaf" refs)
|
||||
(assert-contains "~dep-branch" refs)))
|
||||
|
||||
(deftest "no false positives on plain text"
|
||||
(let ((refs (scan-components-from-source "(div \"hello world\")")))
|
||||
(assert-equal 0 (len refs))))
|
||||
|
||||
(deftest "handles hyphenated names"
|
||||
(let ((refs (scan-components-from-source "(~my-component :key val)")))
|
||||
(assert-contains "~my-component" refs))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 3. transitive-deps — transitive dependency closure
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "transitive-deps"
|
||||
|
||||
(deftest "leaf has no deps"
|
||||
(let ((deps (transitive-deps "~dep-leaf" (test-env))))
|
||||
(assert-equal 0 (len deps))))
|
||||
|
||||
(deftest "direct dependency"
|
||||
(let ((deps (transitive-deps "~dep-branch" (test-env))))
|
||||
(assert-contains "~dep-leaf" deps)))
|
||||
|
||||
(deftest "transitive closure"
|
||||
(let ((deps (transitive-deps "~dep-trunk" (test-env))))
|
||||
(assert-contains "~dep-branch" deps)
|
||||
(assert-contains "~dep-leaf" deps)))
|
||||
|
||||
(deftest "excludes self"
|
||||
(let ((deps (transitive-deps "~dep-trunk" (test-env))))
|
||||
(assert-false (contains? deps "~dep-trunk"))))
|
||||
|
||||
(deftest "walks conditional branches"
|
||||
(let ((deps (transitive-deps "~dep-conditional" (test-env))))
|
||||
(assert-contains "~dep-leaf" deps)
|
||||
(assert-contains "~dep-branch" deps)))
|
||||
|
||||
(deftest "walks all cond branches"
|
||||
(let ((deps (transitive-deps "~dep-nested-cond" (test-env))))
|
||||
(assert-contains "~dep-leaf" deps)
|
||||
(assert-contains "~dep-branch" deps)
|
||||
(assert-contains "~dep-trunk" deps)))
|
||||
|
||||
(deftest "island has no deps"
|
||||
(let ((deps (transitive-deps "~dep-island" (test-env))))
|
||||
(assert-equal 0 (len deps))))
|
||||
|
||||
(deftest "accepts name without tilde"
|
||||
(let ((deps (transitive-deps "dep-branch" (test-env))))
|
||||
(assert-contains "~dep-leaf" deps))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 4. components-needed — page bundle computation
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "components-needed"
|
||||
|
||||
(deftest "finds direct and transitive"
|
||||
(let ((needed (components-needed "(~dep-trunk)" (test-env))))
|
||||
(assert-contains "~dep-trunk" needed)
|
||||
(assert-contains "~dep-branch" needed)
|
||||
(assert-contains "~dep-leaf" needed)))
|
||||
|
||||
(deftest "deduplicates"
|
||||
(let ((needed (components-needed "(div (~dep-leaf) (~dep-leaf))" (test-env))))
|
||||
;; ~dep-leaf should appear only once
|
||||
(assert-true (contains? needed "~dep-leaf"))))
|
||||
|
||||
(deftest "handles leaf page"
|
||||
(let ((needed (components-needed "(~dep-island)" (test-env))))
|
||||
(assert-contains "~dep-island" needed)
|
||||
(assert-equal 1 (len needed))))
|
||||
|
||||
(deftest "handles multiple top-level components"
|
||||
(let ((needed (components-needed "(div (~dep-leaf) (~dep-island))" (test-env))))
|
||||
(assert-contains "~dep-leaf" needed)
|
||||
(assert-contains "~dep-island" needed))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 5. IO detection — scan-io-refs, component-pure?
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Define components that reference "io" functions for testing
|
||||
(defcomp ~dep-pure ()
|
||||
(div (~dep-leaf) "static"))
|
||||
|
||||
(defcomp ~dep-io ()
|
||||
(div (fetch-data "/api")))
|
||||
|
||||
(defcomp ~dep-io-indirect ()
|
||||
(div (~dep-io)))
|
||||
|
||||
(defsuite "scan-io-refs"
|
||||
|
||||
(deftest "no IO in pure AST"
|
||||
(let ((refs (scan-io-refs (quote (div "hello" (span "world"))) (list "fetch-data"))))
|
||||
(assert-equal 0 (len refs))))
|
||||
|
||||
(deftest "finds IO reference"
|
||||
(let ((refs (scan-io-refs (quote (div (fetch-data "/api"))) (list "fetch-data"))))
|
||||
(assert-contains "fetch-data" refs)))
|
||||
|
||||
(deftest "multiple IO refs"
|
||||
(let ((refs (scan-io-refs (quote (div (fetch-data "/a") (query-db "x"))) (list "fetch-data" "query-db"))))
|
||||
(assert-contains "fetch-data" refs)
|
||||
(assert-contains "query-db" refs)))
|
||||
|
||||
(deftest "ignores non-IO symbols"
|
||||
(let ((refs (scan-io-refs (quote (div (map str items))) (list "fetch-data"))))
|
||||
(assert-equal 0 (len refs)))))
|
||||
|
||||
|
||||
(defsuite "component-pure?"
|
||||
|
||||
(deftest "pure component is pure"
|
||||
(assert-true (component-pure? "~dep-pure" (test-env) (list "fetch-data"))))
|
||||
|
||||
(deftest "IO component is not pure"
|
||||
(assert-false (component-pure? "~dep-io" (test-env) (list "fetch-data"))))
|
||||
|
||||
(deftest "indirect IO is not pure"
|
||||
(assert-false (component-pure? "~dep-io-indirect" (test-env) (list "fetch-data"))))
|
||||
|
||||
(deftest "leaf component is pure"
|
||||
(assert-true (component-pure? "~dep-leaf" (test-env) (list "fetch-data")))))
|
||||
212
shared/sx/ref/test-engine.sx
Normal file
212
shared/sx/ref/test-engine.sx
Normal file
@@ -0,0 +1,212 @@
|
||||
;; ==========================================================================
|
||||
;; test-engine.sx — Tests for SxEngine pure logic (engine.sx)
|
||||
;;
|
||||
;; Requires: test-framework.sx loaded first.
|
||||
;; Platform functions: parse-time, parse-trigger-spec, default-trigger,
|
||||
;; parse-swap-spec, parse-retry-spec, next-retry-ms, filter-params
|
||||
;; (loaded from bootstrapped output by test runners)
|
||||
;; ==========================================================================
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 1. parse-time — time string parsing
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "parse-time"
|
||||
|
||||
(deftest "seconds to ms"
|
||||
(assert-equal 2000 (parse-time "2s")))
|
||||
|
||||
(deftest "milliseconds"
|
||||
(assert-equal 500 (parse-time "500ms")))
|
||||
|
||||
(deftest "nil returns 0"
|
||||
(assert-equal 0 (parse-time nil)))
|
||||
|
||||
(deftest "plain number string"
|
||||
(assert-equal 100 (parse-time "100")))
|
||||
|
||||
(deftest "one second"
|
||||
(assert-equal 1000 (parse-time "1s")))
|
||||
|
||||
(deftest "large seconds"
|
||||
(assert-equal 30000 (parse-time "30s"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 2. parse-trigger-spec — trigger attribute parsing
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "parse-trigger-spec"
|
||||
|
||||
(deftest "nil returns nil"
|
||||
(assert-nil (parse-trigger-spec nil)))
|
||||
|
||||
(deftest "single event"
|
||||
(let ((triggers (parse-trigger-spec "click")))
|
||||
(assert-equal 1 (len triggers))
|
||||
(assert-equal "click" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "event with once modifier"
|
||||
(let ((triggers (parse-trigger-spec "click once")))
|
||||
(assert-equal 1 (len triggers))
|
||||
(assert-equal "click" (get (first triggers) "event"))
|
||||
(assert-true (get (get (first triggers) "modifiers") "once"))))
|
||||
|
||||
(deftest "event with delay modifier"
|
||||
(let ((triggers (parse-trigger-spec "click delay:500ms")))
|
||||
(assert-equal 1 (len triggers))
|
||||
(assert-equal 500 (get (get (first triggers) "modifiers") "delay"))))
|
||||
|
||||
(deftest "multiple triggers comma-separated"
|
||||
(let ((triggers (parse-trigger-spec "click,change")))
|
||||
(assert-equal 2 (len triggers))
|
||||
(assert-equal "click" (get (first triggers) "event"))
|
||||
(assert-equal "change" (get (nth triggers 1) "event"))))
|
||||
|
||||
(deftest "polling trigger"
|
||||
(let ((triggers (parse-trigger-spec "every 3s")))
|
||||
(assert-equal 1 (len triggers))
|
||||
(assert-equal "every" (get (first triggers) "event"))
|
||||
(assert-equal 3000 (get (get (first triggers) "modifiers") "interval"))))
|
||||
|
||||
(deftest "event with from modifier"
|
||||
(let ((triggers (parse-trigger-spec "click from:body")))
|
||||
(assert-equal "body" (get (get (first triggers) "modifiers") "from"))))
|
||||
|
||||
(deftest "event with changed modifier"
|
||||
(let ((triggers (parse-trigger-spec "keyup changed")))
|
||||
(assert-equal "keyup" (get (first triggers) "event"))
|
||||
(assert-true (get (get (first triggers) "modifiers") "changed")))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 3. default-trigger — default trigger by element tag
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "default-trigger"
|
||||
|
||||
(deftest "form submits"
|
||||
(let ((triggers (default-trigger "FORM")))
|
||||
(assert-equal "submit" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "input changes"
|
||||
(let ((triggers (default-trigger "INPUT")))
|
||||
(assert-equal "change" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "select changes"
|
||||
(let ((triggers (default-trigger "SELECT")))
|
||||
(assert-equal "change" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "textarea changes"
|
||||
(let ((triggers (default-trigger "TEXTAREA")))
|
||||
(assert-equal "change" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "div clicks"
|
||||
(let ((triggers (default-trigger "DIV")))
|
||||
(assert-equal "click" (get (first triggers) "event"))))
|
||||
|
||||
(deftest "button clicks"
|
||||
(let ((triggers (default-trigger "BUTTON")))
|
||||
(assert-equal "click" (get (first triggers) "event")))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 4. parse-swap-spec — swap specification parsing
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "parse-swap-spec"
|
||||
|
||||
(deftest "default swap"
|
||||
(let ((spec (parse-swap-spec nil false)))
|
||||
(assert-equal "outerHTML" (get spec "style"))
|
||||
(assert-false (get spec "transition"))))
|
||||
|
||||
(deftest "innerHTML"
|
||||
(let ((spec (parse-swap-spec "innerHTML" false)))
|
||||
(assert-equal "innerHTML" (get spec "style"))))
|
||||
|
||||
(deftest "with transition true"
|
||||
(let ((spec (parse-swap-spec "innerHTML transition:true" false)))
|
||||
(assert-equal "innerHTML" (get spec "style"))
|
||||
(assert-true (get spec "transition"))))
|
||||
|
||||
(deftest "transition false overrides global"
|
||||
(let ((spec (parse-swap-spec "outerHTML transition:false" true)))
|
||||
(assert-equal "outerHTML" (get spec "style"))
|
||||
(assert-false (get spec "transition"))))
|
||||
|
||||
(deftest "global transition when not overridden"
|
||||
(let ((spec (parse-swap-spec "innerHTML" true)))
|
||||
(assert-equal "innerHTML" (get spec "style"))
|
||||
(assert-true (get spec "transition")))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 5. parse-retry-spec — retry specification parsing
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "parse-retry-spec"
|
||||
|
||||
(deftest "nil returns nil"
|
||||
(assert-nil (parse-retry-spec nil)))
|
||||
|
||||
(deftest "exponential backoff"
|
||||
(let ((spec (parse-retry-spec "exponential:1000:30000")))
|
||||
(assert-equal "exponential" (get spec "strategy"))
|
||||
(assert-equal 1000 (get spec "start-ms"))
|
||||
(assert-equal 30000 (get spec "cap-ms"))))
|
||||
|
||||
(deftest "linear strategy"
|
||||
(let ((spec (parse-retry-spec "linear:2000:60000")))
|
||||
(assert-equal "linear" (get spec "strategy"))
|
||||
(assert-equal 2000 (get spec "start-ms"))
|
||||
(assert-equal 60000 (get spec "cap-ms")))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 6. next-retry-ms — exponential backoff calculation
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "next-retry-ms"
|
||||
|
||||
(deftest "doubles current"
|
||||
(assert-equal 2000 (next-retry-ms 1000 30000)))
|
||||
|
||||
(deftest "caps at maximum"
|
||||
(assert-equal 30000 (next-retry-ms 20000 30000)))
|
||||
|
||||
(deftest "exact cap"
|
||||
(assert-equal 30000 (next-retry-ms 15000 30000)))
|
||||
|
||||
(deftest "small initial"
|
||||
(assert-equal 200 (next-retry-ms 100 30000))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 7. filter-params — form parameter filtering
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "filter-params"
|
||||
|
||||
(deftest "nil passes all through"
|
||||
(let ((params (list (list "a" "1") (list "b" "2"))))
|
||||
(assert-equal 2 (len (filter-params nil params)))))
|
||||
|
||||
(deftest "none returns empty"
|
||||
(let ((params (list (list "a" "1") (list "b" "2"))))
|
||||
(assert-equal 0 (len (filter-params "none" params)))))
|
||||
|
||||
(deftest "star passes all"
|
||||
(let ((params (list (list "a" "1") (list "b" "2"))))
|
||||
(assert-equal 2 (len (filter-params "*" params)))))
|
||||
|
||||
(deftest "whitelist"
|
||||
(let ((params (list (list "name" "Jo") (list "age" "30") (list "secret" "x"))))
|
||||
(let ((filtered (filter-params "name,age" params)))
|
||||
(assert-equal 2 (len filtered)))))
|
||||
|
||||
(deftest "blacklist with not"
|
||||
(let ((params (list (list "name" "Jo") (list "csrf" "tok") (list "age" "30"))))
|
||||
(let ((filtered (filter-params "not csrf" params)))
|
||||
(assert-equal 2 (len filtered))))))
|
||||
@@ -492,3 +492,177 @@
|
||||
(assert-equal 0 (reduce (fn (acc x) (+ acc x)) 0 (list)))
|
||||
(assert-equal 0 (len (list)))
|
||||
(assert-equal "" (str))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; defpage
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "defpage"
|
||||
(deftest "basic defpage returns page-def"
|
||||
(let ((p (defpage test-basic :path "/test" :auth :public :content (div "hello"))))
|
||||
(assert-true (not (nil? p)))
|
||||
(assert-equal "test-basic" (get p "name"))
|
||||
(assert-equal "/test" (get p "path"))
|
||||
(assert-equal "public" (get p "auth"))))
|
||||
|
||||
(deftest "defpage content expr is unevaluated AST"
|
||||
(let ((p (defpage test-content :path "/c" :auth :public :content (~my-comp :title "hi"))))
|
||||
(assert-true (not (nil? (get p "content"))))))
|
||||
|
||||
(deftest "defpage with :stream"
|
||||
(let ((p (defpage test-stream :path "/s" :auth :public :stream true :content (div "x"))))
|
||||
(assert-equal true (get p "stream"))))
|
||||
|
||||
(deftest "defpage with :shell"
|
||||
(let ((p (defpage test-shell :path "/sh" :auth :public :stream true
|
||||
:shell (~my-layout (~suspense :id "data" :fallback (div "loading...")))
|
||||
:content (~my-streamed :data data-val))))
|
||||
(assert-true (not (nil? (get p "shell"))))
|
||||
(assert-true (not (nil? (get p "content"))))))
|
||||
|
||||
(deftest "defpage with :fallback"
|
||||
(let ((p (defpage test-fallback :path "/f" :auth :public :stream true
|
||||
:fallback (div :class "skeleton" "loading")
|
||||
:content (div "done"))))
|
||||
(assert-true (not (nil? (get p "fallback"))))))
|
||||
|
||||
(deftest "defpage with :data"
|
||||
(let ((p (defpage test-data :path "/d" :auth :public
|
||||
:data (fetch-items)
|
||||
:content (~items-list :items items))))
|
||||
(assert-true (not (nil? (get p "data"))))))
|
||||
|
||||
(deftest "defpage missing fields are nil"
|
||||
(let ((p (defpage test-minimal :path "/m" :auth :public :content (div "x"))))
|
||||
(assert-nil (get p "data"))
|
||||
(assert-nil (get p "filter"))
|
||||
(assert-nil (get p "aside"))
|
||||
(assert-nil (get p "menu"))
|
||||
(assert-nil (get p "shell"))
|
||||
(assert-nil (get p "fallback"))
|
||||
(assert-equal false (get p "stream")))))
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream data protocol (from forms.sx)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "stream-chunk-id"
|
||||
(deftest "extracts stream-id from chunk"
|
||||
(assert-equal "my-slot" (stream-chunk-id {"stream-id" "my-slot" "x" 1})))
|
||||
|
||||
(deftest "defaults to stream-content when missing"
|
||||
(assert-equal "stream-content" (stream-chunk-id {"x" 1 "y" 2}))))
|
||||
|
||||
(defsuite "stream-chunk-bindings"
|
||||
(deftest "removes stream-id from chunk"
|
||||
(let ((bindings (stream-chunk-bindings {"stream-id" "slot" "name" "alice" "age" 30})))
|
||||
(assert-equal "alice" (get bindings "name"))
|
||||
(assert-equal 30 (get bindings "age"))
|
||||
(assert-nil (get bindings "stream-id"))))
|
||||
|
||||
(deftest "returns all keys when no stream-id"
|
||||
(let ((bindings (stream-chunk-bindings {"a" 1 "b" 2})))
|
||||
(assert-equal 1 (get bindings "a"))
|
||||
(assert-equal 2 (get bindings "b")))))
|
||||
|
||||
(defsuite "normalize-binding-key"
|
||||
(deftest "converts underscores to hyphens"
|
||||
(assert-equal "my-key" (normalize-binding-key "my_key")))
|
||||
|
||||
(deftest "leaves hyphens unchanged"
|
||||
(assert-equal "my-key" (normalize-binding-key "my-key")))
|
||||
|
||||
(deftest "handles multiple underscores"
|
||||
(assert-equal "a-b-c" (normalize-binding-key "a_b_c"))))
|
||||
|
||||
(defsuite "bind-stream-chunk"
|
||||
(deftest "creates fresh env with bindings"
|
||||
(let ((base {"existing" 42})
|
||||
(chunk {"stream-id" "slot" "user-name" "bob" "count" 5})
|
||||
(env (bind-stream-chunk chunk base)))
|
||||
;; Base env bindings are preserved
|
||||
(assert-equal 42 (get env "existing"))
|
||||
;; Chunk bindings are added (stream-id removed)
|
||||
(assert-equal "bob" (get env "user-name"))
|
||||
(assert-equal 5 (get env "count"))
|
||||
;; stream-id is not in env
|
||||
(assert-nil (get env "stream-id"))))
|
||||
|
||||
(deftest "isolates env from base — bindings don't leak to base"
|
||||
(let ((base {"x" 1})
|
||||
(chunk {"stream-id" "s" "y" 2})
|
||||
(env (bind-stream-chunk chunk base)))
|
||||
;; Chunk bindings should not appear in base
|
||||
(assert-nil (get base "y"))
|
||||
;; Base bindings should be in derived env
|
||||
(assert-equal 1 (get env "x")))))
|
||||
|
||||
(defsuite "validate-stream-data"
|
||||
(deftest "valid: list of dicts"
|
||||
(assert-true (validate-stream-data
|
||||
(list {"stream-id" "a" "x" 1}
|
||||
{"stream-id" "b" "y" 2}))))
|
||||
|
||||
(deftest "valid: empty list"
|
||||
(assert-true (validate-stream-data (list))))
|
||||
|
||||
(deftest "invalid: single dict (not a list)"
|
||||
(assert-equal false (validate-stream-data {"x" 1})))
|
||||
|
||||
(deftest "invalid: list containing non-dict"
|
||||
(assert-equal false (validate-stream-data (list {"x" 1} "oops" {"y" 2})))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream end-to-end scenarios
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "multi-stream routing"
|
||||
(deftest "stream-chunk-id routes different chunks to different slots"
|
||||
(let ((chunks (list
|
||||
{"stream-id" "stream-fast" "msg" "quick"}
|
||||
{"stream-id" "stream-medium" "msg" "steady"}
|
||||
{"stream-id" "stream-slow" "msg" "slow"}))
|
||||
(ids (map stream-chunk-id chunks)))
|
||||
(assert-equal "stream-fast" (nth ids 0))
|
||||
(assert-equal "stream-medium" (nth ids 1))
|
||||
(assert-equal "stream-slow" (nth ids 2))))
|
||||
|
||||
(deftest "bind-stream-chunk creates isolated envs per chunk"
|
||||
(let ((base {"layout" "main"})
|
||||
(chunk-a {"stream-id" "a" "title" "First" "count" 1})
|
||||
(chunk-b {"stream-id" "b" "title" "Second" "count" 2})
|
||||
(env-a (bind-stream-chunk chunk-a base))
|
||||
(env-b (bind-stream-chunk chunk-b base)))
|
||||
;; Each env has its own bindings
|
||||
(assert-equal "First" (get env-a "title"))
|
||||
(assert-equal "Second" (get env-b "title"))
|
||||
(assert-equal 1 (get env-a "count"))
|
||||
(assert-equal 2 (get env-b "count"))
|
||||
;; Both share base
|
||||
(assert-equal "main" (get env-a "layout"))
|
||||
(assert-equal "main" (get env-b "layout"))
|
||||
;; Neither leaks into base
|
||||
(assert-nil (get base "title"))))
|
||||
|
||||
(deftest "normalize-binding-key applied to chunk keys"
|
||||
(let ((chunk {"stream-id" "s" "user_name" "alice" "item_count" 3})
|
||||
(bindings (stream-chunk-bindings chunk)))
|
||||
;; Keys with underscores need normalizing for SX env
|
||||
(assert-equal "alice" (get bindings "user_name"))
|
||||
;; normalize-binding-key converts them
|
||||
(assert-equal "user-name" (normalize-binding-key "user_name"))
|
||||
(assert-equal "item-count" (normalize-binding-key "item_count"))))
|
||||
|
||||
(deftest "defpage stream flag defaults to false"
|
||||
(let ((p (defpage test-no-stream :path "/ns" :auth :public :content (div "x"))))
|
||||
(assert-equal false (get p "stream"))))
|
||||
|
||||
(deftest "defpage stream true recorded in page-def"
|
||||
(let ((p (defpage test-with-stream :path "/ws" :auth :public
|
||||
:stream true
|
||||
:shell (~layout (~suspense :id "data"))
|
||||
:content (~chunk :val val))))
|
||||
(assert-equal true (get p "stream"))
|
||||
(assert-true (not (nil? (get p "shell")))))))
|
||||
|
||||
@@ -122,4 +122,40 @@
|
||||
;; /docs/ should match docs-index, not docs-page
|
||||
(let ((result (find-matching-route "/docs/" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal "docs-index" (get result "name"))))))
|
||||
(assert-equal "docs-index" (get result "name")))))
|
||||
|
||||
(deftest "propagates stream flag from route"
|
||||
(let ((routes (list
|
||||
{:pattern "/demo/streaming"
|
||||
:parsed (parse-route-pattern "/demo/streaming")
|
||||
:name "streaming-demo"
|
||||
:stream true
|
||||
:has-data true})))
|
||||
(let ((result (find-matching-route "/demo/streaming" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal true (get result "stream"))
|
||||
(assert-equal true (get result "has-data")))))
|
||||
|
||||
(deftest "non-streaming route has no stream flag"
|
||||
(let ((routes (list
|
||||
{:pattern "/about"
|
||||
:parsed (parse-route-pattern "/about")
|
||||
:name "about"
|
||||
:has-data false})))
|
||||
(let ((result (find-matching-route "/about" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-nil (get result "stream")))))
|
||||
|
||||
(deftest "streaming route with params propagates all properties"
|
||||
(let ((routes (list
|
||||
{:pattern "/stream/<id>"
|
||||
:parsed (parse-route-pattern "/stream/<id>")
|
||||
:name "stream-page"
|
||||
:stream true
|
||||
:has-data true
|
||||
:content "expr"})))
|
||||
(let ((result (find-matching-route "/stream/fast" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal true (get result "stream"))
|
||||
(assert-equal "fast" (get (get result "params") "id"))
|
||||
(assert-equal "expr" (get result "content"))))))
|
||||
|
||||
@@ -15,6 +15,20 @@
|
||||
(body :class "bg-stone-50 text-stone-900"
|
||||
children))))
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
;; Suspense — streaming placeholder that renders fallback until resolved.
|
||||
;;
|
||||
;; Server-side: rendered in the initial streaming chunk with a fallback.
|
||||
;; Client-side: replaced when the server streams a resolution chunk via
|
||||
;; <script>__sxResolve("id", "(resolved sx ...)")</script>
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~suspense (&key id fallback &rest children)
|
||||
(div :id (str "sx-suspense-" id)
|
||||
:data-suspense id
|
||||
:style "display:contents"
|
||||
(if (not (empty? children)) children fallback)))
|
||||
|
||||
(defcomp ~error-page (&key title message image asset-url)
|
||||
(~base-shell :title title :asset-url asset-url
|
||||
(div :class "text-center p-8 max-w-lg mx-auto"
|
||||
|
||||
@@ -159,6 +159,8 @@ var SPECS = {
|
||||
"parser": { file: "test-parser.sx", needs: ["sx-parse"] },
|
||||
"router": { file: "test-router.sx", needs: [] },
|
||||
"render": { file: "test-render.sx", needs: ["render-html"] },
|
||||
"deps": { file: "test-deps.sx", needs: [] },
|
||||
"engine": { file: "test-engine.sx", needs: [] },
|
||||
};
|
||||
|
||||
function evalFile(filename) {
|
||||
@@ -215,9 +217,6 @@ if (args[0] === "--legacy") {
|
||||
|
||||
// Load prerequisite spec modules
|
||||
if (specName === "router") {
|
||||
// Use bootstrapped router functions from sx-browser.js.
|
||||
// The bare evaluator can't run router.sx faithfully because set!
|
||||
// inside lambda closures doesn't propagate (dict copies, not cells).
|
||||
if (Sx.splitPathSegments) {
|
||||
env["split-path-segments"] = Sx.splitPathSegments;
|
||||
env["parse-route-pattern"] = Sx.parseRoutePattern;
|
||||
@@ -230,6 +229,35 @@ if (args[0] === "--legacy") {
|
||||
}
|
||||
}
|
||||
|
||||
if (specName === "deps") {
|
||||
if (Sx.scanRefs) {
|
||||
env["scan-refs"] = Sx.scanRefs;
|
||||
env["scan-components-from-source"] = Sx.scanComponentsFromSource;
|
||||
env["transitive-deps"] = Sx.transitiveDeps;
|
||||
env["compute-all-deps"] = Sx.computeAllDeps;
|
||||
env["components-needed"] = Sx.componentsNeeded;
|
||||
env["page-component-bundle"] = Sx.pageComponentBundle;
|
||||
env["page-css-classes"] = Sx.pageCssClasses;
|
||||
env["scan-io-refs"] = Sx.scanIoRefs;
|
||||
env["transitive-io-refs"] = Sx.transitiveIoRefs;
|
||||
env["compute-all-io-refs"] = Sx.computeAllIoRefs;
|
||||
env["component-pure?"] = Sx.componentPure_p;
|
||||
env["test-env"] = function() { return env; };
|
||||
}
|
||||
}
|
||||
|
||||
if (specName === "engine") {
|
||||
if (Sx.parseTime) {
|
||||
env["parse-time"] = Sx.parseTime;
|
||||
env["parse-trigger-spec"] = Sx.parseTriggerSpec;
|
||||
env["default-trigger"] = Sx.defaultTrigger;
|
||||
env["parse-swap-spec"] = Sx.parseSwapSpec;
|
||||
env["parse-retry-spec"] = Sx.parseRetrySpec;
|
||||
env["next-retry-ms"] = function(cur, cap) { return Math.min(cur * 2, cap); };
|
||||
env["filter-params"] = Sx.filterParams;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("# --- " + specName + " ---");
|
||||
evalFile(spec.file);
|
||||
}
|
||||
|
||||
@@ -141,6 +141,8 @@ SPECS = {
|
||||
"parser": {"file": "test-parser.sx", "needs": ["sx-parse"]},
|
||||
"router": {"file": "test-router.sx", "needs": []},
|
||||
"render": {"file": "test-render.sx", "needs": ["render-html"]},
|
||||
"deps": {"file": "test-deps.sx", "needs": []},
|
||||
"engine": {"file": "test-engine.sx", "needs": []},
|
||||
}
|
||||
|
||||
REF_DIR = os.path.join(_HERE, "..", "ref")
|
||||
@@ -269,6 +271,81 @@ def _load_router_from_bootstrap(env):
|
||||
eval_file("router.sx", env)
|
||||
|
||||
|
||||
def _load_deps_from_bootstrap(env):
|
||||
"""Load deps functions from the bootstrapped sx_ref.py."""
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
scan_refs,
|
||||
scan_components_from_source,
|
||||
transitive_deps,
|
||||
compute_all_deps,
|
||||
components_needed,
|
||||
page_component_bundle,
|
||||
page_css_classes,
|
||||
scan_io_refs,
|
||||
transitive_io_refs,
|
||||
compute_all_io_refs,
|
||||
component_pure_p,
|
||||
)
|
||||
env["scan-refs"] = scan_refs
|
||||
env["scan-components-from-source"] = scan_components_from_source
|
||||
env["transitive-deps"] = transitive_deps
|
||||
env["compute-all-deps"] = compute_all_deps
|
||||
env["components-needed"] = components_needed
|
||||
env["page-component-bundle"] = page_component_bundle
|
||||
env["page-css-classes"] = page_css_classes
|
||||
env["scan-io-refs"] = scan_io_refs
|
||||
env["transitive-io-refs"] = transitive_io_refs
|
||||
env["compute-all-io-refs"] = compute_all_io_refs
|
||||
env["component-pure?"] = component_pure_p
|
||||
env["test-env"] = lambda: env
|
||||
except ImportError:
|
||||
eval_file("deps.sx", env)
|
||||
env["test-env"] = lambda: env
|
||||
|
||||
|
||||
def _load_engine_from_bootstrap(env):
|
||||
"""Load engine pure functions from the bootstrapped sx_ref.py."""
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
parse_time,
|
||||
parse_trigger_spec,
|
||||
default_trigger,
|
||||
parse_swap_spec,
|
||||
parse_retry_spec,
|
||||
next_retry_ms,
|
||||
filter_params,
|
||||
)
|
||||
env["parse-time"] = parse_time
|
||||
env["parse-trigger-spec"] = parse_trigger_spec
|
||||
env["default-trigger"] = default_trigger
|
||||
env["parse-swap-spec"] = parse_swap_spec
|
||||
env["parse-retry-spec"] = parse_retry_spec
|
||||
env["next-retry-ms"] = next_retry_ms
|
||||
env["filter-params"] = filter_params
|
||||
except ImportError:
|
||||
eval_file("engine.sx", env)
|
||||
|
||||
|
||||
def _load_forms_from_bootstrap(env):
|
||||
"""Load forms functions (including streaming protocol) from sx_ref.py."""
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
stream_chunk_id,
|
||||
stream_chunk_bindings,
|
||||
normalize_binding_key,
|
||||
bind_stream_chunk,
|
||||
validate_stream_data,
|
||||
)
|
||||
env["stream-chunk-id"] = stream_chunk_id
|
||||
env["stream-chunk-bindings"] = stream_chunk_bindings
|
||||
env["normalize-binding-key"] = normalize_binding_key
|
||||
env["bind-stream-chunk"] = bind_stream_chunk
|
||||
env["validate-stream-data"] = validate_stream_data
|
||||
except ImportError:
|
||||
eval_file("forms.sx", env)
|
||||
|
||||
|
||||
def main():
|
||||
global passed, failed, test_num
|
||||
|
||||
@@ -304,8 +381,14 @@ def main():
|
||||
continue
|
||||
|
||||
# Load prerequisite spec modules
|
||||
if spec_name == "eval":
|
||||
_load_forms_from_bootstrap(env)
|
||||
if spec_name == "router":
|
||||
_load_router_from_bootstrap(env)
|
||||
if spec_name == "deps":
|
||||
_load_deps_from_bootstrap(env)
|
||||
if spec_name == "engine":
|
||||
_load_engine_from_bootstrap(env)
|
||||
|
||||
print(f"# --- {spec_name} ---")
|
||||
eval_file(spec["file"], env)
|
||||
|
||||
@@ -6,7 +6,7 @@ against the bootstrap-compiled evaluator to verify correctness.
|
||||
|
||||
import pytest
|
||||
from shared.sx.parser import parse
|
||||
from shared.sx.types import Symbol, Keyword, NIL, Lambda, Component, Macro
|
||||
from shared.sx.types import Symbol, Keyword, NIL, Lambda, Component, Macro, PageDef
|
||||
from shared.sx.ref import sx_ref
|
||||
|
||||
|
||||
|
||||
@@ -241,8 +241,26 @@ class PageDef:
|
||||
filter_expr: Any
|
||||
aside_expr: Any
|
||||
menu_expr: Any
|
||||
stream: bool = False # enable streaming response
|
||||
fallback_expr: Any = None # fallback content while streaming
|
||||
shell_expr: Any = None # immediate shell content (wraps suspense)
|
||||
closure: dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
_FIELD_MAP = {
|
||||
"name": "name", "path": "path", "auth": "auth",
|
||||
"layout": "layout", "cache": "cache",
|
||||
"data": "data_expr", "content": "content_expr",
|
||||
"filter": "filter_expr", "aside": "aside_expr",
|
||||
"menu": "menu_expr", "stream": "stream",
|
||||
"fallback": "fallback_expr", "shell": "shell_expr",
|
||||
}
|
||||
|
||||
def get(self, key, default=None):
|
||||
attr = self._FIELD_MAP.get(key)
|
||||
if attr is not None:
|
||||
return getattr(self, attr)
|
||||
return default
|
||||
|
||||
def __repr__(self):
|
||||
return f"<page:{self.name} path={self.path!r}>"
|
||||
|
||||
|
||||
@@ -69,3 +69,8 @@
|
||||
:params (spec-name)
|
||||
:returns "dict"
|
||||
:service "sx")
|
||||
|
||||
(define-page-helper "streaming-demo-data"
|
||||
:params ()
|
||||
:returns "async-generator<dict>"
|
||||
:service "sx")
|
||||
|
||||
@@ -115,6 +115,8 @@
|
||||
(dict :label "Parser" :href "/testing/parser")
|
||||
(dict :label "Router" :href "/testing/router")
|
||||
(dict :label "Renderer" :href "/testing/render")
|
||||
(dict :label "Dependencies" :href "/testing/deps")
|
||||
(dict :label "Engine" :href "/testing/engine")
|
||||
(dict :label "Runners" :href "/testing/runners")))
|
||||
|
||||
(define isomorphism-nav-items (list
|
||||
@@ -122,7 +124,8 @@
|
||||
(dict :label "Bundle Analyzer" :href "/isomorphism/bundle-analyzer")
|
||||
(dict :label "Routing Analyzer" :href "/isomorphism/routing-analyzer")
|
||||
(dict :label "Data Test" :href "/isomorphism/data-test")
|
||||
(dict :label "Async IO" :href "/isomorphism/async-io")))
|
||||
(dict :label "Async IO" :href "/isomorphism/async-io")
|
||||
(dict :label "Streaming" :href "/isomorphism/streaming")))
|
||||
|
||||
(define plans-nav-items (list
|
||||
(dict :label "Status" :href "/plans/status"
|
||||
@@ -142,7 +145,11 @@
|
||||
(dict :label "Social Sharing" :href "/plans/social-sharing"
|
||||
:summary "OAuth-based sharing to Facebook, Instagram, Threads, Twitter/X, LinkedIn, and Mastodon.")
|
||||
(dict :label "SX CI Pipeline" :href "/plans/sx-ci"
|
||||
:summary "Build, test, and deploy in s-expressions — CI pipelines as SX components.")))
|
||||
:summary "Build, test, and deploy in s-expressions — CI pipelines as SX components.")
|
||||
(dict :label "CSSX Components" :href "/plans/cssx-components"
|
||||
:summary "Styling as components — replace the style dictionary with regular defcomps that apply classes, respond to data, and compose naturally.")
|
||||
(dict :label "Live Streaming" :href "/plans/live-streaming"
|
||||
:summary "SSE and WebSocket transports for re-resolving suspense slots after initial page load — live data, real-time collaboration.")))
|
||||
|
||||
(define bootstrappers-nav-items (list
|
||||
(dict :label "Overview" :href "/bootstrappers/")
|
||||
|
||||
344
sx/sx/plans.sx
344
sx/sx/plans.sx
@@ -135,7 +135,7 @@
|
||||
|
||||
(~doc-section :title "Context" :id "context"
|
||||
(p "The web is six incompatible formats duct-taped together: HTML for structure, CSS for style, JavaScript for behavior, JSON for data, server languages for backend logic, build tools for compilation. Moving anything between layers requires serialization, template languages, API contracts, and glue code. Federation (ActivityPub) adds a seventh — JSON-LD — which is inert data that every consumer must interpret from scratch and wrap in their own UI.")
|
||||
(p "SX is already one evaluable format that does all six. A component definition is simultaneously structure, style (CSSX atoms), behavior (event handlers), data (the AST " (em "is") " data), server-renderable (Python evaluator), and client-renderable (JS evaluator). The pieces already exist: content-addressed DAG execution (artdag), IPFS storage with CIDs, OpenTimestamps Bitcoin anchoring, boundary-enforced sandboxing.")
|
||||
(p "SX is already one evaluable format that does all six. A component definition is simultaneously structure, style (components apply classes and respond to data), behavior (event handlers), data (the AST " (em "is") " data), server-renderable (Python evaluator), and client-renderable (JS evaluator). The pieces already exist: content-addressed DAG execution (artdag), IPFS storage with CIDs, OpenTimestamps Bitcoin anchoring, boundary-enforced sandboxing.")
|
||||
(p "SX-Activity wires these together into a new web. Everything — content, UI components, markdown parsers, syntax highlighters, validation logic, media, processing pipelines — is the same executable format, stored on a content-addressed network, running within each participant's own security context. " (strong "The wire format is the programming language is the component system is the package manager.")))
|
||||
|
||||
(~doc-section :title "Current State" :id "current-state"
|
||||
@@ -394,7 +394,7 @@
|
||||
|
||||
(~doc-subsection :title "The insight"
|
||||
(p "The web has six layers that don't talk to each other: HTML (structure), CSS (style), JavaScript (behavior), JSON (data interchange), server frameworks (backend logic), and build tools (compilation). Each has its own syntax, its own semantics, its own ecosystem. Moving data between them requires serialization, deserialization, template languages, API contracts, type coercion, and an endless parade of glue code.")
|
||||
(p "SX collapses all six into one evaluable format. A component definition is simultaneously structure, style (CSSX atoms), behavior (event handlers), data (the AST is data), server-renderable (Python evaluator), and client-renderable (JS evaluator). There is no boundary between \"data\" and \"program\" — s-expressions are both.")
|
||||
(p "SX collapses all six into one evaluable format. A component definition is simultaneously structure, style (components apply classes and respond to data), behavior (event handlers), data (the AST is data), server-renderable (Python evaluator), and client-renderable (JS evaluator). There is no boundary between \"data\" and \"program\" — s-expressions are both.")
|
||||
(p "Once that's true, " (strong "everything becomes shareable.") " Not just UI components — markdown parsers, syntax highlighters, date formatters, validation logic, layout algorithms, color systems, animation curves. Any pure function over data. All content-addressed, all on IPFS, all executable within your own security context."))
|
||||
|
||||
(~doc-subsection :title "What travels on the network"
|
||||
@@ -732,7 +732,7 @@
|
||||
(li (code ":cid") " — content address of the canonical serialized source")
|
||||
(li (code ":deps") " — dependency CIDs, not just names. A consumer can recursively resolve the entire tree by CID without name ambiguity")
|
||||
(li (code ":pure") " — pre-computed purity flag. The consumer " (em "re-verifies") " this after fetching (never trust the manifest alone), but it enables fast rejection of IO-dependent components before downloading")
|
||||
(li (code ":css-atoms") " — CSSX class names the component uses. The consumer can pre-resolve CSS rules without parsing the source")
|
||||
(li (code ":deps") " includes style component CIDs. No separate " (code ":css-atoms") " field needed — styling is just more components")
|
||||
(li (code ":params") " — parameter signature for tooling, documentation, IDE support")
|
||||
(li (code ":author") " — who published this. AP actor URL, verifiable via HTTP Signatures")))
|
||||
|
||||
@@ -1386,12 +1386,12 @@
|
||||
(p :class "text-sm text-stone-600" "OAuth-based sharing to Facebook, Instagram, Threads, Twitter/X, LinkedIn, and Mastodon via the account service. No models, blueprints, or platform clients created.")
|
||||
(p :class "text-sm text-stone-500 mt-1" "Remaining: SocialConnection model, social_crypto.py, platform OAuth clients (6), account/bp/social/ blueprint, share button fragment."))
|
||||
|
||||
(div :class "rounded border border-stone-200 bg-stone-50 p-4"
|
||||
(div :class "rounded border border-green-200 bg-green-50 p-4"
|
||||
(div :class "flex items-center gap-2 mb-1"
|
||||
(span :class "inline-block px-2 py-0.5 rounded text-xs font-bold bg-stone-500 text-white uppercase" "Not Started")
|
||||
(span :class "inline-block px-2 py-0.5 rounded text-xs font-bold bg-green-600 text-white uppercase" "Complete")
|
||||
(a :href "/isomorphism/" :class "font-semibold text-stone-800 underline" "Isomorphic Phase 6: Streaming & Suspense"))
|
||||
(p :class "text-sm text-stone-600" "Server streams partially-evaluated SX as IO resolves. Client renders available subtrees immediately, fills in suspended parts. Requires async-aware delimited continuations for suspension.")
|
||||
(p :class "text-sm text-stone-500 mt-1" "Depends on: Phase 5 (IO proxy), continuations spec."))
|
||||
(p :class "text-sm text-stone-600" "Server streams partially-evaluated SX as IO resolves. ~suspense component renders fallbacks, inline resolution scripts fill in content. Concurrent IO via asyncio, chunked transfer encoding.")
|
||||
(p :class "text-sm text-stone-500 mt-1" "Demo: " (a :href "/isomorphism/streaming" "/isomorphism/streaming")))
|
||||
|
||||
(div :class "rounded border border-stone-200 bg-stone-50 p-4"
|
||||
(div :class "flex items-center gap-2 mb-1"
|
||||
@@ -1570,12 +1570,13 @@
|
||||
(li (strong "eval/parse/render: ") "Complete both sides. sx-ref.js has eval, parse, render-to-html, render-to-dom, aser.")
|
||||
(li (strong "Engine: ") "engine.sx (morph, swaps, triggers, history), orchestration.sx (fetch, events), boot.sx (hydration) — all transpiled.")
|
||||
(li (strong "Wire format: ") "Server _aser → SX source → client parses → renders to DOM. Boundary is clean.")
|
||||
(li (strong "Component caching: ") "Hash-based localStorage for component definitions and style dictionaries.")
|
||||
(li (strong "CSS on-demand: ") "CSSX resolves keywords to CSS rules, injects only used rules.")
|
||||
(li (strong "Component caching: ") "Hash-based localStorage for component definitions.")
|
||||
(li (strong "Boundary enforcement: ") "boundary.sx + SX_BOUNDARY_STRICT=1 validates all primitives/IO/helpers at registration.")
|
||||
(li (strong "Dependency analysis: ") "deps.sx computes per-page component bundles — only definitions a page actually uses are sent.")
|
||||
(li (strong "IO detection: ") "deps.sx classifies every component as pure or IO-dependent. Server expands IO components, serializes pure ones for client.")
|
||||
(li (strong "Client-side routing: ") "router.sx matches URL patterns. Pure pages render instantly without server roundtrips. Pages with :data fall through to server transparently.")))
|
||||
(li (strong "Client-side routing: ") "router.sx matches URL patterns. Pure pages render instantly without server roundtrips. Pages with :data fall through to server transparently.")
|
||||
(li (strong "Client IO proxy: ") "IO primitives registered on the client call back to the server via fetch. Components with IO deps can render client-side.")
|
||||
(li (strong "Streaming/suspense: ") "defpage :stream true enables chunked HTML. ~suspense placeholders show loading skeletons; __sxResolve() fills in content as IO completes.")))
|
||||
|
||||
;; -----------------------------------------------------------------------
|
||||
;; Phase 1
|
||||
@@ -1882,39 +1883,86 @@
|
||||
|
||||
(~doc-section :title "Phase 6: Streaming & Suspense" :id "phase-6"
|
||||
|
||||
(div :class "rounded border border-violet-200 bg-violet-50 p-4 mb-4"
|
||||
(p :class "text-violet-900 font-medium" "What it enables")
|
||||
(p :class "text-violet-800" "Server streams partially-evaluated SX as IO resolves. Client renders available subtrees immediately, fills in suspended parts. Like React Suspense but built on delimited continuations."))
|
||||
(div :class "rounded border border-green-300 bg-green-50 p-4 mb-4"
|
||||
(div :class "flex items-center gap-2 mb-2"
|
||||
(span :class "inline-block px-2 py-0.5 rounded text-xs font-bold bg-green-600 text-white uppercase" "Complete")
|
||||
(a :href "/isomorphism/streaming" :class "text-green-700 underline text-sm font-medium" "Live streaming demo"))
|
||||
(p :class "text-green-900 font-medium" "What it enables")
|
||||
(p :class "text-green-800" "Server streams partially-evaluated SX as IO resolves. Client renders available subtrees immediately with loading skeletons, fills in suspended parts as data arrives."))
|
||||
|
||||
(div :class "rounded border border-amber-200 bg-amber-50 p-3 mb-4"
|
||||
(p :class "text-amber-800 text-sm" (strong "Prerequisite: ") "Async-aware delimited continuations. The client solved IO suspension via JavaScript Promises (Phase 5), but the server needs continuations to suspend mid-evaluation when IO is encountered during streaming. Python's evaluator must capture the continuation at an IO call, emit a placeholder, schedule the IO, and resume the continuation when the result arrives."))
|
||||
(~doc-subsection :title "What was built"
|
||||
(ul :class "list-disc pl-5 text-stone-700 space-y-1"
|
||||
(li (code "~suspense") " component — renders fallback content with a stable DOM ID, replaced when resolution arrives")
|
||||
(li (code "defpage :stream true") " — opts a page into streaming response mode")
|
||||
(li (code "defpage :fallback expr") " — custom loading skeleton for streaming pages")
|
||||
(li (code "execute_page_streaming()") " — Quart async generator response that yields HTML chunks")
|
||||
(li (code "sx_page_streaming_parts()") " — splits the HTML shell into streamable parts")
|
||||
(li (code "Sx.resolveSuspense(id, sx)") " — client-side function to replace suspense placeholders")
|
||||
(li (code "window.__sxResolve") " bootstrap — queues resolutions that arrive before sx.js loads")
|
||||
(li "Concurrent IO: data eval + header eval run in parallel via " (code "asyncio.create_task"))
|
||||
(li "Completion-order streaming: whichever IO finishes first gets sent first via " (code "asyncio.wait(FIRST_COMPLETED)"))))
|
||||
|
||||
|
||||
(~doc-subsection :title "Approach"
|
||||
(~doc-subsection :title "Architecture"
|
||||
|
||||
(div :class "space-y-4"
|
||||
(div
|
||||
(h4 :class "font-semibold text-stone-700" "1. Continuation-based suspension")
|
||||
(p "When _aser encounters IO during slot evaluation, emit a placeholder with a suspension ID, schedule async resolution:")
|
||||
(~doc-code :code (highlight "(~suspense :id \"placeholder-123\"\n :fallback (div \"Loading...\"))" "lisp")))
|
||||
(h4 :class "font-semibold text-stone-700" "1. Suspense component")
|
||||
(p "When streaming, the server renders the page with " (code "~suspense") " placeholders instead of awaiting IO:")
|
||||
(~doc-code :code (highlight "(~app-body\n :header-rows (~suspense :id \"stream-headers\"\n :fallback (div :class \"h-12 bg-stone-200 animate-pulse\"))\n :content (~suspense :id \"stream-content\"\n :fallback (div :class \"p-8 animate-pulse\" ...)))" "lisp")))
|
||||
|
||||
(div
|
||||
(h4 :class "font-semibold text-stone-700" "2. Chunked transfer")
|
||||
(p "Quart async generator responses:")
|
||||
(ul :class "list-disc pl-5 text-stone-700 space-y-1"
|
||||
(li "First chunk: HTML shell + synchronous content + placeholders")
|
||||
(li "Subsequent chunks: <script> tags replacing placeholders with resolved content")))
|
||||
(p "Quart async generator response yields chunks in order:")
|
||||
(ol :class "list-decimal pl-5 text-stone-700 space-y-1"
|
||||
(li "HTML shell + CSS + component defs + page registry + suspense page SX + scripts (immediate)")
|
||||
(li "Resolution " (code "<script>") " tags as each IO completes")
|
||||
(li "Closing " (code "</body></html>"))))
|
||||
|
||||
(div
|
||||
(h4 :class "font-semibold text-stone-700" "3. Client suspension rendering")
|
||||
(p "~suspense component renders fallback, listens for resolution via inline script or SSE (existing SSE infrastructure in orchestration.sx)."))
|
||||
(h4 :class "font-semibold text-stone-700" "3. Client resolution")
|
||||
(p "Each resolution chunk is an inline script:")
|
||||
(~doc-code :code (highlight "<script>\n window.__sxResolve(\"stream-content\",\n \"(~article :title \\\"Hello\\\")\")\n</script>" "html"))
|
||||
(p "The client parses the SX, renders to DOM, and replaces the suspense placeholder's children."))
|
||||
|
||||
(div
|
||||
(h4 :class "font-semibold text-stone-700" "4. Priority-based IO")
|
||||
(p "Above-fold content resolves first. All IO starts concurrently (asyncio.create_task), results flushed in priority order."))))
|
||||
(h4 :class "font-semibold text-stone-700" "4. Concurrent IO")
|
||||
(p "Data evaluation and header construction run in parallel. " (code "asyncio.wait(FIRST_COMPLETED)") " yields resolution chunks in whatever order IO completes — no artificial sequencing."))))
|
||||
|
||||
(div :class "rounded border border-amber-200 bg-amber-50 p-3 mt-2"
|
||||
(p :class "text-amber-800 text-sm" (strong "Depends on: ") "Phase 5 (IO proxy for client rendering), async-aware delimited continuations (for server-side suspension), Phase 2 (IO analysis for priority).")))
|
||||
(~doc-subsection :title "Continuation foundation"
|
||||
(p "Delimited continuations (" (code "reset") "/" (code "shift") ") are implemented in the Python evaluator (async_eval.py lines 586-624) and available as special forms. Phase 6 uses the simpler pattern of concurrent IO + completion-order streaming, but the continuation machinery is in place for Phase 7's more sophisticated evaluation-level suspension."))
|
||||
|
||||
(~doc-subsection :title "Files"
|
||||
(ul :class "list-disc pl-5 text-stone-700 space-y-1 font-mono text-sm"
|
||||
(li "shared/sx/templates/pages.sx — ~suspense component definition")
|
||||
(li "shared/sx/types.py — PageDef.stream, PageDef.fallback_expr fields")
|
||||
(li "shared/sx/evaluator.py — defpage :stream/:fallback parsing")
|
||||
(li "shared/sx/pages.py — execute_page_streaming(), streaming route mounting")
|
||||
(li "shared/sx/helpers.py — sx_page_streaming_parts(), sx_streaming_resolve_script()")
|
||||
(li "shared/sx/ref/boot.sx — resolve-suspense spec (canonical)")
|
||||
(li "shared/sx/ref/bootstrap_js.py — resolveSuspense on Sx object, __sxPending/Resolve init")
|
||||
(li "shared/static/scripts/sx-browser.js — bootstrapped output (DO NOT EDIT)")
|
||||
(li "shared/sx/async_eval.py — reset/shift special forms (continuation foundation)")
|
||||
(li "sx/sx/streaming-demo.sx — demo content component")
|
||||
(li "sx/sxc/pages/docs.sx — streaming-demo defpage")
|
||||
(li "sx/sxc/pages/helpers.py — streaming-demo-data page helper")))
|
||||
|
||||
(~doc-subsection :title "Demonstration"
|
||||
(p "The " (a :href "/isomorphism/streaming" :class "text-violet-700 underline" "streaming demo page") " exercises the full pipeline:")
|
||||
(ol :class "list-decimal pl-5 text-stone-700 space-y-1"
|
||||
(li "Navigate to " (a :href "/isomorphism/streaming" :class "text-violet-700 underline" "/isomorphism/streaming"))
|
||||
(li "The page skeleton appears " (strong "instantly") " — animated loading skeletons fill the content area")
|
||||
(li "After ~1.5 seconds, the real content replaces the skeletons (streamed from server)")
|
||||
(li "Open the Network tab — observe " (code "Transfer-Encoding: chunked") " on the document response")
|
||||
(li "The document response shows multiple chunks arriving over time: shell first, then resolution scripts")))
|
||||
|
||||
(~doc-subsection :title "What to verify"
|
||||
(ul :class "list-disc pl-5 text-stone-700 space-y-1"
|
||||
(li (strong "Instant shell: ") "The page HTML arrives immediately — no waiting for the 1.5s data fetch")
|
||||
(li (strong "Suspense placeholders: ") "The " (code "~suspense") " component renders a " (code "data-suspense") " wrapper with animated fallback content")
|
||||
(li (strong "Resolution: ") "The " (code "__sxResolve()") " inline script replaces the placeholder with real rendered content")
|
||||
(li (strong "Chunked encoding: ") "Network tab shows the document as a chunked response with multiple frames")
|
||||
(li (strong "Concurrent IO: ") "Header and content resolve independently — whichever finishes first appears first")
|
||||
(li (strong "HTMX fallback: ") "SX/HTMX requests bypass streaming and receive a standard response"))))
|
||||
|
||||
;; -----------------------------------------------------------------------
|
||||
;; Phase 7
|
||||
@@ -2172,3 +2220,239 @@
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/ref/boundary.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "Add CI primitive declarations"))))))))
|
||||
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
;; CSSX Components
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~plan-cssx-components-content ()
|
||||
(~doc-page :title "CSSX Components"
|
||||
|
||||
(~doc-section :title "Context" :id "context"
|
||||
(p "SX currently has a parallel CSS system: a style dictionary (JSON blob of atom-to-declaration mappings), a " (code "StyleValue") " type threaded through the evaluator and renderer, content-addressed hash class names (" (code "sx-a3f2b1") "), runtime CSS injection into " (code "<style id=\"sx-css\">") ", and a separate caching pipeline (" (code "<script type=\"text/sx-styles\">") ", localStorage, cookies).")
|
||||
(p "This is ~300 lines of spec code (cssx.sx) plus platform interface (hash, regex, injection), plus server-side infrastructure (css_registry.py, tw.css parsing). All to solve one problem: " (em "resolving keyword atoms like ") (code ":flex :gap-4 :hover:bg-sky-200") (em " into CSS at render time."))
|
||||
(p "The result: elements in the DOM get opaque class names like " (code "class=\"sx-a3f2b1\"") ". DevTools becomes useless. You can't inspect an element and understand its styling. " (strong "This is a deal breaker.")))
|
||||
|
||||
(~doc-section :title "The Idea" :id "idea"
|
||||
(p (strong "Styling is just components.") " A CSSX component is a regular " (code "defcomp") " that decides how to style its children. It might apply Tailwind classes, or hand-written CSS classes, or inline styles, or generate rules at runtime. The implementation is the component's private business. The consumer just calls " (code "(~btn :variant \"primary\" \"Submit\")") " and doesn't care.")
|
||||
(p "Because it's " (code "defcomp") ", you get everything for free: caching, bundling, dependency scanning, server/client rendering, composition. No parallel infrastructure.")
|
||||
(p "Key advantages:")
|
||||
(ul :class "list-disc pl-5 space-y-1 text-stone-700"
|
||||
(li (strong "Readable DOM: ") "Elements have real class names, not content-addressed hashes. DevTools works.")
|
||||
(li (strong "Data-driven styling: ") "Components receive data and decide styling. " (code "(~metric :value 150)") " renders red because " (code "value > 100") " — logic lives in the component, not a CSS preprocessor.")
|
||||
(li (strong "One system: ") "No separate " (code "StyleValue") " type, no style dictionary JSON, no " (code "<script type=\"text/sx-styles\">") ", no " (code "sx-css") " injection. Components ARE the styling abstraction.")
|
||||
(li (strong "One cache: ") "Component hash/localStorage handles everything. No separate style dict caching.")
|
||||
(li (strong "Composable: ") (code "(~card :elevated true (~metric :value v))") " — styling composes like any other component.")
|
||||
(li (strong "Strategy-agnostic: ") "A component can apply Tailwind classes, emit " (code "<style>") " blocks, use inline styles, generate CSS custom properties, or any combination. The consumer never knows or cares. Swap strategies without touching call sites.")))
|
||||
|
||||
(~doc-section :title "Examples" :id "examples"
|
||||
(~doc-subsection :title "Simple class mapping"
|
||||
(p "A button component that maps variant keywords to class strings:")
|
||||
(highlight
|
||||
"(defcomp ~btn (&key variant disabled &rest children)\n (button\n :class (str \"px-4 py-2 rounded font-medium transition \"\n (case variant\n \"primary\" \"bg-blue-600 text-white hover:bg-blue-700\"\n \"danger\" \"bg-red-600 text-white hover:bg-red-700\"\n \"ghost\" \"bg-transparent hover:bg-stone-100\"\n \"bg-stone-200 hover:bg-stone-300\")\n (when disabled \" opacity-50 cursor-not-allowed\"))\n :disabled disabled\n children))"
|
||||
"lisp"))
|
||||
|
||||
(~doc-subsection :title "Data-driven styling"
|
||||
(p "Styling that responds to data values — impossible with static CSS:")
|
||||
(highlight
|
||||
"(defcomp ~metric (&key value label threshold)\n (let ((t (or threshold 10)))\n (div :class (str \"p-3 rounded font-bold \"\n (cond\n ((> value (* t 10)) \"bg-red-500 text-white\")\n ((> value t) \"bg-amber-200 text-amber-900\")\n (:else \"bg-green-100 text-green-800\")))\n (span :class \"text-sm\" label) \": \" (span (str value)))))"
|
||||
"lisp"))
|
||||
|
||||
(~doc-subsection :title "Style functions"
|
||||
(p "Reusable style logic without wrapping — returns class strings:")
|
||||
(highlight
|
||||
"(define card-classes\n (fn (&key elevated bordered)\n (str \"rounded-lg p-4 \"\n (if elevated \"shadow-lg\" \"shadow-sm\")\n (when bordered \" border border-stone-200\"))))\n\n;; Usage: (div :class (card-classes :elevated true) ...)"
|
||||
"lisp"))
|
||||
|
||||
(~doc-subsection :title "Responsive and interactive"
|
||||
(p "Components can encode responsive breakpoints and interactive states as class strings — the same way you'd write Tailwind, but wrapped in a semantic component:")
|
||||
(highlight
|
||||
"(defcomp ~responsive-grid (&key cols &rest children)\n (div :class (str \"grid gap-4 \"\n (case (or cols 3)\n 1 \"grid-cols-1\"\n 2 \"grid-cols-1 md:grid-cols-2\"\n 3 \"grid-cols-1 md:grid-cols-2 lg:grid-cols-3\"\n 4 \"grid-cols-2 md:grid-cols-3 lg:grid-cols-4\"))\n children))"
|
||||
"lisp"))
|
||||
|
||||
(~doc-subsection :title "Emitting CSS directly"
|
||||
(p "Components are not limited to referencing existing classes. They can generate CSS — " (code "<style>") " tags, keyframes, custom properties — as part of their output:")
|
||||
(highlight
|
||||
"(defcomp ~pulse (&key color duration &rest children)\n (<>\n (style (str \"@keyframes sx-pulse {\"\n \"0%,100% { opacity:1 } 50% { opacity:.5 } }\"))\n (div :style (str \"animation: sx-pulse \" (or duration \"2s\") \" infinite;\"\n \"color:\" (or color \"inherit\"))\n children)))\n\n(defcomp ~theme (&key primary surface &rest children)\n (<>\n (style (str \":root {\"\n \"--color-primary:\" (or primary \"#7c3aed\") \";\"\n \"--color-surface:\" (or surface \"#fafaf9\") \"}\"))\n children))"
|
||||
"lisp")
|
||||
(p "The CSS strategy is the component's private implementation detail. Consumers call " (code "(~pulse :color \"red\" \"Loading...\")") " or " (code "(~theme :primary \"#2563eb\" ...)") " without knowing or caring whether the component uses classes, inline styles, generated rules, or all three.")))
|
||||
|
||||
(~doc-section :title "What Changes" :id "changes"
|
||||
|
||||
(~doc-subsection :title "Remove"
|
||||
(ul :class "list-disc pl-5 space-y-1 text-stone-700"
|
||||
(li (code "StyleValue") " type and all plumbing (type checks in eval, render, serialize)")
|
||||
(li (code "cssx.sx") " spec module (~300 lines: resolve-style, resolve-atom, split-variant, hash, injection)")
|
||||
(li "Style dictionary JSON format, loading, caching (" (code "<script type=\"text/sx-styles\">") ", " (code "initStyleDict") ", " (code "parseAndLoadStyleDict") ")")
|
||||
(li (code "<style id=\"sx-css\">") " runtime CSS injection system")
|
||||
(li (code "css_registry.py") " server-side (builds style dictionary from tw.css)")
|
||||
(li "Style dict cookies (" (code "sx-styles-hash") "), localStorage keys (" (code "sx-styles-src") ")")
|
||||
(li "Platform interface: " (code "fnv1a-hash") ", " (code "compile-regex") ", " (code "regex-match") ", " (code "regex-replace-groups") ", " (code "make-style-value") ", " (code "inject-style-value"))))
|
||||
|
||||
(~doc-subsection :title "Keep"
|
||||
(ul :class "list-disc pl-5 space-y-1 text-stone-700"
|
||||
(li (code "defstyle") " — already just " (code "(defstyle name expr)") " which binds name to a value. Stays as sugar for defining reusable style values/functions. No " (code "StyleValue") " type needed — the value can be a string, a function, anything.")
|
||||
(li (code "defkeyframes") " — could stay if we want declarative keyframe definitions. Or could become a component/function too.")
|
||||
(li (code "tw.css") " — the compiled Tailwind stylesheet. Components reference its classes directly. No runtime resolution needed.")
|
||||
(li (code ":class") " attribute — just takes strings now, no " (code "StyleValue") " special-casing.")))
|
||||
|
||||
(~doc-subsection :title "Add"
|
||||
(p "Nothing new to the spec. CSSX components are just " (code "defcomp") ". The only new thing is a convention: components whose primary purpose is styling. They live in the same component files, cache the same way, bundle the same way.")))
|
||||
|
||||
(~doc-section :title "Migration" :id "migration"
|
||||
(p "The existing codebase uses " (code ":class") " with plain Tailwind strings everywhere already. The CSSX style dictionary was an alternative path that was never widely adopted. Migration is mostly deletion:")
|
||||
(ol :class "list-decimal pl-5 space-y-2 text-stone-700"
|
||||
(li "Remove " (code "StyleValue") " type from " (code "types.py") ", " (code "render.sx") ", " (code "eval.sx") ", bootstrappers")
|
||||
(li "Remove " (code "cssx.sx") " from spec modules and bootstrapper")
|
||||
(li "Remove " (code "css_registry.py") " and style dict generation pipeline")
|
||||
(li "Remove style dict loading from " (code "boot.sx") " (" (code "initStyleDict") ", " (code "queryStyleScripts") ")")
|
||||
(li "Remove style-related cookies and localStorage from " (code "boot.sx") " platform interface")
|
||||
(li "Remove " (code "StyleValue") " special-casing from " (code "render-attrs") " in " (code "render.sx") " and DOM adapter")
|
||||
(li "Simplify " (code ":class") " / " (code ":style") " attribute handling — just strings")
|
||||
(li "Convert any existing " (code "defstyle") " uses to return plain class strings instead of " (code "StyleValue") " objects"))
|
||||
(p :class "mt-4 text-stone-600 italic" "Net effect: hundreds of lines of spec and infrastructure removed, zero new lines added. The component system already does everything CSSX was trying to do."))
|
||||
|
||||
(~doc-section :title "Relationship to Other Plans" :id "relationships"
|
||||
(ul :class "list-disc pl-5 space-y-1 text-stone-700"
|
||||
(li (strong "Content-Addressed Components: ") "CSSX components get CIDs like any other component. A " (code "~btn") " from one site can be shared to another via IPFS. No " (code ":css-atoms") " manifest field needed — the component carries its own styling logic.")
|
||||
(li (strong "Isomorphic Rendering: ") "Components render the same on server and client. No style injection timing issues, no FOUC from late CSS loading.")
|
||||
(li (strong "Component Bundling: ") "deps.sx already handles transitive component deps. Style components are just more components in the bundle — no separate style bundling.")))
|
||||
|
||||
(~doc-section :title "Comparison with CSS Technologies" :id "comparison"
|
||||
(p "CSSX components share DNA with several existing approaches but avoid the problems that make each one painful at scale.")
|
||||
|
||||
(~doc-subsection :title "styled-components / Emotion"
|
||||
(p (a :href "https://styled-components.com" :class "text-violet-600 hover:underline" "styled-components") " pioneered the idea that styling belongs in components. But it generates CSS at runtime, injects " (code "<style>") " tags, and produces opaque hashed class names (" (code "class=\"sc-bdfBwQ fNMpVx\"") "). Open DevTools and you see gibberish. It also carries significant runtime cost — parsing CSS template literals, hashing, deduplicating — and needs a separate SSR extraction step (" (code "ServerStyleSheet") ").")
|
||||
(p "CSSX components share the core insight (" (em "styling is a component concern") ") but without the runtime machinery. When a component applies Tailwind classes, there's zero CSS generation overhead. When it does emit " (code "<style>") " blocks, it's explicit — not hidden behind a tagged template literal. And the DOM is always readable."))
|
||||
|
||||
(~doc-subsection :title "CSS Modules"
|
||||
(p (a :href "https://github.com/css-modules/css-modules" :class "text-violet-600 hover:underline" "CSS Modules") " scope class names to avoid collisions by rewriting them at build time: " (code ".button") " becomes " (code ".button_abc123") ". This solves the global namespace problem but creates the same opacity issue — hashed names in the DOM that you can't grep for or reason about.")
|
||||
(p "CSSX components don't need scoping because component boundaries already provide isolation. A " (code "~btn") " owns its markup. There's nothing to collide with."))
|
||||
|
||||
(~doc-subsection :title "Tailwind CSS"
|
||||
(p "Tailwind is " (em "complementary") ", not competitive. CSSX components are the semantic layer on top. Raw Tailwind in markup — " (code ":class \"px-4 py-2 bg-blue-600 text-white font-medium rounded hover:bg-blue-700\"") " — is powerful but verbose and duplicated across call sites.")
|
||||
(p "A CSSX component wraps that string once: " (code "(~btn :variant \"primary\" \"Submit\")") ". The Tailwind classes are still there, readable in DevTools, but consumers don't repeat them. This is the same pattern Tailwind's own docs recommend (" (em "\"extracting components\"") ") — CSSX components are just SX's native way of doing it."))
|
||||
|
||||
(~doc-subsection :title "Vanilla Extract"
|
||||
(p (a :href "https://vanilla-extract.style" :class "text-violet-600 hover:underline" "Vanilla Extract") " is zero-runtime CSS-in-JS: styles are written in TypeScript, compiled to static CSS at build time, and referenced by generated class names. It avoids the runtime cost of styled-components but still requires a build step, a bundler plugin, and TypeScript. The generated class names are again opaque.")
|
||||
(p "CSSX components need no build step for styling — they're evaluated at render time like any other component. And since the component chooses its own strategy, it can reference pre-built classes (zero runtime) " (em "or") " generate CSS on the fly — same API either way."))
|
||||
|
||||
(~doc-subsection :title "Design Tokens / Style Dictionary"
|
||||
(p "The " (a :href "https://amzn.github.io/style-dictionary/" :class "text-violet-600 hover:underline" "Style Dictionary") " pattern — a JSON/YAML file mapping token names to values, compiled to platform-specific output — is essentially what the old CSSX was. It's the industry standard for design systems.")
|
||||
(p "The problem is that it's a parallel system: separate file format, separate build pipeline, separate caching, separate tooling. CSSX components eliminate all of that by expressing tokens as component parameters: " (code "(~theme :primary \"#7c3aed\")") " instead of " (code "{\"color\": {\"primary\": {\"value\": \"#7c3aed\"}}}") ". Same result, no parallel infrastructure.")))
|
||||
|
||||
(~doc-section :title "Philosophy" :id "philosophy"
|
||||
(p "The web has spent two decades building increasingly complex CSS tooling: preprocessors, CSS-in-JS, atomic CSS, utility frameworks, design tokens, style dictionaries. Each solves a real problem but adds a new system with its own caching, bundling, and mental model.")
|
||||
(p "CSSX components collapse all of this back to the simplest possible thing: " (strong "a function that takes data and returns markup with classes.") " That's what a component already is. There is no separate styling system because there doesn't need to be."))))
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
;; Live Streaming — SSE & WebSocket
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~plan-live-streaming-content ()
|
||||
(~doc-page :title "Live Streaming"
|
||||
|
||||
(~doc-section :title "Context" :id "context"
|
||||
(p "SX streaming currently uses chunked transfer encoding: the server sends an HTML shell with "
|
||||
(code "~suspense") " placeholders, then resolves each one via inline "
|
||||
(code "<script>__sxResolve(id, sx)</script>") " chunks as IO completes. "
|
||||
"Once the response finishes, the connection closes. Each slot resolves exactly once.")
|
||||
(p "This is powerful for initial page load but doesn't support live updates "
|
||||
"— dashboard metrics, chat messages, collaborative editing, real-time notifications. "
|
||||
"For that we need a persistent transport: " (strong "SSE") " (Server-Sent Events) or " (strong "WebSockets") ".")
|
||||
(p "The key insight: the client already has " (code "Sx.resolveSuspense(id, sxSource)") " which replaces "
|
||||
"DOM content by suspense ID. A persistent connection just needs to keep calling it."))
|
||||
|
||||
(~doc-section :title "Design" :id "design"
|
||||
|
||||
(~doc-subsection :title "Transport Hierarchy"
|
||||
(p "Three tiers, progressively more capable:")
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li (strong "Chunked streaming") " (done) — single HTTP response, each suspense resolves once. "
|
||||
"Best for: initial page load with slow IO.")
|
||||
(li (strong "SSE") " — persistent one-way connection, server pushes resolve events. "
|
||||
"Best for: dashboards, notifications, progress bars, any read-only live data.")
|
||||
(li (strong "WebSocket") " — bidirectional, client can send events back. "
|
||||
"Best for: chat, collaborative editing, interactive applications.")))
|
||||
|
||||
(~doc-subsection :title "SSE Protocol"
|
||||
(p "A " (code "~live") " component declares a persistent connection to an SSE endpoint:")
|
||||
(~doc-code :code (highlight "(~live :src \"/api/stream/dashboard\"\n (~suspense :id \"cpu\" :fallback (span \"Loading...\"))\n (~suspense :id \"memory\" :fallback (span \"Loading...\"))\n (~suspense :id \"requests\" :fallback (span \"Loading...\")))" "lisp"))
|
||||
(p "The server SSE endpoint yields SX resolve events:")
|
||||
(~doc-code :code (highlight "async def dashboard_stream():\n while True:\n stats = await get_system_stats()\n yield sx_sse_event(\"cpu\", f'(~stat-badge :value \"{stats.cpu}%\")')\n yield sx_sse_event(\"memory\", f'(~stat-badge :value \"{stats.mem}%\")')\n await asyncio.sleep(1)" "python"))
|
||||
(p "SSE wire format — each event is a suspense resolve:")
|
||||
(~doc-code :code (highlight "event: sx-resolve\ndata: {\"id\": \"cpu\", \"sx\": \"(~stat-badge :value \\\"42%\\\")\"}\n\nevent: sx-resolve\ndata: {\"id\": \"memory\", \"sx\": \"(~stat-badge :value \\\"68%\\\")\"}" "text")))
|
||||
|
||||
(~doc-subsection :title "WebSocket Protocol"
|
||||
(p "A " (code "~ws") " component establishes a bidirectional channel:")
|
||||
(~doc-code :code (highlight "(~ws :src \"/ws/chat\"\n :on-message handle-chat-message\n (~suspense :id \"messages\" :fallback (div \"Connecting...\"))\n (~suspense :id \"typing\" :fallback (span)))" "lisp"))
|
||||
(p "Client can send SX expressions back:")
|
||||
(~doc-code :code (highlight ";; Client sends:\n(sx-send ws-conn '(chat-message :text \"hello\" :user \"alice\"))\n\n;; Server receives, broadcasts to all connected clients:\n;; event: sx-resolve for \"messages\" suspense" "lisp")))
|
||||
|
||||
(~doc-subsection :title "Shared Resolution Mechanism"
|
||||
(p "All three transports use the same client-side resolution:")
|
||||
(ul :class "list-disc list-inside space-y-1 text-stone-600 text-sm"
|
||||
(li (code "Sx.resolveSuspense(id, sxSource)") " — already exists, parses SX and renders to DOM")
|
||||
(li "SSE: " (code "EventSource") " → " (code "onmessage") " → " (code "resolveSuspense()"))
|
||||
(li "WS: " (code "WebSocket") " → " (code "onmessage") " → " (code "resolveSuspense()"))
|
||||
(li "The component env (defs needed for rendering) can be sent once on connection open")
|
||||
(li "Subsequent events only need the SX expression — lightweight wire format"))))
|
||||
|
||||
(~doc-section :title "Implementation" :id "implementation"
|
||||
|
||||
(~doc-subsection :title "Phase 1: SSE Infrastructure"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Add " (code "~live") " component to " (code "shared/sx/templates/") " — renders child suspense placeholders, "
|
||||
"emits " (code "data-sx-live") " attribute with SSE endpoint URL")
|
||||
(li "Add " (code "sx-live.js") " client module — on boot, finds " (code "[data-sx-live]") " elements, "
|
||||
"opens EventSource, routes events to " (code "resolveSuspense()"))
|
||||
(li "Add " (code "sx_sse_event(id, sx)") " helper for Python SSE endpoints — formats SSE wire protocol")
|
||||
(li "Add " (code "sse_stream()") " Quart helper — returns async generator Response with correct headers")))
|
||||
|
||||
(~doc-subsection :title "Phase 2: Defpage Integration"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "New " (code ":live") " defpage slot — declares SSE endpoint + suspense bindings")
|
||||
(li "Auto-mount SSE endpoint alongside the page route")
|
||||
(li "Component defs sent as first SSE event on connection open")
|
||||
(li "Automatic reconnection with exponential backoff")))
|
||||
|
||||
(~doc-subsection :title "Phase 3: WebSocket"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Add " (code "~ws") " component — bidirectional channel with send/receive")
|
||||
(li "Add " (code "sx-ws.js") " client module — WebSocket management, message routing")
|
||||
(li "Server-side: Quart WebSocket handlers that receive and broadcast SX events")
|
||||
(li "Client-side: " (code "sx-send") " primitive for sending SX expressions to server")))
|
||||
|
||||
(~doc-subsection :title "Phase 4: Spec & Boundary"
|
||||
(ol :class "list-decimal list-inside space-y-2 text-stone-700 text-sm"
|
||||
(li "Spec " (code "~live") " and " (code "~ws") " in " (code "render.sx") " (how they render in each mode)")
|
||||
(li "Add SSE/WS IO primitives to " (code "boundary.sx"))
|
||||
(li "Bootstrap SSE/WS connection management into " (code "sx-ref.js"))
|
||||
(li "Spec-level tests for resolve, reconnection, and message routing"))))
|
||||
|
||||
(~doc-section :title "Files" :id "files"
|
||||
(table :class "w-full text-left border-collapse"
|
||||
(thead
|
||||
(tr :class "border-b border-stone-200"
|
||||
(th :class "px-3 py-2 font-medium text-stone-600" "File")
|
||||
(th :class "px-3 py-2 font-medium text-stone-600" "Purpose")))
|
||||
(tbody
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/templates/live.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "~live component definition"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/static/scripts/sx-live.js")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE client — EventSource → resolveSuspense"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/sse.py")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE helpers — event formatting, stream response"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/static/scripts/sx-ws.js")
|
||||
(td :class "px-3 py-2 text-stone-700" "WebSocket client — bidirectional SX channel"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/ref/render.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "Spec: ~live and ~ws rendering in all modes"))
|
||||
(tr :class "border-b border-stone-100"
|
||||
(td :class "px-3 py-2 font-mono text-sm text-violet-700" "shared/sx/ref/boundary.sx")
|
||||
(td :class "px-3 py-2 text-stone-700" "SSE/WS IO primitive declarations")))))))
|
||||
|
||||
|
||||
93
sx/sx/streaming-demo.sx
Normal file
93
sx/sx/streaming-demo.sx
Normal file
@@ -0,0 +1,93 @@
|
||||
;; Streaming & Suspense demo — Phase 6
|
||||
;;
|
||||
;; This page uses :stream true to enable chunked transfer encoding.
|
||||
;; The browser receives the HTML shell immediately with loading skeletons,
|
||||
;; then content fills in as each IO resolves at staggered intervals.
|
||||
;;
|
||||
;; The :data expression is an async generator that yields three chunks
|
||||
;; at 1s, 3s, and 5s. Each chunk resolves a different ~suspense slot.
|
||||
|
||||
;; Color map for stream chunk styling (all string keys for get compatibility)
;;
;; One entry per demo chunk color ("green" / "blue" / "amber"); each entry
;; maps a role name to a Tailwind utility class so ~streaming-demo-chunk can
;; look classes up with (get colors "border"), (get colors "bg"), and so on.
;; Roles: "border"/"bg" for the card, "title"/"text"/"sub" for typography,
;; "code" for the inline timestamp chip, "dot" for the status indicator.
(define stream-colors
  {"green" {"border" "border-green-200" "bg" "bg-green-50" "title" "text-green-900"
            "text" "text-green-800" "sub" "text-green-700" "code" "bg-green-100"
            "dot" "bg-green-400"}
   "blue" {"border" "border-blue-200" "bg" "bg-blue-50" "title" "text-blue-900"
           "text" "text-blue-800" "sub" "text-blue-700" "code" "bg-blue-100"
           "dot" "bg-blue-400"}
   "amber" {"border" "border-amber-200" "bg" "bg-amber-50" "title" "text-amber-900"
            "text" "text-amber-800" "sub" "text-amber-700" "code" "bg-amber-100"
            "dot" "bg-amber-400"}})
|
||||
|
||||
;; Generic streamed content chunk — rendered once per yield from the
;; async generator. The :content expression receives different bindings
;; each time, and the _stream_id determines which ~suspense slot it fills.
;;
;; NOTE(review): the server-side generator (_streaming_demo_data) yields the
;; key "stream-id" (hyphenated, no leading underscore) while this comment and
;; the page docs say "_stream_id" — confirm which spelling the streaming
;; infrastructure actually matches on.
;;
;; Keyword args:
;;   stream-label   — heading text for the card
;;   stream-color   — key into stream-colors ("green" / "blue" / "amber")
;;   stream-message — body message
;;   stream-time    — resolution timestamp string, shown in a code chip
(defcomp ~streaming-demo-chunk (&key stream-label stream-color stream-message stream-time)
  ;; Resolve the Tailwind palette for this chunk's color once.
  (let ((colors (get stream-colors stream-color)))
    (div :class (str "rounded-lg border p-5 space-y-3 " (get colors "border") " " (get colors "bg"))
      ;; Header row: colored status dot + chunk label.
      (div :class "flex items-center gap-2"
        (div :class (str "w-3 h-3 rounded-full " (get colors "dot")))
        (h2 :class (str "text-lg font-semibold " (get colors "title")) stream-label))
      ;; Body message, then the timestamp of when this chunk resolved.
      (p :class (get colors "text") stream-message)
      (p :class (str "text-sm " (get colors "sub"))
        "Resolved at: " (code :class (str "px-1 rounded " (get colors "code")) stream-time)))))
|
||||
|
||||
;; Skeleton placeholder for a stream slot
;;
;; Pulsing grey boxes (Tailwind animate-pulse) shown as the ~suspense
;; fallback while the corresponding streamed chunk is still pending.
;; Mirrors the shape of ~streaming-demo-chunk: header row (dot + title bar)
;; followed by two body lines.
(defcomp ~stream-skeleton ()
  (div :class "rounded-lg border border-stone-200 bg-stone-50 p-5 space-y-3 animate-pulse"
    ;; Fake header: dot + title bar.
    (div :class "flex items-center gap-2"
      (div :class "w-3 h-3 rounded-full bg-stone-300")
      (div :class "h-6 bg-stone-200 rounded w-1/3"))
    ;; Fake body lines.
    (div :class "h-4 bg-stone-200 rounded w-2/3")
    (div :class "h-4 bg-stone-200 rounded w-1/2")))
|
||||
|
||||
;; Static layout — takes &rest children where the three suspense slots go.
;;
;; Renders the streaming demo page shell: page header, the suspense slot
;; grid (children), a step-by-step flow diagram, a "How Multi-Stream Works"
;; explainer, and an implementation-details footnote. Everything except
;; `children` is static markup that streams out with the initial shell.
(defcomp ~streaming-demo-layout (&rest children)
  (div :class "space-y-8"
    ;; Page header.
    (div :class "border-b border-stone-200 pb-6"
      (h1 :class "text-2xl font-bold text-stone-900" "Streaming & Suspense Demo")
      (p :class "mt-2 text-stone-600"
        "This page uses " (code :class "bg-stone-100 px-1 rounded text-violet-700" ":stream true")
        " in its defpage declaration. The browser receives the page skeleton instantly, "
        "then three IO sources resolve at staggered intervals (1s, 3s, 5s)."))

    ;; Slot: suspense placeholders (or resolved content)
    (div :class "grid gap-4" children)

    ;; Flow diagram
    ;; NOTE(review): the items below use keyword keys (:label/:detail) but are
    ;; read back with string keys via (get item "label") — stream-colors above
    ;; deliberately uses string keys "for get compatibility"; confirm keyword
    ;; lookup by string works here.
    (div :class "space-y-4"
      (h2 :class "text-lg font-semibold text-stone-800" "Streaming Flow")
      (div :class "grid gap-3"
        (map (fn (item)
               (div :class "flex items-start gap-3 rounded-lg border border-stone-200 bg-white p-4"
                 ;; Step badge (e.g. "Shell", "1s") in a violet circle.
                 (div :class "flex-shrink-0 w-8 h-8 rounded-full bg-violet-100 flex items-center justify-center text-violet-700 font-bold text-sm"
                   (get item "label"))
                 (p :class "text-stone-700 text-sm pt-1" (get item "detail"))))
          (list
            {:label "Shell" :detail "HTML shell with three suspense placeholders sent immediately"}
            {:label "Boot" :detail "sx-browser.js loads, renders fallback skeletons"}
            {:label "1s" :detail "Fast API responds — first skeleton replaced with green box"}
            {:label "3s" :detail "Database query completes — second skeleton replaced with blue box"}
            {:label "5s" :detail "ML inference finishes — third skeleton replaced with amber box"}))))

    ;; How it works
    (div :class "rounded-lg border border-violet-200 bg-violet-50 p-5 space-y-3"
      (h2 :class "text-lg font-semibold text-violet-900" "How Multi-Stream Works")
      (ol :class "list-decimal list-inside text-violet-800 space-y-2 text-sm"
        (li "Server evaluates " (code ":data") " — gets an " (em "async generator"))
        (li "HTML shell with three " (code "~suspense") " placeholders sent immediately")
        (li "Generator yields first chunk after 1s — server sends " (code "__sxResolve(\"stream-fast\", ...)"))
        (li "Generator yields second chunk after 3s — " (code "__sxResolve(\"stream-medium\", ...)"))
        (li "Generator yields third chunk after 5s — " (code "__sxResolve(\"stream-slow\", ...)"))
        (li "Each resolve replaces its skeleton independently")))

    ;; Technical details
    ;; NOTE(review): the "_stream_id" wording below disagrees with the actual
    ;; generator key "stream-id" — verify against the streaming infrastructure.
    (div :class "rounded-lg border border-stone-200 bg-stone-50 p-4 text-sm space-y-2"
      (p :class "font-semibold text-stone-800" "Implementation details")
      (ul :class "list-disc list-inside text-stone-600 space-y-1"
        (li (code "defpage :stream true") " — opts the page into chunked transfer encoding")
        (li (code ":data") " helper is an async generator — each " (code "yield") " resolves a different suspense slot")
        (li "Each yield includes " (code "_stream_id") " matching a " (code "~suspense :id") " in the shell")
        (li (code ":content") " expression is re-evaluated with each yield's bindings")
        (li "Headers stream concurrently — independent of the data generator")
        (li "Future: SSE/WebSocket for re-resolving slots after initial page load")))))
|
||||
@@ -6,7 +6,7 @@
|
||||
;; Overview page
|
||||
;; ---------------------------------------------------------------------------
|
||||
|
||||
(defcomp ~testing-overview-content (&key server-results framework-source eval-source parser-source router-source render-source)
|
||||
(defcomp ~testing-overview-content (&key server-results framework-source eval-source parser-source router-source render-source deps-source engine-source)
|
||||
(~doc-page :title "Testing"
|
||||
(div :class "space-y-8"
|
||||
|
||||
@@ -35,6 +35,8 @@
|
||||
+-- test-parser.sx 39 tests: tokenizer, parser, serializer
|
||||
+-- test-router.sx 18 tests: route matching + param extraction
|
||||
+-- test-render.sx 23 tests: HTML rendering + components
|
||||
+-- test-deps.sx 33 tests: dependency analysis + IO detection
|
||||
+-- test-engine.sx 37 tests: trigger/swap/retry parsing
|
||||
|
||||
Runners:
|
||||
run.js Node.js — injects platform fns, runs specs
|
||||
@@ -51,7 +53,9 @@ Platform functions (5 total):
|
||||
Per-spec platform functions:
|
||||
parser: sx-parse, sx-serialize, make-symbol, make-keyword, ...
|
||||
router: (none — pure spec, uses bootstrapped functions)
|
||||
render: render-html (wraps parse + render-to-html)")))
|
||||
render: render-html (wraps parse + render-to-html)
|
||||
deps: test-env (returns current evaluation environment)
|
||||
engine: (none — pure spec, uses bootstrapped functions)")))
|
||||
|
||||
;; Server results
|
||||
(when server-results
|
||||
@@ -86,6 +90,8 @@ Per-spec platform functions:
|
||||
(textarea :id "test-spec-parser" :style "display:none" parser-source)
|
||||
(textarea :id "test-spec-router" :style "display:none" router-source)
|
||||
(textarea :id "test-spec-render" :style "display:none" render-source)
|
||||
(textarea :id "test-spec-deps" :style "display:none" deps-source)
|
||||
(textarea :id "test-spec-engine" :style "display:none" engine-source)
|
||||
(script :src (asset-url "/scripts/sx-test-runner.js")))
|
||||
|
||||
;; Test spec index
|
||||
@@ -107,7 +113,15 @@ Per-spec platform functions:
|
||||
(a :href "/testing/render" :class "block rounded-lg border border-stone-200 p-5 hover:border-violet-300 hover:bg-violet-50 transition-colors"
|
||||
(h3 :class "font-semibold text-stone-800" "Renderer")
|
||||
(p :class "text-sm text-stone-500" "23 tests — elements, attributes, void elements, fragments, escaping, control flow, components")
|
||||
(p :class "text-xs text-violet-600 mt-1" "test-render.sx"))))
|
||||
(p :class "text-xs text-violet-600 mt-1" "test-render.sx"))
|
||||
(a :href "/testing/deps" :class "block rounded-lg border border-stone-200 p-5 hover:border-violet-300 hover:bg-violet-50 transition-colors"
|
||||
(h3 :class "font-semibold text-stone-800" "Dependencies")
|
||||
(p :class "text-sm text-stone-500" "33 tests — scan-refs, transitive-deps, components-needed, IO detection, purity classification")
|
||||
(p :class "text-xs text-violet-600 mt-1" "test-deps.sx"))
|
||||
(a :href "/testing/engine" :class "block rounded-lg border border-stone-200 p-5 hover:border-violet-300 hover:bg-violet-50 transition-colors"
|
||||
(h3 :class "font-semibold text-stone-800" "Engine")
|
||||
(p :class "text-sm text-stone-500" "37 tests — parse-time, trigger specs, swap specs, retry logic, param filtering")
|
||||
(p :class "text-xs text-violet-600 mt-1" "test-engine.sx"))))
|
||||
|
||||
;; What it proves
|
||||
(div :class "rounded-lg border border-blue-200 bg-blue-50 p-5 space-y-3"
|
||||
|
||||
@@ -458,6 +458,27 @@
|
||||
:selected "Async IO")
|
||||
:content (~async-io-demo-content))
|
||||
|
||||
(defpage streaming-demo
|
||||
:path "/isomorphism/streaming"
|
||||
:auth :public
|
||||
:stream true
|
||||
:layout (:sx-section
|
||||
:section "Isomorphism"
|
||||
:sub-label "Isomorphism"
|
||||
:sub-href "/isomorphism/"
|
||||
:sub-nav (~section-nav :items isomorphism-nav-items :current "Streaming")
|
||||
:selected "Streaming")
|
||||
:shell (~streaming-demo-layout
|
||||
(~suspense :id "stream-fast" :fallback (~stream-skeleton))
|
||||
(~suspense :id "stream-medium" :fallback (~stream-skeleton))
|
||||
(~suspense :id "stream-slow" :fallback (~stream-skeleton)))
|
||||
:data (streaming-demo-data)
|
||||
:content (~streaming-demo-chunk
|
||||
:stream-label stream-label
|
||||
:stream-color stream-color
|
||||
:stream-message stream-message
|
||||
:stream-time stream-time))
|
||||
|
||||
;; Wildcard must come AFTER specific routes (first-match routing)
|
||||
(defpage isomorphism-page
|
||||
:path "/isomorphism/<slug>"
|
||||
@@ -513,6 +534,8 @@
|
||||
"glue-decoupling" (~plan-glue-decoupling-content)
|
||||
"social-sharing" (~plan-social-sharing-content)
|
||||
"sx-ci" (~plan-sx-ci-content)
|
||||
"cssx-components" (~plan-cssx-components-content)
|
||||
"live-streaming" (~plan-live-streaming-content)
|
||||
:else (~plans-index-content)))
|
||||
|
||||
;; ---------------------------------------------------------------------------
|
||||
@@ -535,7 +558,9 @@
|
||||
:eval-source eval-source
|
||||
:parser-source parser-source
|
||||
:router-source router-source
|
||||
:render-source render-source))
|
||||
:render-source render-source
|
||||
:deps-source deps-source
|
||||
:engine-source engine-source))
|
||||
|
||||
(defpage testing-page
|
||||
:path "/testing/<slug>"
|
||||
@@ -552,6 +577,8 @@
|
||||
"parser" (run-modular-tests "parser")
|
||||
"router" (run-modular-tests "router")
|
||||
"render" (run-modular-tests "render")
|
||||
"deps" (run-modular-tests "deps")
|
||||
"engine" (run-modular-tests "engine")
|
||||
:else (dict))
|
||||
:content (case slug
|
||||
"eval" (~testing-spec-content
|
||||
@@ -582,6 +609,20 @@
|
||||
:spec-source spec-source
|
||||
:framework-source framework-source
|
||||
:server-results server-results)
|
||||
"deps" (~testing-spec-content
|
||||
:spec-name "deps"
|
||||
:spec-title "Dependency Analysis Tests"
|
||||
:spec-desc "33 tests covering component dependency analysis — scan-refs, scan-components-from-source, transitive-deps, components-needed, scan-io-refs, and component-pure? classification."
|
||||
:spec-source spec-source
|
||||
:framework-source framework-source
|
||||
:server-results server-results)
|
||||
"engine" (~testing-spec-content
|
||||
:spec-name "engine"
|
||||
:spec-title "Engine Tests"
|
||||
:spec-desc "37 tests covering engine pure functions — parse-time, parse-trigger-spec, default-trigger, parse-swap-spec, parse-retry-spec, next-retry-ms, and filter-params."
|
||||
:spec-source spec-source
|
||||
:framework-source framework-source
|
||||
:server-results server-results)
|
||||
"runners" (~testing-runners-content)
|
||||
:else (~testing-overview-content
|
||||
:server-results server-results)))
|
||||
|
||||
@@ -26,6 +26,7 @@ def _register_sx_helpers() -> None:
|
||||
"data-test-data": _data_test_data,
|
||||
"run-spec-tests": _run_spec_tests,
|
||||
"run-modular-tests": _run_modular_tests,
|
||||
"streaming-demo-data": _streaming_demo_data,
|
||||
})
|
||||
|
||||
|
||||
@@ -706,6 +707,8 @@ def _run_modular_tests(spec_name: str) -> dict:
|
||||
"parser": {"file": "test-parser.sx", "needs": ["sx-parse"]},
|
||||
"router": {"file": "test-router.sx", "needs": []},
|
||||
"render": {"file": "test-render.sx", "needs": ["render-html"]},
|
||||
"deps": {"file": "test-deps.sx", "needs": []},
|
||||
"engine": {"file": "test-engine.sx", "needs": []},
|
||||
}
|
||||
|
||||
specs_to_run = list(SPECS.keys()) if spec_name == "all" else [spec_name]
|
||||
@@ -720,7 +723,7 @@ def _run_modular_tests(spec_name: str) -> dict:
|
||||
if not spec:
|
||||
continue
|
||||
|
||||
# Load router from bootstrap if needed
|
||||
# Load module functions from bootstrap
|
||||
if sn == "router":
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
@@ -739,6 +742,46 @@ def _run_modular_tests(spec_name: str) -> dict:
|
||||
env["make-route-segment"] = make_route_segment
|
||||
except ImportError:
|
||||
eval_file("router.sx")
|
||||
elif sn == "deps":
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
scan_refs, scan_components_from_source,
|
||||
transitive_deps, compute_all_deps,
|
||||
components_needed, page_component_bundle,
|
||||
page_css_classes, scan_io_refs,
|
||||
transitive_io_refs, compute_all_io_refs,
|
||||
component_pure_p,
|
||||
)
|
||||
env["scan-refs"] = scan_refs
|
||||
env["scan-components-from-source"] = scan_components_from_source
|
||||
env["transitive-deps"] = transitive_deps
|
||||
env["compute-all-deps"] = compute_all_deps
|
||||
env["components-needed"] = components_needed
|
||||
env["page-component-bundle"] = page_component_bundle
|
||||
env["page-css-classes"] = page_css_classes
|
||||
env["scan-io-refs"] = scan_io_refs
|
||||
env["transitive-io-refs"] = transitive_io_refs
|
||||
env["compute-all-io-refs"] = compute_all_io_refs
|
||||
env["component-pure?"] = component_pure_p
|
||||
env["test-env"] = lambda: env
|
||||
except ImportError:
|
||||
eval_file("deps.sx")
|
||||
env["test-env"] = lambda: env
|
||||
elif sn == "engine":
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
parse_time, parse_trigger_spec, default_trigger,
|
||||
parse_swap_spec, parse_retry_spec, filter_params,
|
||||
)
|
||||
env["parse-time"] = parse_time
|
||||
env["parse-trigger-spec"] = parse_trigger_spec
|
||||
env["default-trigger"] = default_trigger
|
||||
env["parse-swap-spec"] = parse_swap_spec
|
||||
env["parse-retry-spec"] = parse_retry_spec
|
||||
env["next-retry-ms"] = lambda cur, cap: min(cur * 2, cap)
|
||||
env["filter-params"] = filter_params
|
||||
except ImportError:
|
||||
eval_file("engine.sx")
|
||||
|
||||
eval_file(spec["file"])
|
||||
|
||||
@@ -762,6 +805,8 @@ def _run_modular_tests(spec_name: str) -> dict:
|
||||
result["parser-source"] = _read_spec_file("test-parser.sx")
|
||||
result["router-source"] = _read_spec_file("test-router.sx")
|
||||
result["render-source"] = _read_spec_file("test-render.sx")
|
||||
result["deps-source"] = _read_spec_file("test-deps.sx")
|
||||
result["engine-source"] = _read_spec_file("test-engine.sx")
|
||||
else:
|
||||
spec = SPECS.get(spec_name)
|
||||
if spec:
|
||||
@@ -791,3 +836,42 @@ def _data_test_data() -> dict:
|
||||
"phase": "Phase 4 — Client Async & IO Bridge",
|
||||
"transport": "SX wire format (text/sx)",
|
||||
}
|
||||
|
||||
|
||||
async def _streaming_demo_data():
|
||||
"""Multi-stream demo — yields three chunks at staggered intervals.
|
||||
|
||||
Each yield is a dict with _stream_id (matching a ~suspense :id in the
|
||||
shell) plus bindings for the :content expression. The streaming
|
||||
infrastructure detects the async generator and resolves each suspense
|
||||
placeholder as each chunk arrives.
|
||||
"""
|
||||
import asyncio
|
||||
from datetime import datetime, timezone
|
||||
|
||||
await asyncio.sleep(1)
|
||||
yield {
|
||||
"stream-id": "stream-fast",
|
||||
"stream-label": "Fast API",
|
||||
"stream-color": "green",
|
||||
"stream-message": "Responded in ~1 second",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
await asyncio.sleep(2) # 3s total
|
||||
yield {
|
||||
"stream-id": "stream-medium",
|
||||
"stream-label": "Database Query",
|
||||
"stream-color": "blue",
|
||||
"stream-message": "Query completed in ~3 seconds",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
await asyncio.sleep(2) # 5s total
|
||||
yield {
|
||||
"stream-id": "stream-slow",
|
||||
"stream-label": "ML Inference",
|
||||
"stream-color": "amber",
|
||||
"stream-message": "Model inference completed in ~5 seconds",
|
||||
"stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user