Restore stashed WIP: live streaming plan, forms, CI pipeline, streaming demo
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -361,6 +361,7 @@ class JSEmitter:
|
||||
"fetch-request": "fetchRequest",
|
||||
"fetch-location": "fetchLocation",
|
||||
"fetch-and-restore": "fetchAndRestore",
|
||||
"fetch-streaming": "fetchStreaming",
|
||||
"fetch-preload": "fetchPreload",
|
||||
"dom-query-by-id": "domQueryById",
|
||||
"dom-matches?": "domMatches",
|
||||
@@ -3123,6 +3124,134 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
}).catch(function() { location.reload(); });
|
||||
}
|
||||
|
||||
// fetch-streaming: chunked-transfer fetch for streaming (multi-stream) pages.
// Wire protocol:
//   * everything before the first __sxResolve <script> tag is the OOB shell
//     (SX wire format containing suspense skeletons) — swapped in exactly once;
//   * each later "<script>window.__sxResolve&&window.__sxResolve(id, sx)</script>"
//     chunk resolves one suspense slot as it arrives.
// Non-OK or bodyless responses fall back to a single whole-body swap; network
// errors fall back to a full page reload. Fire-and-forget: the promise chain
// is not returned to the caller.
function fetchStreaming(target, url, headers) {
  var opts = { headers: headers };
  try {
    // Cross-subdomain requests to trusted hosts carry credentials.
    var h = new URL(url, location.href).hostname;
    if (h !== location.hostname &&
        (h.indexOf(".rose-ash.com") >= 0 || h.indexOf(".localhost") >= 0)) {
      opts.credentials = "include";
    }
  } catch (e) {} // unparsable URL: proceed with default credentials

  fetch(url, opts).then(function(resp) {
    if (!resp.ok || !resp.body) {
      // Fallback: non-streaming whole-body swap.
      // NOTE(review): unlike the streaming path below, this branch does not
      // dispatch sx:clientRoute — confirm whether nav state should update here.
      return resp.text().then(function(text) {
        text = stripComponentScripts(text);
        text = extractResponseCss(text);
        text = text.trim();
        if (text.charAt(0) === "(") { // SX wire format starts with "("
          var dom = sxRender(text);
          var container = document.createElement("div");
          container.appendChild(dom);
          processOobSwaps(container, function(t, oob, s) {
            swapDomNodes(t, oob, s);
            sxHydrate(t);
            processElements(t);
          });
          var newMain = container.querySelector("#main-panel");
          morphChildren(target, newMain || container);
          postSwap(target);
        }
      });
    }

    var reader = resp.body.getReader();
    var decoder = new TextDecoder();
    var buffer = "";              // decoded-but-unprocessed response text
    var initialSwapDone = false;  // has the OOB shell been swapped in yet?
    // Literal markers delimiting a resolve-script chunk (not a regex).
    var RESOLVE_START = "<script>window.__sxResolve&&window.__sxResolve(";
    var RESOLVE_END = ")</script>";

    // Consume every complete resolve script currently in `buffer`, routing
    // each (id, sx) pair to Sx.resolveSuspense. An incomplete trailing
    // script stays in the buffer until more data arrives.
    function processResolveScripts() {
      // Strip and load any extra component defs before resolve scripts
      buffer = stripSxScripts(buffer);
      var idx;
      while ((idx = buffer.indexOf(RESOLVE_START)) >= 0) {
        var endIdx = buffer.indexOf(RESOLVE_END, idx);
        if (endIdx < 0) break; // incomplete, wait for more data
        var argsStr = buffer.substring(idx + RESOLVE_START.length, endIdx);
        buffer = buffer.substring(endIdx + RESOLVE_END.length);
        // argsStr is: "stream-id","sx source" — split on the FIRST comma so
        // commas inside the sx payload survive.
        // NOTE(review): a literal ")</script>" inside the JSON payload would
        // truncate argsStr — presumably the server escapes "/" when
        // serializing; confirm against the emitter.
        var commaIdx = argsStr.indexOf(",");
        if (commaIdx >= 0) {
          try {
            var id = JSON.parse(argsStr.substring(0, commaIdx));
            var sx = JSON.parse(argsStr.substring(commaIdx + 1));
            if (typeof Sx !== "undefined" && Sx.resolveSuspense) {
              Sx.resolveSuspense(id, sx);
            }
          } catch (e) {
            console.error("[sx-ref] resolve parse error:", e);
          }
        }
      }
    }

    // Read loop: decode each chunk, perform the one-time OOB shell swap,
    // then drain resolve scripts until the stream ends.
    function pump() {
      return reader.read().then(function(result) {
        // stream:true keeps multi-byte sequences split across chunks intact;
        // the final read (result.done) flushes the decoder.
        buffer += decoder.decode(result.value || new Uint8Array(), { stream: !result.done });

        if (!initialSwapDone) {
          // Look for the first resolve script — everything before it is OOB content
          var scriptIdx = buffer.indexOf("<script>window.__sxResolve");
          // If we found a script tag, or the stream is done, process OOB
          var oobEnd = scriptIdx >= 0 ? scriptIdx : (result.done ? buffer.length : -1);
          if (oobEnd >= 0) {
            var oobContent = buffer.substring(0, oobEnd);
            buffer = buffer.substring(oobEnd);
            initialSwapDone = true;

            // Process OOB SX content (same pipeline as fetchAndRestore)
            oobContent = stripComponentScripts(oobContent);
            // Also strip bare <script type="text/sx"> (extra defs from resolve chunks)
            oobContent = stripSxScripts(oobContent);
            oobContent = extractResponseCss(oobContent);
            oobContent = oobContent.trim();
            if (oobContent.charAt(0) === "(") {
              try {
                var dom = sxRender(oobContent);
                var container = document.createElement("div");
                container.appendChild(dom);
                processOobSwaps(container, function(t, oob, s) {
                  swapDomNodes(t, oob, s);
                  sxHydrate(t);
                  processElements(t);
                });
                var newMain = container.querySelector("#main-panel");
                morphChildren(target, newMain || container);
                postSwap(target);
                // Dispatch clientRoute so nav links update active state
                domDispatch(target, "sx:clientRoute",
                  { pathname: new URL(url, location.href).pathname });
              } catch (err) {
                console.error("[sx-ref] streaming OOB swap error:", err);
              }
            }
            // Process any resolve scripts already in buffer
            processResolveScripts();
          }
        } else {
          // Shell already swapped: just drain resolve scripts as they arrive
          processResolveScripts();
        }

        if (!result.done) return pump();
      });
    }

    return pump();
  }).catch(function(err) {
    console.error("[sx-ref] streaming fetch error:", err);
    location.reload();
  });
}
|
||||
|
||||
function fetchPreload(url, headers, cache) {
|
||||
fetch(url, { headers: headers }).then(function(resp) {
|
||||
if (!resp.ok) return;
|
||||
@@ -3587,6 +3716,14 @@ PLATFORM_ORCHESTRATION_JS = """
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function stripSxScripts(text) {
|
||||
// Strip <script type="text/sx">...</script> (without data-components).
|
||||
// These contain extra component defs from streaming resolve chunks.
|
||||
var SxObj = typeof Sx !== "undefined" ? Sx : null;
|
||||
return text.replace(/<script[^>]*type="text\\/sx"[^>]*>([\\s\\S]*?)<\\/script>/gi,
|
||||
function(_, defs) { if (SxObj && SxObj.loadComponents) SxObj.loadComponents(defs); return ""; });
|
||||
}
|
||||
|
||||
function extractResponseCss(text) {
|
||||
if (!_hasDom) return text;
|
||||
var target = document.getElementById("sx-css");
|
||||
|
||||
@@ -1255,11 +1255,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -1790,7 +1794,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -1807,6 +1811,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -1986,6 +1991,8 @@ trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
'''
|
||||
|
||||
|
||||
|
||||
@@ -116,3 +116,114 @@
|
||||
(let ((pdef (make-page-def name slots env)))
|
||||
(env-set! env (str "page:" name) pdef)
|
||||
pdef))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; Page Execution Semantics
|
||||
;; ==========================================================================
|
||||
;;
|
||||
;; A PageDef describes what to render for a route. The host evaluates slots
|
||||
;; at request time. This section specifies the data → content protocol that
|
||||
;; every host must implement identically.
|
||||
;;
|
||||
;; Slots (all unevaluated AST):
|
||||
;; :path — route pattern (string)
|
||||
;; :auth — "public" | "login" | "admin"
|
||||
;; :layout — layout reference + kwargs
|
||||
;; :stream — boolean, opt into chunked transfer
|
||||
;; :shell — immediate content (contains ~suspense placeholders)
|
||||
;; :fallback — loading skeleton for single-stream mode
|
||||
;; :data — IO expression producing bindings
|
||||
;; :content — template expression evaluated with data bindings
|
||||
;; :filter, :aside, :menu — additional content slots
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Data Protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; The :data expression is evaluated at request time. It returns one of:
|
||||
;;
|
||||
;; 1. A dict — single-stream mode (default).
|
||||
;; Each key becomes an env binding (underscores → hyphens).
|
||||
;; Then :content is evaluated once with those bindings.
|
||||
;; Result resolves the "stream-content" suspense slot.
|
||||
;;
|
||||
;; 2. A sequence of dicts — multi-stream mode.
|
||||
;; The host delivers items over time (async generator, channel, etc.).
|
||||
;; Each dict:
|
||||
;; - MUST contain "stream-id" → string matching a ~suspense :id
|
||||
;; - Remaining keys become env bindings (underscores → hyphens)
|
||||
;; - :content is re-evaluated with those bindings
|
||||
;; - Result resolves the ~suspense slot matching "stream-id"
|
||||
;; If "stream-id" is absent, defaults to "stream-content".
|
||||
;;
|
||||
;; The host is free to choose the timing mechanism:
|
||||
;; Python — async generator (yield dicts at intervals)
|
||||
;; Go — channel of dicts
|
||||
;; Haskell — conduit / streaming
|
||||
;; JS — async iterator
|
||||
;;
|
||||
;; The spec requires:
|
||||
;; (a) Each item's bindings are isolated (fresh env per item)
|
||||
;; (b) :content is evaluated independently for each item
|
||||
;; (c) Resolution is incremental — each item resolves as it arrives
|
||||
;; (d) "stream-id" routes to the correct ~suspense slot
|
||||
;;
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Streaming Execution Order
|
||||
;; --------------------------------------------------------------------------
|
||||
;;
|
||||
;; When :stream is true:
|
||||
;;
|
||||
;; 1. Evaluate :shell (if present) → HTML for immediate content slot
|
||||
;; :shell typically contains ~suspense placeholders with :fallback
|
||||
;; 2. Render HTML shell with suspense placeholders → send to client
|
||||
;; 3. Start :data evaluation concurrently with header resolution
|
||||
;; 4. As each data item arrives:
|
||||
;; a. Bind item keys into fresh env
|
||||
;; b. Evaluate :content with those bindings → SX wire format
|
||||
;; c. Send resolve script: __sxResolve(stream-id, sx)
|
||||
;; 5. Close response when all items + headers have resolved
|
||||
;;
|
||||
;; Non-streaming pages evaluate :data then :content sequentially and
|
||||
;; return the complete page in a single response.
|
||||
;;
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Spec helpers for multi-stream data protocol
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Resolve which suspense slot a streamed data chunk targets.
;; Chunks without an explicit "stream-id" key go to the default slot.
(define stream-chunk-id
  (fn (chunk)
    (let ((explicit (has-key? chunk "stream-id")))
      (if explicit
          (get chunk "stream-id")
          "stream-content"))))
|
||||
|
||||
;; Drop the routing key from a chunk, leaving only the data bindings.
(define stream-chunk-bindings
  (fn (chunk) (dissoc chunk "stream-id")))
|
||||
|
||||
;; Data keys arrive with underscores; env binding names use hyphens.
(define normalize-binding-key
  (fn (key) (replace key "_" "-")))
|
||||
|
||||
;; Bind a data chunk's keys into a fresh env (isolated per chunk).
;; The base env is shallow-copied via (merge {} base-env) so concurrent
;; chunks never observe each other's bindings; each key is normalized
;; (underscore -> hyphen) before being installed with env-set!.
(define bind-stream-chunk
  (fn (chunk base-env)
    (let ((env (merge {} base-env))
          (bindings (stream-chunk-bindings chunk)))
      (for-each
        (fn (key)
          (env-set! env (normalize-binding-key key)
            (get bindings key)))
        (keys bindings))
      env)))
|
||||
|
||||
;; A multi-stream data result is valid when it is a list of dicts.
(define validate-stream-data
  (fn (data)
    (let ((is-list (= (type-of data) "list")))
      (and is-list
           (every? (fn (item) (= (type-of item) "dict")) data)))))
|
||||
|
||||
@@ -664,6 +664,15 @@
|
||||
(has-io (and io-deps (not (empty? io-deps)))))
|
||||
;; Ensure IO deps are registered as proxied primitives
|
||||
(when has-io (register-io-deps io-deps))
|
||||
(if (get match "stream")
|
||||
;; Streaming page: fetch with streaming body reader.
|
||||
;; First chunk = OOB SX swap (shell with skeletons),
|
||||
;; subsequent chunks = resolve scripts filling slots.
|
||||
(do (log-info (str "sx:route streaming " pathname))
|
||||
(fetch-streaming target pathname
|
||||
(build-request-headers target
|
||||
(loaded-component-names) _css-hash))
|
||||
true)
|
||||
(if (get match "has-data")
|
||||
;; Data page: check cache, else resolve asynchronously
|
||||
(let ((cache-key (page-data-cache-key page-name params))
|
||||
@@ -727,7 +736,7 @@
|
||||
(do (log-info (str "sx:route server (eval failed) " pathname)) false)
|
||||
(do
|
||||
(swap-rendered-content target rendered pathname)
|
||||
true)))))))))))))))))
|
||||
true))))))))))))))))))
|
||||
|
||||
|
||||
(define bind-client-route-link
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# WARNING: special-forms.sx declares forms not in eval.sx: reset, shift
|
||||
"""
|
||||
sx_ref.py -- Generated from reference SX evaluator specification.
|
||||
|
||||
@@ -191,11 +192,15 @@ def make_page_def(name, slots, env):
|
||||
if isinstance(layout, Keyword):
|
||||
layout = layout.name
|
||||
cache = None
|
||||
stream_val = slots.get("stream")
|
||||
stream = bool(trampoline(eval_expr(stream_val, env))) if stream_val is not None else False
|
||||
return PageDef(
|
||||
name=name, path=path, auth=auth, layout=layout, cache=cache,
|
||||
data_expr=slots.get("data"), content_expr=slots.get("content"),
|
||||
filter_expr=slots.get("filter"), aside_expr=slots.get("aside"),
|
||||
menu_expr=slots.get("menu"), closure=dict(env),
|
||||
menu_expr=slots.get("menu"), stream=stream,
|
||||
fallback_expr=slots.get("fallback"), shell_expr=slots.get("shell"),
|
||||
closure=dict(env),
|
||||
)
|
||||
|
||||
|
||||
@@ -736,7 +741,7 @@ PRIMITIVES["concat"] = lambda *args: _b_sum((a for a in args if a), [])
|
||||
PRIMITIVES["list"] = lambda *args: _b_list(args)
|
||||
PRIMITIVES["dict"] = lambda *args: {args[i]: args[i+1] for i in _b_range(0, _b_len(args)-1, 2)}
|
||||
PRIMITIVES["range"] = lambda a, b, step=1: _b_list(_b_range(_b_int(a), _b_int(b), _b_int(step)))
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else default)
|
||||
PRIMITIVES["get"] = lambda c, k, default=NIL: c.get(k, default) if isinstance(c, _b_dict) else (c[k] if isinstance(c, (_b_list, str)) and isinstance(k, _b_int) and 0 <= k < _b_len(c) else (c.get(k, default) if hasattr(c, 'get') else default))
|
||||
PRIMITIVES["len"] = lambda c: _b_len(c) if c is not None and c is not NIL else 0
|
||||
PRIMITIVES["first"] = lambda c: c[0] if c and _b_len(c) > 0 else NIL
|
||||
PRIMITIVES["last"] = lambda c: c[-1] if c and _b_len(c) > 0 else NIL
|
||||
@@ -752,6 +757,7 @@ PRIMITIVES["zip-pairs"] = lambda c: [[c[i], c[i+1]] for i in _b_range(_b_len(c)-
|
||||
PRIMITIVES["keys"] = lambda d: _b_list((d or {}).keys())
|
||||
PRIMITIVES["vals"] = lambda d: _b_list((d or {}).values())
|
||||
PRIMITIVES["merge"] = lambda *args: _sx_merge_dicts(*args)
|
||||
PRIMITIVES["has-key?"] = lambda d, k: isinstance(d, _b_dict) and k in d
|
||||
PRIMITIVES["assoc"] = lambda d, *kvs: _sx_assoc(d, *kvs)
|
||||
PRIMITIVES["dissoc"] = lambda d, *ks: {k: v for k, v in d.items() if k not in ks}
|
||||
PRIMITIVES["into"] = lambda target, coll: (_b_list(coll) if isinstance(target, _b_list) else {p[0]: p[1] for p in coll if isinstance(p, _b_list) and _b_len(p) >= 2})
|
||||
@@ -888,6 +894,8 @@ trim = PRIMITIVES["trim"]
|
||||
replace = PRIMITIVES["replace"]
|
||||
parse_int = PRIMITIVES["parse-int"]
|
||||
upper = PRIMITIVES["upper"]
|
||||
has_key_p = PRIMITIVES["has-key?"]
|
||||
dissoc = PRIMITIVES["dissoc"]
|
||||
|
||||
|
||||
# =========================================================================
|
||||
@@ -1128,6 +1136,21 @@ sf_defaction = lambda args, env: (lambda name_sym: (lambda params_raw: (lambda n
|
||||
# sf-defpage
|
||||
sf_defpage = lambda args, env: (lambda name_sym: (lambda name: (lambda slots: _sx_begin((lambda i: (lambda max_i: for_each(lambda idx: ((_sx_dict_set(slots, keyword_name(nth(args, idx)), nth(args, (idx + 1))) if sx_truthy(((idx + 1) < max_i)) else NIL) if sx_truthy(((idx < max_i) if not sx_truthy((idx < max_i)) else (type_of(nth(args, idx)) == 'keyword'))) else NIL), range(1, max_i, 2)))(len(args)))(1), (lambda pdef: _sx_begin(_sx_dict_set(env, sx_str('page:', name), pdef), pdef))(make_page_def(name, slots, env))))({}))(symbol_name(name_sym)))(first(args))
|
||||
|
||||
# stream-chunk-id
def stream_chunk_id(chunk):
    # Route a streamed chunk to its suspense slot; chunks without an
    # explicit "stream-id" key go to the default slot.
    if sx_truthy(has_key_p(chunk, 'stream-id')):
        return get(chunk, 'stream-id')
    return 'stream-content'
|
||||
|
||||
# stream-chunk-bindings
def stream_chunk_bindings(chunk):
    # Everything except the routing key becomes an env binding.
    return dissoc(chunk, 'stream-id')
|
||||
|
||||
# normalize-binding-key
def normalize_binding_key(key):
    # Data keys use underscores; SX env names use hyphens.
    return replace(key, '_', '-')
|
||||
|
||||
# bind-stream-chunk
def bind_stream_chunk(chunk, base_env):
    # Fresh env per chunk: shallow-copy base_env so chunks stay isolated
    # from one another; keys are normalized before being installed.
    env = merge({}, base_env)
    bindings = stream_chunk_bindings(chunk)
    for_each(
        lambda key: _sx_dict_set(env, normalize_binding_key(key),
                                 get(bindings, key)),
        keys(bindings))
    return env
|
||||
|
||||
# validate-stream-data
def validate_stream_data(data):
    # Multi-stream data must be a list whose items are all dicts.
    # Preserves the transpiled `and` semantics: the first falsy operand
    # (the boolean of the list check) is returned as-is.
    is_list = (type_of(data) == 'list')
    if not sx_truthy(is_list):
        return is_list
    return every_p(lambda item: (type_of(item) == 'dict'), data)
|
||||
|
||||
|
||||
# === Transpiled from render (core) ===
|
||||
|
||||
@@ -1261,6 +1284,169 @@ compute_all_io_refs = lambda env, io_names: for_each(lambda name: (lambda val: (
|
||||
component_pure_p = lambda name, env, io_names: empty_p(transitive_io_refs(name, env, io_names))
|
||||
|
||||
|
||||
# === Transpiled from engine (fetch/swap/trigger pure logic) ===
|
||||
|
||||
# ENGINE_VERBS
# HTTP verbs recognized as sx-* request attributes (sx-get, sx-post, ...).
ENGINE_VERBS = ['get', 'post', 'put', 'delete', 'patch']
|
||||
|
||||
# DEFAULT_SWAP
# Swap style used when an element carries no explicit sx-swap attribute.
DEFAULT_SWAP = 'outerHTML'
|
||||
|
||||
# parse-time
def parse_time(s):
    # Parse a duration attribute into milliseconds: nil -> 0, "500ms" -> 500,
    # "2s" -> 2000, bare numbers pass through parse-int with default 0.
    if sx_truthy(is_nil(s)):
        return 0
    if sx_truthy(ends_with_p(s, 'ms')):
        return parse_int(s, 0)
    if sx_truthy(ends_with_p(s, 's')):
        return (parse_int(replace(s, 's', ''), 0) * 1000)
    return parse_int(s, 0)
|
||||
|
||||
# parse-trigger-spec
|
||||
parse_trigger_spec = lambda spec: (NIL if sx_truthy(is_nil(spec)) else (lambda raw_parts: filter(lambda x: (not sx_truthy(is_nil(x))), map(lambda part: (lambda tokens: (NIL if sx_truthy(empty_p(tokens)) else ({'event': 'every', 'modifiers': {'interval': parse_time(nth(tokens, 1))}} if sx_truthy(((first(tokens) == 'every') if not sx_truthy((first(tokens) == 'every')) else (len(tokens) >= 2))) else (lambda mods: _sx_begin(for_each(lambda tok: (_sx_dict_set(mods, 'once', True) if sx_truthy((tok == 'once')) else (_sx_dict_set(mods, 'changed', True) if sx_truthy((tok == 'changed')) else (_sx_dict_set(mods, 'delay', parse_time(slice(tok, 6))) if sx_truthy(starts_with_p(tok, 'delay:')) else (_sx_dict_set(mods, 'from', slice(tok, 5)) if sx_truthy(starts_with_p(tok, 'from:')) else NIL)))), rest(tokens)), {'event': first(tokens), 'modifiers': mods}))({}))))(split(trim(part), ' ')), raw_parts)))(split(spec, ',')))
|
||||
|
||||
# default-trigger
# Implicit trigger spec per tag name: FORM -> submit,
# INPUT/SELECT/TEXTAREA -> change, everything else -> click.
default_trigger = lambda tag_name: ([{'event': 'submit', 'modifiers': {}}] if sx_truthy((tag_name == 'FORM')) else ([{'event': 'change', 'modifiers': {}}] if sx_truthy(((tag_name == 'INPUT') if sx_truthy((tag_name == 'INPUT')) else ((tag_name == 'SELECT') if sx_truthy((tag_name == 'SELECT')) else (tag_name == 'TEXTAREA')))) else [{'event': 'click', 'modifiers': {}}]))
|
||||
|
||||
# get-verb-info
# First sx-<verb> attribute present on el wins; yields
# {'method': 'GET'|'POST'|..., 'url': <attr value>}, or NIL when no
# verb attribute is set (via `some` short-circuiting).
get_verb_info = lambda el: some(lambda verb: (lambda url: ({'method': upper(verb), 'url': url} if sx_truthy(url) else NIL))(dom_get_attr(el, sx_str('sx-', verb))), ENGINE_VERBS)
|
||||
|
||||
# build-request-headers
|
||||
build_request_headers = lambda el, loaded_components, css_hash: (lambda headers: _sx_begin((lambda target_sel: (_sx_dict_set(headers, 'SX-Target', target_sel) if sx_truthy(target_sel) else NIL))(dom_get_attr(el, 'sx-target')), (_sx_dict_set(headers, 'SX-Components', join(',', loaded_components)) if sx_truthy((not sx_truthy(empty_p(loaded_components)))) else NIL), (_sx_dict_set(headers, 'SX-Css', css_hash) if sx_truthy(css_hash) else NIL), (lambda extra_h: ((lambda parsed: (for_each(lambda key: _sx_dict_set(headers, key, sx_str(get(parsed, key))), keys(parsed)) if sx_truthy(parsed) else NIL))(parse_header_value(extra_h)) if sx_truthy(extra_h) else NIL))(dom_get_attr(el, 'sx-headers')), headers))({'SX-Request': 'true', 'SX-Current-URL': browser_location_href()})
|
||||
|
||||
# process-response-headers
# Snapshot all SX-* control headers from a response into a plain dict
# keyed by short names; values are whatever get_header yields for
# missing headers (presumably nil — confirm against the host binding).
process_response_headers = lambda get_header: {'redirect': get_header('SX-Redirect'), 'refresh': get_header('SX-Refresh'), 'trigger': get_header('SX-Trigger'), 'retarget': get_header('SX-Retarget'), 'reswap': get_header('SX-Reswap'), 'location': get_header('SX-Location'), 'replace-url': get_header('SX-Replace-Url'), 'css-hash': get_header('SX-Css-Hash'), 'trigger-swap': get_header('SX-Trigger-After-Swap'), 'trigger-settle': get_header('SX-Trigger-After-Settle'), 'content-type': get_header('Content-Type')}
|
||||
|
||||
# parse-swap-spec
def parse_swap_spec(raw_swap, global_transitions_p):
    # Parse an sx-swap attribute (e.g. "outerHTML transition:true") into
    # {'style': ..., 'transition': ...}. The first token is the swap style;
    # a transition:true/false token overrides the global transition flag.
    spec = raw_swap if sx_truthy(raw_swap) else DEFAULT_SWAP
    tokens = split(spec, ' ')
    use_transition = global_transitions_p
    for token in rest(tokens):
        if sx_truthy((token == 'transition:true')):
            use_transition = True
        elif sx_truthy((token == 'transition:false')):
            use_transition = False
    return {'style': first(tokens), 'transition': use_transition}
|
||||
|
||||
# parse-retry-spec
# Parse a "strategy:start-ms:cap-ms" retry attribute (e.g.
# "exponential:1000:30000"); numeric parts default to 1000 / 30000
# when absent or unparsable. A nil attribute yields NIL.
parse_retry_spec = lambda retry_attr: (NIL if sx_truthy(is_nil(retry_attr)) else (lambda parts: {'strategy': first(parts), 'start-ms': parse_int(nth(parts, 1), 1000), 'cap-ms': parse_int(nth(parts, 2), 30000)})(split(retry_attr, ':')))
|
||||
|
||||
# next-retry-ms
def next_retry_ms(current_ms, cap_ms):
    # Exponential backoff step: double the current delay, clamped at cap_ms.
    doubled = current_ms * 2
    return min(doubled, cap_ms)
|
||||
|
||||
# filter-params
|
||||
filter_params = lambda params_spec, all_params: (all_params if sx_truthy(is_nil(params_spec)) else ([] if sx_truthy((params_spec == 'none')) else (all_params if sx_truthy((params_spec == '*')) else ((lambda excluded: filter(lambda p: (not sx_truthy(contains_p(excluded, first(p)))), all_params))(map(trim, split(slice(params_spec, 4), ','))) if sx_truthy(starts_with_p(params_spec, 'not ')) else (lambda allowed: filter(lambda p: contains_p(allowed, first(p)), all_params))(map(trim, split(params_spec, ',')))))))
|
||||
|
||||
# resolve-target
# Resolve the swap target for el from its sx-target attribute:
# absent or "this" -> el itself, "closest" -> el's parent,
# anything else -> document query by CSS selector.
resolve_target = lambda el: (lambda sel: (el if sx_truthy((is_nil(sel) if sx_truthy(is_nil(sel)) else (sel == 'this'))) else (dom_parent(el) if sx_truthy((sel == 'closest')) else dom_query(sel))))(dom_get_attr(el, 'sx-target'))
|
||||
|
||||
# apply-optimistic
|
||||
apply_optimistic = lambda el: (lambda directive: (NIL if sx_truthy(is_nil(directive)) else (lambda target: (lambda state: _sx_begin((_sx_begin(_sx_dict_set(state, 'opacity', dom_get_style(target, 'opacity')), dom_set_style(target, 'opacity', '0'), dom_set_style(target, 'pointer-events', 'none')) if sx_truthy((directive == 'remove')) else (_sx_begin(_sx_dict_set(state, 'disabled', dom_get_prop(target, 'disabled')), dom_set_prop(target, 'disabled', True)) if sx_truthy((directive == 'disable')) else ((lambda cls: _sx_begin(_sx_dict_set(state, 'add-class', cls), dom_add_class(target, cls)))(slice(directive, 10)) if sx_truthy(starts_with_p(directive, 'add-class:')) else NIL))), state))({'target': target, 'directive': directive}))((resolve_target(el) if sx_truthy(resolve_target(el)) else el))))(dom_get_attr(el, 'sx-optimistic'))
|
||||
|
||||
# revert-optimistic
|
||||
revert_optimistic = lambda state: ((lambda target: (lambda directive: (_sx_begin(dom_set_style(target, 'opacity', (get(state, 'opacity') if sx_truthy(get(state, 'opacity')) else '')), dom_set_style(target, 'pointer-events', '')) if sx_truthy((directive == 'remove')) else (dom_set_prop(target, 'disabled', (get(state, 'disabled') if sx_truthy(get(state, 'disabled')) else False)) if sx_truthy((directive == 'disable')) else (dom_remove_class(target, get(state, 'add-class')) if sx_truthy(get(state, 'add-class')) else NIL))))(get(state, 'directive')))(get(state, 'target')) if sx_truthy(state) else NIL)
|
||||
|
||||
# find-oob-swaps
|
||||
find_oob_swaps = lambda container: (lambda results: _sx_begin(for_each(lambda attr: (lambda oob_els: for_each(lambda oob: (lambda swap_type: (lambda target_id: _sx_begin(dom_remove_attr(oob, attr), (_sx_append(results, {'element': oob, 'swap-type': swap_type, 'target-id': target_id}) if sx_truthy(target_id) else NIL)))(dom_id(oob)))((dom_get_attr(oob, attr) if sx_truthy(dom_get_attr(oob, attr)) else 'outerHTML')), oob_els))(dom_query_all(container, sx_str('[', attr, ']'))), ['sx-swap-oob', 'hx-swap-oob']), results))([])
|
||||
|
||||
# morph-node
|
||||
morph_node = lambda old_node, new_node: (NIL if sx_truthy((dom_has_attr_p(old_node, 'sx-preserve') if sx_truthy(dom_has_attr_p(old_node, 'sx-preserve')) else dom_has_attr_p(old_node, 'sx-ignore'))) else (dom_replace_child(dom_parent(old_node), dom_clone(new_node), old_node) if sx_truthy(((not sx_truthy((dom_node_type(old_node) == dom_node_type(new_node)))) if sx_truthy((not sx_truthy((dom_node_type(old_node) == dom_node_type(new_node))))) else (not sx_truthy((dom_node_name(old_node) == dom_node_name(new_node)))))) else ((dom_set_text_content(old_node, dom_text_content(new_node)) if sx_truthy((not sx_truthy((dom_text_content(old_node) == dom_text_content(new_node))))) else NIL) if sx_truthy(((dom_node_type(old_node) == 3) if sx_truthy((dom_node_type(old_node) == 3)) else (dom_node_type(old_node) == 8))) else (_sx_begin(sync_attrs(old_node, new_node), (morph_children(old_node, new_node) if sx_truthy((not sx_truthy((dom_is_active_element_p(old_node) if not sx_truthy(dom_is_active_element_p(old_node)) else dom_is_input_element_p(old_node))))) else NIL)) if sx_truthy((dom_node_type(old_node) == 1)) else NIL))))
|
||||
|
||||
# sync-attrs
# One-way attribute sync: make old_el's attribute set match new_el's.
# Pass 1 copies every attribute of new_el whose value differs on old_el;
# pass 2 removes attributes old_el still has but new_el lacks.
# (The trailing `[-1]` returns the last tuple element — the transpiled
# form of the SX `begin`.)
sync_attrs = _sx_fn(lambda old_el, new_el: (
    for_each(lambda attr: (lambda name: (lambda val: (dom_set_attr(old_el, name, val) if sx_truthy((not sx_truthy((dom_get_attr(old_el, name) == val)))) else NIL))(nth(attr, 1)))(first(attr)), dom_attr_list(new_el)),
    for_each(lambda attr: (dom_remove_attr(old_el, first(attr)) if sx_truthy((not sx_truthy(dom_has_attr_p(new_el, first(attr))))) else NIL), dom_attr_list(old_el))
)[-1])
|
||||
|
||||
# morph-children
# Reconcile old_parent's child list against new_parent's (DOM morphing).
# Walk new children left-to-right with a cursor `oi` into the old children:
# reuse id-matched nodes (moving them into place), morph positionally
# aligned nodes, clone-insert the rest, then remove leftover old children
# that are not flagged sx-preserve / sx-ignore.
# (Throughout, `X if not sx_truthy(X) else Y` is the transpiled `and`.)
def morph_children(old_parent, new_parent):
    _cells = {}
    old_kids = dom_child_list(old_parent)
    new_kids = dom_child_list(new_parent)
    # Index old children by element id for id-based matching.
    old_by_id = reduce(lambda acc, kid: (lambda id: (_sx_begin(_sx_dict_set(acc, id, kid), acc) if sx_truthy(id) else acc))(dom_id(kid)), {}, old_kids)
    _cells['oi'] = 0
    for new_child in new_kids:
        match_id = dom_id(new_child)
        match_by_id = (dict_get(old_by_id, match_id) if sx_truthy(match_id) else NIL)
        # id match found (truthy and not NIL): reuse the existing node.
        if sx_truthy((match_by_id if not sx_truthy(match_by_id) else (not sx_truthy(is_nil(match_by_id))))):
            # Move the matched node to the cursor position unless it is
            # already there (no move when the cursor is past the end).
            if sx_truthy(((_cells['oi'] < len(old_kids)) if not sx_truthy((_cells['oi'] < len(old_kids))) else (not sx_truthy((match_by_id == nth(old_kids, _cells['oi'])))))):
                dom_insert_before(old_parent, match_by_id, (nth(old_kids, _cells['oi']) if sx_truthy((_cells['oi'] < len(old_kids))) else NIL))
            morph_node(match_by_id, new_child)
            _cells['oi'] = (_cells['oi'] + 1)
        elif sx_truthy((_cells['oi'] < len(old_kids))):
            old_child = nth(old_kids, _cells['oi'])
            # Old node at the cursor has an id but the new one doesn't:
            # insert the new node before it, keeping the keyed old node
            # available for a later id match (cursor does not advance).
            if sx_truthy((dom_id(old_child) if not sx_truthy(dom_id(old_child)) else (not sx_truthy(match_id)))):
                dom_insert_before(old_parent, dom_clone(new_child), old_child)
            else:
                morph_node(old_child, new_child)
                _cells['oi'] = (_cells['oi'] + 1)
        else:
            # Old children exhausted: append a clone of the new child.
            dom_append(old_parent, dom_clone(new_child))
    # Remove unconsumed old children — only those still attached to
    # old_parent and not flagged sx-preserve or sx-ignore.
    return for_each(lambda i: ((lambda leftover: (dom_remove_child(old_parent, leftover) if sx_truthy((dom_is_child_of_p(leftover, old_parent) if not sx_truthy(dom_is_child_of_p(leftover, old_parent)) else ((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(leftover, 'sx-preserve')))) else (not sx_truthy(dom_has_attr_p(leftover, 'sx-ignore')))))) else NIL))(nth(old_kids, i)) if sx_truthy((i >= _cells['oi'])) else NIL), range(_cells['oi'], len(old_kids)))
|
||||
|
||||
# swap-dom-nodes
# Applies already-parsed DOM nodes to `target` using an htmx-style swap
# strategy string:
#   innerHTML (and the None/default case) — morph target's children to
#     match new_nodes (wrapping non-fragments in a temp <div>);
#   outerHTML — morph target itself against the (first) new node, then
#     splice any remaining fragment siblings in after it; a fragment
#     with no first child removes the target entirely;
#   afterend / beforeend / afterbegin / beforebegin — positional insert;
#   delete — remove target; none — no-op.
swap_dom_nodes = lambda target, new_nodes, strategy: _sx_case(strategy, [('innerHTML', lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL)))), ('outerHTML', lambda: (lambda parent: _sx_begin(((lambda fc: (_sx_begin(morph_node(target, fc), (lambda sib: insert_remaining_siblings(parent, target, sib))(dom_next_sibling(fc))) if sx_truthy(fc) else dom_remove_child(parent, target)))(dom_first_child(new_nodes)) if sx_truthy(dom_is_fragment_p(new_nodes)) else morph_node(target, new_nodes)), parent))(dom_parent(target))), ('afterend', lambda: dom_insert_after(target, new_nodes)), ('beforeend', lambda: dom_append(target, new_nodes)), ('afterbegin', lambda: dom_prepend(target, new_nodes)), ('beforebegin', lambda: dom_insert_before(dom_parent(target), new_nodes, target)), ('delete', lambda: dom_remove_child(dom_parent(target), target)), ('none', lambda: NIL), (None, lambda: (morph_children(target, new_nodes) if sx_truthy(dom_is_fragment_p(new_nodes)) else (lambda wrapper: _sx_begin(dom_append(wrapper, new_nodes), morph_children(target, wrapper)))(dom_create_element('div', NIL))))])
|
||||
|
||||
# insert-remaining-siblings
def insert_remaining_siblings(parent, ref_node, sib):
    """Move `sib` and every sibling after it so they follow `ref_node`.

    Iterative equivalent of the transpiled tail recursion: each node's
    next sibling is captured before the move, because inserting the
    node elsewhere detaches it from its current position. Returns NIL.
    """
    while sx_truthy(sib):
        following = dom_next_sibling(sib)
        dom_insert_after(ref_node, sib)
        ref_node = sib
        sib = following
    return NIL
|
||||
|
||||
# swap-html-string
# Applies a raw HTML string to `target` with an htmx-style strategy —
# the string counterpart of swap-dom-nodes, but without morphing:
#   innerHTML (and None/default) — replace target's inner HTML;
#   outerHTML — insert the HTML after target, then remove target;
#   afterend / beforeend / afterbegin / beforebegin — positional
#     insertAdjacentHTML; delete — remove target; none — no-op.
swap_html_string = lambda target, html, strategy: _sx_case(strategy, [('innerHTML', lambda: dom_set_inner_html(target, html)), ('outerHTML', lambda: (lambda parent: _sx_begin(dom_insert_adjacent_html(target, 'afterend', html), dom_remove_child(parent, target), parent))(dom_parent(target))), ('afterend', lambda: dom_insert_adjacent_html(target, 'afterend', html)), ('beforeend', lambda: dom_insert_adjacent_html(target, 'beforeend', html)), ('afterbegin', lambda: dom_insert_adjacent_html(target, 'afterbegin', html)), ('beforebegin', lambda: dom_insert_adjacent_html(target, 'beforebegin', html)), ('delete', lambda: dom_remove_child(dom_parent(target), target)), ('none', lambda: NIL), (None, lambda: dom_set_inner_html(target, html))])
|
||||
|
||||
# handle-history
# Updates browser history after a swap. Precedence:
#   1) a 'replace-url' response header always wins -> replaceState;
#   2) else the element's sx-push-url attribute -> pushState, where the
#      literal string 'true' means "use the request url", 'false'
#      disables, and any other value is an explicit url;
#   3) else sx-replace-url, with the same 'true'/'false' convention,
#      -> replaceState. Otherwise no history change (NIL).
handle_history = lambda el, url, resp_headers: (lambda push_url: (lambda replace_url: (lambda hdr_replace: (browser_replace_state(hdr_replace) if sx_truthy(hdr_replace) else (browser_push_state((url if sx_truthy((push_url == 'true')) else push_url)) if sx_truthy((push_url if not sx_truthy(push_url) else (not sx_truthy((push_url == 'false'))))) else (browser_replace_state((url if sx_truthy((replace_url == 'true')) else replace_url)) if sx_truthy((replace_url if not sx_truthy(replace_url) else (not sx_truthy((replace_url == 'false'))))) else NIL))))(get(resp_headers, 'replace-url')))(dom_get_attr(el, 'sx-replace-url')))(dom_get_attr(el, 'sx-push-url'))
|
||||
|
||||
# PRELOAD_TTL
# Lifetime of a preload-cache entry in milliseconds (30 s); entries
# older than this are discarded by preload-cache-get.
PRELOAD_TTL = 30000
|
||||
|
||||
# preload-cache-get
def preload_cache_get(cache, url):
    """Take (consume) the cached preload entry for `url`.

    Entries are single-use: any hit is deleted from the cache whether
    or not it is returned. Misses return NIL; entries older than
    PRELOAD_TTL milliseconds are dropped and NIL is returned; fresh
    entries are removed and handed back to the caller.
    """
    entry = dict_get(cache, url)
    if sx_truthy(is_nil(entry)):
        return NIL
    # Consume the entry regardless of freshness (same as the original:
    # both the expired and the fresh branch delete it).
    dict_delete(cache, url)
    if sx_truthy((now_ms() - get(entry, 'timestamp')) > PRELOAD_TTL):
        return NIL
    return entry
|
||||
|
||||
# preload-cache-set
def preload_cache_set(cache, url, text, content_type):
    """Store a preloaded response body for `url`, stamped with now_ms().

    The stored record carries 'text', 'content-type' and 'timestamp'
    keys; freshness is later judged against PRELOAD_TTL.
    """
    record = {'text': text, 'content-type': content_type, 'timestamp': now_ms()}
    return _sx_dict_set(cache, url, record)
|
||||
|
||||
# classify-trigger
def classify_trigger(trigger):
    """Bucket a parsed trigger spec by its 'event' field.

    'every' -> 'poll'; 'intersect', 'load' and 'revealed' map to
    themselves; anything else is a plain DOM 'event' trigger.
    """
    event = get(trigger, 'event')
    if sx_truthy(event == 'every'):
        return 'poll'
    if sx_truthy(event == 'intersect'):
        return 'intersect'
    if sx_truthy(event == 'load'):
        return 'load'
    if sx_truthy(event == 'revealed'):
        return 'revealed'
    return 'event'
|
||||
|
||||
# should-boost-link?
# Decides whether a plain <a> should be "boosted" (intercepted and
# fetched via SX instead of a full navigation). The transpiler encodes
# sx `and` as `(X if not sx_truthy(X) else Y)`, so the chain reads:
#   href present AND not a '#' fragment AND not 'javascript:' AND not
#   'mailto:' AND same-origin AND no sx-get / sx-post / sx-disable
#   attribute on the link.
# Like Lisp `and`, this returns the first falsy operand itself (e.g.
# the missing href) rather than a canonical boolean.
should_boost_link_p = lambda link: (lambda href: (href if not sx_truthy(href) else ((not sx_truthy(starts_with_p(href, '#'))) if not sx_truthy((not sx_truthy(starts_with_p(href, '#')))) else ((not sx_truthy(starts_with_p(href, 'javascript:'))) if not sx_truthy((not sx_truthy(starts_with_p(href, 'javascript:')))) else ((not sx_truthy(starts_with_p(href, 'mailto:'))) if not sx_truthy((not sx_truthy(starts_with_p(href, 'mailto:')))) else (browser_same_origin_p(href) if not sx_truthy(browser_same_origin_p(href)) else ((not sx_truthy(dom_has_attr_p(link, 'sx-get'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(link, 'sx-get')))) else ((not sx_truthy(dom_has_attr_p(link, 'sx-post'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(link, 'sx-post')))) else (not sx_truthy(dom_has_attr_p(link, 'sx-disable')))))))))))(dom_get_attr(link, 'href'))
|
||||
|
||||
# should-boost-form?
# A form is boosted unless it already opts into explicit SX handling
# (sx-get / sx-post) or opts out entirely (sx-disable). Same sx `and`
# encoding as should-boost-link?: the first falsy operand is returned.
should_boost_form_p = lambda form: ((not sx_truthy(dom_has_attr_p(form, 'sx-get'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(form, 'sx-get')))) else ((not sx_truthy(dom_has_attr_p(form, 'sx-post'))) if not sx_truthy((not sx_truthy(dom_has_attr_p(form, 'sx-post')))) else (not sx_truthy(dom_has_attr_p(form, 'sx-disable')))))
|
||||
|
||||
# parse-sse-swap
def parse_sse_swap(el):
    """Return the element's sx-sse-swap event name, or 'message'.

    Mirrors the SSE convention: an absent/empty attribute means the
    element listens for the default 'message' event.
    """
    if sx_truthy(dom_get_attr(el, 'sx-sse-swap')):
        return dom_get_attr(el, 'sx-sse-swap')
    return 'message'
|
||||
|
||||
|
||||
# === Transpiled from router (client-side route matching) ===
|
||||
|
||||
# split-path-segments
def split_path_segments(path):
    """Split a URL path into its segment list.

    A leading '/' and a single trailing '/' are stripped first; an
    empty remainder yields [] (not ['']).
    """
    trimmed = slice(path, 1) if sx_truthy(starts_with_p(path, '/')) else path
    # Drop one trailing slash when the string is non-empty (sx `and`:
    # a falsy left operand short-circuits the conditional).
    non_empty = not sx_truthy(empty_p(trimmed))
    if sx_truthy(non_empty if not sx_truthy(non_empty) else ends_with_p(trimmed, '/')):
        trimmed = slice(trimmed, 0, (len(trimmed) - 1))
    if sx_truthy(empty_p(trimmed)):
        return []
    return split(trimmed, '/')
|
||||
|
||||
# make-route-segment
def make_route_segment(seg):
    """Parse one route-pattern segment into a {'type', 'value'} dict.

    '<name>' (angle-bracketed) becomes a param segment whose value is
    the bare name; any other segment is a literal matched verbatim.
    """
    if sx_truthy(starts_with_p(seg, '<') if not sx_truthy(starts_with_p(seg, '<')) else ends_with_p(seg, '>')):
        d = {}
        _sx_dict_set(d, 'type', 'param')
        # Strip the surrounding '<' and '>'.
        _sx_dict_set(d, 'value', slice(seg, 1, (len(seg) - 1)))
        return d
    d = {}
    _sx_dict_set(d, 'type', 'literal')
    _sx_dict_set(d, 'value', seg)
    return d
|
||||
|
||||
# parse-route-pattern
def parse_route_pattern(pattern):
    """Compile a route pattern string into a list of segment dicts."""
    return map(make_route_segment, split_path_segments(pattern))
|
||||
|
||||
# match-route-segments
def match_route_segments(path_segs, parsed_segs):
    # Matches concrete path segments against a parsed pattern.
    # Returns NIL immediately when the segment counts differ. Otherwise
    # walks the parsed segments while the 'matched' cell stays True:
    # a 'literal' segment must equal the path segment, a 'param'
    # segment binds the path segment into `params` under its name, and
    # any other segment type fails the match. On success the (possibly
    # empty) params dict is returned; on failure, NIL.
    _cells = {}
    return (NIL if sx_truthy((not sx_truthy((len(path_segs) == len(parsed_segs))))) else (lambda params: _sx_begin(_sx_cell_set(_cells, 'matched', True), _sx_begin(for_each_indexed(lambda i, parsed_seg: ((lambda path_seg: (lambda seg_type: ((_sx_cell_set(_cells, 'matched', False) if sx_truthy((not sx_truthy((path_seg == get(parsed_seg, 'value'))))) else NIL) if sx_truthy((seg_type == 'literal')) else (_sx_dict_set(params, get(parsed_seg, 'value'), path_seg) if sx_truthy((seg_type == 'param')) else _sx_cell_set(_cells, 'matched', False))))(get(parsed_seg, 'type')))(nth(path_segs, i)) if sx_truthy(_cells['matched']) else NIL), parsed_segs), (params if sx_truthy(_cells['matched']) else NIL))))({}))
|
||||
|
||||
# match-route
def match_route(path, pattern):
    """Match `path` against `pattern`; return its params dict or NIL."""
    path_segs = split_path_segments(path)
    parsed_segs = parse_route_pattern(pattern)
    return match_route_segments(path_segs, parsed_segs)
|
||||
|
||||
# find-matching-route
def find_matching_route(path, routes):
    """Return the first route whose 'parsed' pattern matches `path`.

    The winning route is shallow-copied (the input list and its dicts
    are never mutated) and gets the captured path params attached under
    'params'. Returns NIL when no route matches.
    """
    path_segs = split_path_segments(path)
    for route in routes:
        params = match_route_segments(path_segs, get(route, 'parsed'))
        if sx_truthy((not sx_truthy(is_nil(params)))):
            matched = merge(route, {})
            matched['params'] = params
            return matched
    return NIL
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Fixups -- wire up render adapter dispatch
|
||||
# =========================================================================
|
||||
|
||||
@@ -492,3 +492,177 @@
|
||||
(assert-equal 0 (reduce (fn (acc x) (+ acc x)) 0 (list)))
|
||||
(assert-equal 0 (len (list)))
|
||||
(assert-equal "" (str))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; defpage
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Suite: defpage — verifies the page-definition macro records name,
;; path, auth, streaming flags and raw (unevaluated) content/shell/
;; fallback/data expressions in the returned page-def dict.
(defsuite "defpage"
  (deftest "basic defpage returns page-def"
    (let ((p (defpage test-basic :path "/test" :auth :public :content (div "hello"))))
      (assert-true (not (nil? p)))
      (assert-equal "test-basic" (get p "name"))
      (assert-equal "/test" (get p "path"))
      (assert-equal "public" (get p "auth"))))

  (deftest "defpage content expr is unevaluated AST"
    (let ((p (defpage test-content :path "/c" :auth :public :content (~my-comp :title "hi"))))
      (assert-true (not (nil? (get p "content"))))))

  (deftest "defpage with :stream"
    (let ((p (defpage test-stream :path "/s" :auth :public :stream true :content (div "x"))))
      (assert-equal true (get p "stream"))))

  (deftest "defpage with :shell"
    (let ((p (defpage test-shell :path "/sh" :auth :public :stream true
               :shell (~my-layout (~suspense :id "data" :fallback (div "loading...")))
               :content (~my-streamed :data data-val))))
      (assert-true (not (nil? (get p "shell"))))
      (assert-true (not (nil? (get p "content"))))))

  (deftest "defpage with :fallback"
    (let ((p (defpage test-fallback :path "/f" :auth :public :stream true
               :fallback (div :class "skeleton" "loading")
               :content (div "done"))))
      (assert-true (not (nil? (get p "fallback"))))))

  (deftest "defpage with :data"
    (let ((p (defpage test-data :path "/d" :auth :public
               :data (fetch-items)
               :content (~items-list :items items))))
      (assert-true (not (nil? (get p "data"))))))

  ;; Absent optional fields must read back as nil, and :stream must
  ;; default to false (not nil).
  (deftest "defpage missing fields are nil"
    (let ((p (defpage test-minimal :path "/m" :auth :public :content (div "x"))))
      (assert-nil (get p "data"))
      (assert-nil (get p "filter"))
      (assert-nil (get p "aside"))
      (assert-nil (get p "menu"))
      (assert-nil (get p "shell"))
      (assert-nil (get p "fallback"))
      (assert-equal false (get p "stream")))))
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream data protocol (from forms.sx)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Suite: stream-chunk-id — a streamed data chunk is routed to the
;; suspense slot named by its "stream-id" key, with "stream-content"
;; as the single-stream default.
(defsuite "stream-chunk-id"
  (deftest "extracts stream-id from chunk"
    (assert-equal "my-slot" (stream-chunk-id {"stream-id" "my-slot" "x" 1})))

  (deftest "defaults to stream-content when missing"
    (assert-equal "stream-content" (stream-chunk-id {"x" 1 "y" 2}))))
|
||||
|
||||
;; Suite: stream-chunk-bindings — the routing key "stream-id" is
;; stripped before a chunk's keys become template bindings; all other
;; keys pass through unchanged.
(defsuite "stream-chunk-bindings"
  (deftest "removes stream-id from chunk"
    (let ((bindings (stream-chunk-bindings {"stream-id" "slot" "name" "alice" "age" 30})))
      (assert-equal "alice" (get bindings "name"))
      (assert-equal 30 (get bindings "age"))
      (assert-nil (get bindings "stream-id"))))

  (deftest "returns all keys when no stream-id"
    (let ((bindings (stream-chunk-bindings {"a" 1 "b" 2})))
      (assert-equal 1 (get bindings "a"))
      (assert-equal 2 (get bindings "b")))))
|
||||
|
||||
;; Suite: normalize-binding-key — snake_case chunk keys (as produced by
;; typical JSON backends) are converted to the lisp-case names SX
;; templates expect.
(defsuite "normalize-binding-key"
  (deftest "converts underscores to hyphens"
    (assert-equal "my-key" (normalize-binding-key "my_key")))

  (deftest "leaves hyphens unchanged"
    (assert-equal "my-key" (normalize-binding-key "my-key")))

  (deftest "handles multiple underscores"
    (assert-equal "a-b-c" (normalize-binding-key "a_b_c"))))
|
||||
|
||||
;; Suite: bind-stream-chunk — each chunk is evaluated in a fresh child
;; env layered over the base env: base bindings are readable, chunk
;; bindings shadow nothing back into base, and "stream-id" never leaks
;; into the env.
(defsuite "bind-stream-chunk"
  (deftest "creates fresh env with bindings"
    (let ((base {"existing" 42})
          (chunk {"stream-id" "slot" "user-name" "bob" "count" 5})
          (env (bind-stream-chunk chunk base)))
      ;; Base env bindings are preserved
      (assert-equal 42 (get env "existing"))
      ;; Chunk bindings are added (stream-id removed)
      (assert-equal "bob" (get env "user-name"))
      (assert-equal 5 (get env "count"))
      ;; stream-id is not in env
      (assert-nil (get env "stream-id"))))

  (deftest "isolates env from base — bindings don't leak to base"
    (let ((base {"x" 1})
          (chunk {"stream-id" "s" "y" 2})
          (env (bind-stream-chunk chunk base)))
      ;; Chunk bindings should not appear in base
      (assert-nil (get base "y"))
      ;; Base bindings should be in derived env
      (assert-equal 1 (get env "x")))))
|
||||
|
||||
;; Suite: validate-stream-data — streamed page data must be a list of
;; dicts (one dict per chunk); an empty list is valid, but a bare dict
;; or a list containing a non-dict is rejected.
(defsuite "validate-stream-data"
  (deftest "valid: list of dicts"
    (assert-true (validate-stream-data
                   (list {"stream-id" "a" "x" 1}
                         {"stream-id" "b" "y" 2}))))

  (deftest "valid: empty list"
    (assert-true (validate-stream-data (list))))

  (deftest "invalid: single dict (not a list)"
    (assert-equal false (validate-stream-data {"x" 1})))

  (deftest "invalid: list containing non-dict"
    (assert-equal false (validate-stream-data (list {"x" 1} "oops" {"y" 2})))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Multi-stream end-to-end scenarios
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
;; Suite: multi-stream routing — end-to-end checks that the chunk
;; protocol pieces (stream-chunk-id, bind-stream-chunk,
;; normalize-binding-key, defpage's :stream flag) compose correctly.
(defsuite "multi-stream routing"
  (deftest "stream-chunk-id routes different chunks to different slots"
    (let ((chunks (list
                    {"stream-id" "stream-fast" "msg" "quick"}
                    {"stream-id" "stream-medium" "msg" "steady"}
                    {"stream-id" "stream-slow" "msg" "slow"}))
          (ids (map stream-chunk-id chunks)))
      (assert-equal "stream-fast" (nth ids 0))
      (assert-equal "stream-medium" (nth ids 1))
      (assert-equal "stream-slow" (nth ids 2))))

  (deftest "bind-stream-chunk creates isolated envs per chunk"
    (let ((base {"layout" "main"})
          (chunk-a {"stream-id" "a" "title" "First" "count" 1})
          (chunk-b {"stream-id" "b" "title" "Second" "count" 2})
          (env-a (bind-stream-chunk chunk-a base))
          (env-b (bind-stream-chunk chunk-b base)))
      ;; Each env has its own bindings
      (assert-equal "First" (get env-a "title"))
      (assert-equal "Second" (get env-b "title"))
      (assert-equal 1 (get env-a "count"))
      (assert-equal 2 (get env-b "count"))
      ;; Both share base
      (assert-equal "main" (get env-a "layout"))
      (assert-equal "main" (get env-b "layout"))
      ;; Neither leaks into base
      (assert-nil (get base "title"))))

  (deftest "normalize-binding-key applied to chunk keys"
    (let ((chunk {"stream-id" "s" "user_name" "alice" "item_count" 3})
          (bindings (stream-chunk-bindings chunk)))
      ;; Keys with underscores need normalizing for SX env
      (assert-equal "alice" (get bindings "user_name"))
      ;; normalize-binding-key converts them
      (assert-equal "user-name" (normalize-binding-key "user_name"))
      (assert-equal "item-count" (normalize-binding-key "item_count"))))

  (deftest "defpage stream flag defaults to false"
    (let ((p (defpage test-no-stream :path "/ns" :auth :public :content (div "x"))))
      (assert-equal false (get p "stream"))))

  (deftest "defpage stream true recorded in page-def"
    (let ((p (defpage test-with-stream :path "/ws" :auth :public
               :stream true
               :shell (~layout (~suspense :id "data"))
               :content (~chunk :val val))))
      (assert-equal true (get p "stream"))
      (assert-true (not (nil? (get p "shell")))))))
|
||||
|
||||
@@ -122,4 +122,40 @@
|
||||
;; /docs/ should match docs-index, not docs-page
|
||||
(let ((result (find-matching-route "/docs/" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal "docs-index" (get result "name")))))
|
||||
|
||||
;; NOTE(review): these deftests continue the routing defsuite opened
;; before this hunk (its (defsuite ...) header is outside this view);
;; the final form below also closes that suite.

;; find-matching-route must carry every route property — including the
;; :stream / :has-data flags — through to the matched result unchanged.
(deftest "propagates stream flag from route"
  (let ((routes (list
                  {:pattern "/demo/streaming"
                   :parsed (parse-route-pattern "/demo/streaming")
                   :name "streaming-demo"
                   :stream true
                   :has-data true})))
    (let ((result (find-matching-route "/demo/streaming" routes)))
      (assert-true (not (nil? result)))
      (assert-equal true (get result "stream"))
      (assert-equal true (get result "has-data")))))

;; A route that never declared :stream must yield nil, not false.
(deftest "non-streaming route has no stream flag"
  (let ((routes (list
                  {:pattern "/about"
                   :parsed (parse-route-pattern "/about")
                   :name "about"
                   :has-data false})))
    (let ((result (find-matching-route "/about" routes)))
      (assert-true (not (nil? result)))
      (assert-nil (get result "stream")))))

;; Param capture and property propagation together: <id> binds the
;; concrete segment while :stream/:content pass through.
(deftest "streaming route with params propagates all properties"
  (let ((routes (list
                  {:pattern "/stream/<id>"
                   :parsed (parse-route-pattern "/stream/<id>")
                   :name "stream-page"
                   :stream true
                   :has-data true
                   :content "expr"})))
    (let ((result (find-matching-route "/stream/fast" routes)))
      (assert-true (not (nil? result)))
      (assert-equal true (get result "stream"))
      (assert-equal "fast" (get (get result "params") "id"))
      (assert-equal "expr" (get result "content"))))))
|
||||
|
||||
Reference in New Issue
Block a user