SX URL algebra: relative resolution, keyword ops, ! special forms
Extends router.sx with the full SX URL algebra — structural navigation (.slug, .., ...), keyword set/delta (.:page.4, .:page.+1), bare-dot shorthand, and ! special form parsing (!source, !inspect, !diff, !search, !raw, !json). All pure SX spec, bootstrapped to both Python and JS. Fixes: index-of -1/nil portability (_index-of-safe wrapper), variadic (+ a b c) transpilation bug (use nested binary +). Includes 115 passing tests covering all operations. Also: "The" strapline and essay title. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -14,7 +14,7 @@
|
||||
// =========================================================================
|
||||
|
||||
// Canonical nil sentinel shared by the transpiled runtime: a frozen singleton
// (distinct from JS null/undefined) whose toString prints "nil".
var NIL = Object.freeze({ _nil: true, toString: function() { return "nil"; } });
|
||||
// Build timestamp of the transpiled bundle.
// (A stale duplicate assignment from an earlier build was removed here;
// the later assignment always won, so behavior is unchanged.)
var SX_VERSION = "2026-03-12T18:28:35Z";
|
||||
|
||||
// True for the NIL sentinel and for JS null/undefined (x == null covers both).
function isNil(x) { return x === NIL || x == null; }
|
||||
// SX truthiness: everything is truthy except false and nil-ish values
// (so 0 and "" are truthy, unlike plain JS).
function isSxTruthy(x) { return !(x === false || isNil(x)); }
|
||||
@@ -3739,6 +3739,189 @@ callExpr.push(dictGet(kwargs, k)); } }
|
||||
})();
|
||||
})()); };
|
||||
|
||||
// _count-leading-dots
// Length of the run of "." characters at the start of s.
var _countLeadingDots = function(s) {
  var n = 0;
  var remaining = s;
  while (!isSxTruthy(isEmpty(remaining)) && isSxTruthy(startsWith(remaining, "."))) {
    n = n + 1;
    remaining = slice(remaining, 1);
  }
  return n;
};
|
||||
|
||||
// _strip-trailing-close
// Peel every ")" off the end: "/(a.(b))" -> "/(a.(b".
var _stripTrailingClose = function(s) {
  var out = s;
  while (isSxTruthy(endsWith(out, ")"))) {
    out = slice(out, 0, len(out) - 1);
  }
  return out;
};
|
||||
|
||||
// _index-of-safe
// Portable index-of: collapses the platform -1 / nil difference to NIL.
var _indexOfSafe = function(s, needle) {
  var found = indexOf_(s, needle);
  if (isSxTruthy(sxOr(isNil(found), (found < 0)))) {
    return NIL;
  }
  return found;
};
|
||||
|
||||
// _last-index-of
// Index of the final occurrence of needle in s, or NIL when absent.
// Iterative scan: repeatedly find the next hit and remember the last one.
var _lastIndexOf = function(s, needle) {
  var best = NIL;
  var offset = 0;
  var remaining = s;
  while (true) {
    var hit = _indexOfSafe(remaining, needle);
    if (isSxTruthy(isNil(hit))) {
      return best;
    }
    best = offset + hit;
    offset = offset + hit + 1;
    remaining = slice(s, offset);
  }
};
|
||||
|
||||
// _pop-sx-url-level
// Remove the innermost nesting level of an absolute SX URL:
//   "/(a.(b.(c)))" -> "/(a.(b))", "/(a.(b))" -> "/(a)", "/(a)" -> "/".
// Fix: reuse `stripped` instead of calling _stripTrailingClose(url) twice.
var _popSxUrlLevel = function(url) {
  var stripped = _stripTrailingClose(url);
  var closeCount = len(url) - len(stripped);
  // At most one closing paren means we are already at a single level: go to root.
  if (isSxTruthy(closeCount <= 1)) { return "/"; }
  var lastDp = _lastIndexOf(stripped, ".(");
  // No ".(" left: single-level URL, popping lands on the bare root.
  if (isSxTruthy(isNil(lastDp))) { return "/"; }
  // Cut from the last ".(" to the end of stripped and drop one closing paren.
  return String(slice(stripped, 0, lastDp)) + String(slice(url, len(url) - (closeCount - 1)));
};
|
||||
|
||||
// _pop-sx-url-levels
// Apply _popSxUrlLevel n times (no-op when n <= 0).
var _popSxUrlLevels = function(url, n) {
  var out = url;
  var remaining = n;
  while (!isSxTruthy(remaining <= 0)) {
    out = _popSxUrlLevel(out);
    remaining = remaining - 1;
  }
  return out;
};
|
||||
|
||||
// _split-pos-kw
// Walk tokens from index i: plain tokens accumulate as positional,
// a ":" token consumes the next token as its value and lands in keywords.
var _splitPosKw = function(tokens, i, pos, kw) {
  var j = i;
  var posAcc = pos;
  var kwAcc = kw;
  while (!isSxTruthy(j >= len(tokens))) {
    var tok = nth(tokens, j);
    if (isSxTruthy(startsWith(tok, ":"))) {
      var val = isSxTruthy((j + 1) < len(tokens)) ? nth(tokens, j + 1) : "";
      kwAcc = append(kwAcc, [[tok, val]]);
      j = j + 2;
    } else {
      posAcc = append(posAcc, [tok]);
      j = j + 1;
    }
  }
  return { "positional": join(".", posAcc), "keywords": kwAcc };
};
|
||||
|
||||
// _parse-relative-body
// "slug.:page.4" -> { positional: "slug", keywords: [[":page","4"]] }.
var _parseRelativeBody = function(body) {
  if (isSxTruthy(isEmpty(body))) {
    return { "positional": "", "keywords": [] };
  }
  return _splitPosKw(split(body, "."), 0, [], []);
};
|
||||
|
||||
// _extract-innermost
// Split url into before + content + suffix (concatenation restores url),
// where content is the innermost expression's dot-separated token run.
// Fix: reuse `stripped` for the suffix offset instead of stripping twice.
var _extractInnermost = function(url) {
  var stripped = _stripTrailingClose(url);
  var suffix = slice(url, len(stripped));
  var lastDp = _lastIndexOf(stripped, ".(");
  if (isSxTruthy(isNil(lastDp))) {
    // Single level: "/(content)".
    return { "before": "/(", "content": slice(stripped, 2), "suffix": suffix };
  }
  // Multi level: everything after the last ".(" is the innermost content.
  return {
    "before": slice(stripped, 0, lastDp + 2),
    "content": slice(stripped, lastDp + 2),
    "suffix": suffix
  };
};
|
||||
|
||||
// _find-kw-in-tokens
// Linear scan for keyword kw; returns the token after it, or NIL when
// kw is absent (or has no following value token).
var _findKwInTokens = function(tokens, i, kw) {
  var j = i;
  while (!isSxTruthy(j >= len(tokens))) {
    if (isSxTruthy(nth(tokens, j) == kw) && isSxTruthy((j + 1) < len(tokens))) {
      return nth(tokens, j + 1);
    }
    j = j + 1;
  }
  return NIL;
};
|
||||
|
||||
// _find-keyword-value
// "explore.signals.:page.3" + ":page" -> "3" (NIL when absent).
var _findKeywordValue = function(content, kw) {
  var tokens = split(content, ".");
  return _findKwInTokens(tokens, 0, kw);
};
|
||||
|
||||
// _replace-kw-in-tokens
// Rebuild the token list with every kw's following value replaced by value.
var _replaceKwInTokens = function(tokens, i, kw, value) {
  var out = [];
  var j = i;
  while (!isSxTruthy(j >= len(tokens))) {
    if (isSxTruthy(nth(tokens, j) == kw) && isSxTruthy((j + 1) < len(tokens))) {
      out = append(out, [kw, value]);
      j = j + 2;
    } else {
      out = append(out, [nth(tokens, j)]);
      j = j + 1;
    }
  }
  return out;
};
|
||||
|
||||
// _set-keyword-in-content
// Replace kw's value when present, otherwise append ".kw.value":
//   "a.b.:page.3" -> "a.b.:page.4";  "a.b" -> "a.b.:page.1".
var _setKeywordInContent = function(content, kw, value) {
  var current = _findKeywordValue(content, kw);
  if (isSxTruthy(isNil(current))) {
    return String(content) + "." + String(kw) + "." + String(value);
  }
  return join(".", _replaceKwInTokens(split(content, "."), 0, kw, value));
};
|
||||
|
||||
// _is-delta-value?
// Signed numerics like "+1"/"-2" are deltas; a lone sign is not.
var _isDeltaValue_p = function(s) {
  if (isSxTruthy(isEmpty(s))) { return false; }
  if (!isSxTruthy(len(s) > 1)) { return false; }
  return sxOr(startsWith(s, "+"), startsWith(s, "-"));
};
|
||||
|
||||
// _apply-delta
// "3" "+1" -> "4"; "3" "-1" -> "2". Non-numeric input falls back to
// deltaStr as a literal value.
var _applyDelta = function(currentStr, deltaStr) {
  var cur = parseInt_(currentStr, NIL);
  var delta = parseInt_(deltaStr, NIL);
  if (isSxTruthy(sxOr(isNil(cur), isNil(delta)))) {
    return deltaStr;
  }
  return String(cur + delta);
};
|
||||
|
||||
// _apply-kw-pairs
// Fold each (kw, value) modification into content, left to right.
// Delta values ("+1"/"-1") are applied against the current value when one
// exists; otherwise the delta string is stored literally.
var _applyKwPairs = function(content, kwPairs) {
  var result = content;
  var remaining = kwPairs;
  while (!isSxTruthy(isEmpty(remaining))) {
    var pair = first(remaining);
    var kw = first(pair);
    var rawVal = nth(pair, 1);
    var actualVal = rawVal;
    if (isSxTruthy(_isDeltaValue_p(rawVal))) {
      var current = _findKeywordValue(result, kw);
      if (!isSxTruthy(isNil(current))) {
        actualVal = _applyDelta(current, rawVal);
      }
    }
    result = _setKeywordInContent(result, kw, actualVal);
    remaining = rest(remaining);
  }
  return result;
};
|
||||
|
||||
// _apply-keywords-to-url
// Rewrite the innermost expression of url with the keyword changes;
// no-op when there are no pairs.
var _applyKeywordsToUrl = function(url, kwPairs) {
  if (isSxTruthy(isEmpty(kwPairs))) { return url; }
  var parts = _extractInnermost(url);
  var newContent = _applyKwPairs(get(parts, "content"), kwPairs);
  return String(get(parts, "before")) + String(newContent) + String(get(parts, "suffix"));
};
|
||||
|
||||
// _normalize-relative
// Bare-dot shorthand (".." / ".slug" / ".:page.4") gains parens;
// already-parenthesized forms pass through unchanged.
var _normalizeRelative = function(url) {
  if (isSxTruthy(startsWith(url, "("))) {
    return url;
  }
  return "(" + String(url) + ")";
};
|
||||
|
||||
// resolve-relative-url
// Resolve a relative SX URL against the current absolute URL.
//   current:  "/(geography.(hypermedia.(example)))"
//   relative: "(.progress-bar)" | ".." | ".:page.+1" | ...
// Dot count: 1 dot = current level (append/modify), N dots = up N-1 levels.
// Fixes: reuse `dots` for the body offset and `stripped` for the suffix
// offset (the original recomputed _countLeadingDots/_stripTrailingClose).
var resolveRelativeUrl = function(current, relative) {
  var canonical = _normalizeRelative(relative);
  var relInner = slice(canonical, 1, len(canonical) - 1);
  var dots = _countLeadingDots(relInner);
  var body = slice(relInner, dots);
  // No leading dot: not actually relative, keep the current URL.
  if (isSxTruthy(dots == 0)) { return current; }
  var parsed = _parseRelativeBody(body);
  var posBody = get(parsed, "positional");
  var kwPairs = get(parsed, "keywords");
  // Step 1: structural navigation.
  var afterNav;
  if (isSxTruthy(dots == 1)) {
    if (isSxTruthy(isEmpty(posBody))) {
      afterNav = current; // keyword-only change at the current level
    } else {
      // Append the positional part as an argument of the innermost call.
      var stripped = _stripTrailingClose(current);
      var suffix = slice(current, len(stripped));
      afterNav = String(stripped) + "." + String(posBody) + String(suffix);
    }
  } else {
    // Two+ dots: pop (dots - 1) levels first.
    var base = _popSxUrlLevels(current, dots - 1);
    if (isSxTruthy(isEmpty(posBody))) {
      afterNav = base; // pure "cd .."
    } else if (isSxTruthy(base == "/")) {
      afterNav = "/(" + String(posBody) + ")";
    } else {
      // Open a fresh nested call at the popped level.
      var strippedBase = _stripTrailingClose(base);
      var baseSuffix = slice(base, len(strippedBase));
      afterNav = String(strippedBase) + ".(" + String(posBody) + ")" + String(baseSuffix);
    }
  }
  // Step 2: apply keyword modifications.
  return _applyKeywordsToUrl(afterNav, kwPairs);
};
|
||||
|
||||
// relative-sx-url?
// Relative forms start with "(" (but not the absolute "/(") or with ".".
var relativeSxUrl_p = function(url) {
  var parenRelative = isSxTruthy(startsWith(url, "(")) && !isSxTruthy(startsWith(url, "/("));
  return sxOr(parenRelative, startsWith(url, "."));
};
|
||||
|
||||
// _url-special-forms
// The closed set of recognized "!" meta-operation names.
var _urlSpecialForms = function() {
  var forms = ["!source", "!inspect", "!diff", "!search", "!raw", "!json"];
  return forms;
};
|
||||
|
||||
// url-special-form?
// True only for a known "!"-prefixed special form name.
var urlSpecialForm_p = function(name) {
  if (!isSxTruthy(startsWith(name, "!"))) { return false; }
  return contains(_urlSpecialForms(), name);
};
|
||||
|
||||
// parse-sx-url
// Classify an SX URL into a descriptor dict (every result carries "raw"):
//   "/"                  -> {"type":"home"}
//   "(.slug)" / "..x"    -> {"type":"relative"}
//   "/(!form...)"        -> {"type":"special-form","form",...,"inner",...}
//   "/(~name)"           -> {"type":"direct-component","name":...}
//   "/(...)"             -> {"type":"absolute"}
//   anything else        -> {"type":"path"}
var parseSxUrl = function(url) {
  if (isSxTruthy(url == "/")) {
    return { "type": "home", "raw": url };
  }
  if (isSxTruthy(relativeSxUrl_p(url))) {
    return { "type": "relative", "raw": url };
  }
  if (isSxTruthy(startsWith(url, "/(!")) && isSxTruthy(endsWith(url, ")"))) {
    // Special form: the form name runs up to the first "." or "(".
    var inner = slice(url, 2, len(url) - 1);
    var dotPos = _indexOfSafe(inner, ".");
    var parenPos = _indexOfSafe(inner, "(");
    var endPos;
    if (isSxTruthy(isNil(dotPos)) && isSxTruthy(isNil(parenPos))) {
      endPos = len(inner);
    } else if (isSxTruthy(isNil(dotPos))) {
      endPos = parenPos;
    } else if (isSxTruthy(isNil(parenPos))) {
      endPos = dotPos;
    } else {
      endPos = min(dotPos, parenPos);
    }
    var formName = slice(inner, 0, endPos);
    var restPart = slice(inner, endPos);
    // Drop the "." separating the form name from its argument expression.
    var innerExpr = isSxTruthy(startsWith(restPart, ".")) ? slice(restPart, 1) : restPart;
    return { "type": "special-form", "form": formName, "inner": innerExpr, "raw": url };
  }
  if (isSxTruthy(startsWith(url, "/(~")) && isSxTruthy(endsWith(url, ")"))) {
    var name = slice(url, 2, len(url) - 1);
    return { "type": "direct-component", "name": name, "raw": url };
  }
  if (isSxTruthy(startsWith(url, "/(")) && isSxTruthy(endsWith(url, ")"))) {
    return { "type": "absolute", "raw": url };
  }
  return { "type": "path", "raw": url };
};
|
||||
|
||||
// url-special-form-name
// "/(!source.(~essay))" -> "!source"; NIL for non-special-form URLs.
var urlSpecialFormName = function(url) {
  var parsed = parseSxUrl(url);
  if (!isSxTruthy(get(parsed, "type") == "special-form")) {
    return NIL;
  }
  return get(parsed, "form");
};
|
||||
|
||||
// url-special-form-inner
// "/(!source.(~essay))" -> "(~essay)"; NIL for non-special-form URLs.
var urlSpecialFormInner = function(url) {
  var parsed = parseSxUrl(url);
  return isSxTruthy(get(parsed, "type") == "special-form") ? get(parsed, "inner") : NIL;
};
|
||||
|
||||
|
||||
// === Transpiled from signals (reactive signal runtime) ===
|
||||
|
||||
|
||||
@@ -155,11 +155,426 @@
|
||||
(str "/" (join "/" (map _fn-to-segment segs)))))))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 7. Relative SX URL resolution
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Resolves relative SX URLs against the current absolute URL.
|
||||
;; This is a macro in the deepest sense: SX transforming SX into SX.
|
||||
;; The URL is code. Relative resolution is code transformation.
|
||||
;;
|
||||
;; Relative URLs start with ( or . :
|
||||
;; (.slug) → append slug as argument to innermost call
|
||||
;; (..section) → up 1: replace innermost with new nested call
|
||||
;; (...section) → up 2: replace 2 innermost levels
|
||||
;;
|
||||
;; Bare-dot shorthand (parens optional):
|
||||
;; .slug → same as (.slug)
|
||||
;; .. → same as (..) — go up one level
|
||||
;; ... → same as (...) — go up two levels
|
||||
;; .:page.4 → same as (.:page.4) — set keyword
|
||||
;;
|
||||
;; Dot count semantics (parallels filesystem . and ..):
|
||||
;; 1 dot = current level (append argument / modify keyword)
|
||||
;; 2 dots = up 1 level (sibling call)
|
||||
;; 3 dots = up 2 levels
|
||||
;; N dots = up N-1 levels
|
||||
;;
|
||||
;; Keyword operations (set, delta):
|
||||
;; (.:page.4) → set :page to 4 at current level
|
||||
;; (.:page.+1) → increment :page by 1 (delta)
|
||||
;; (.:page.-1) → decrement :page by 1 (delta)
|
||||
;; (.slug.:page.1) → append slug AND set :page=1
|
||||
;;
|
||||
;; Examples (current = "/(geography.(hypermedia.(example)))"):
|
||||
;; (.progress-bar) → /(geography.(hypermedia.(example.progress-bar)))
|
||||
;; (..reactive.demo) → /(geography.(hypermedia.(reactive.demo)))
|
||||
;; (...marshes) → /(geography.(marshes))
|
||||
;; (..) → /(geography.(hypermedia))
|
||||
;; (...) → /(geography)
|
||||
;;
|
||||
;; Keyword examples (current = "/(language.(spec.(explore.signals.:page.3)))"):
|
||||
;; (.:page.4) → /(language.(spec.(explore.signals.:page.4)))
|
||||
;; (.:page.+1) → /(language.(spec.(explore.signals.:page.4)))
|
||||
;; (.:page.-1) → /(language.(spec.(explore.signals.:page.2)))
|
||||
;; (..eval) → /(language.(spec.(eval)))
|
||||
;; (..eval.:page.1) → /(language.(spec.(eval.:page.1)))
|
||||
|
||||
(define _count-leading-dots :effects []
  (fn ((s :as string))
    ;; Length of the run of '.' characters at the head of s.
    (cond
      (empty? s) 0
      (starts-with? s ".") (+ 1 (_count-leading-dots (slice s 1)))
      :else 0)))
|
||||
|
||||
(define _strip-trailing-close :effects []
  (fn ((s :as string))
    ;; Peel ')' characters off the end: "/(a.(b.(c)))" -> "/(a.(b.(c".
    (if (not (ends-with? s ")"))
        s
        (_strip-trailing-close (slice s 0 (- (len s) 1))))))
|
||||
|
||||
(define _index-of-safe :effects []
  (fn ((s :as string) (needle :as string))
    ;; Portable index-of: some platforms report a miss as -1, others as nil.
    ;; Normalize both to nil.
    (let ((found (index-of s needle)))
      (if (or (nil? found) (< found 0))
          nil
          found))))
|
||||
|
||||
(define _last-index-of :effects []
  (fn ((s :as string) (needle :as string))
    ;; Index of the final occurrence of needle in s, or nil when absent.
    ;; Recurse past each hit and keep the furthest one.
    (let ((first-hit (_index-of-safe s needle)))
      (if (nil? first-hit)
          nil
          (let ((later (_last-index-of (slice s (+ first-hit 1)) needle)))
            (if (nil? later)
                first-hit
                (+ (+ first-hit 1) later)))))))
|
||||
|
||||
(define _pop-sx-url-level :effects []
  (fn ((url :as string))
    ;; Remove the innermost nesting level from an absolute SX URL:
    ;;   "/(a.(b.(c)))" -> "/(a.(b))", "/(a.(b))" -> "/(a)", "/(a)" -> "/".
    ;; Fix: reuse `stripped` for close-count instead of stripping twice.
    (let ((stripped (_strip-trailing-close url)))
      (let ((close-count (- (len url) (len stripped))))
        (if (<= close-count 1)
            "/" ;; already at a single level — popping lands on the bare root
            (let ((last-dp (_last-index-of stripped ".(")))
              (if (nil? last-dp)
                  "/" ;; single-level URL, pop to root
                  ;; Cut from the last ".(" and drop one closing paren.
                  (str (slice stripped 0 last-dp)
                       (slice url (- (len url) (- close-count 1)))))))))))
|
||||
|
||||
(define _pop-sx-url-levels :effects []
  (fn ((url :as string) (n :as number))
    ;; Apply _pop-sx-url-level n times (no-op when n <= 0).
    (if (> n 0)
        (_pop-sx-url-levels (_pop-sx-url-level url) (- n 1))
        url)))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 8. Relative URL body parsing — positional vs keyword tokens
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Body "slug.:page.4" → positional "slug", keywords ((:page 4))
|
||||
;; Body ":page.+1" → positional "", keywords ((:page +1))
|
||||
|
||||
(define _split-pos-kw :effects []
  (fn ((tokens :as list) (i :as number) (pos :as list) (kw :as list))
    ;; Accumulate plain tokens into pos; a ':' token grabs the following
    ;; token as its value and lands in kw as a (kw val) pair.
    (if (>= i (len tokens))
        {"positional" (join "." pos) "keywords" kw}
        (let ((tok (nth tokens i)))
          (if (not (starts-with? tok ":"))
              ;; Positional token.
              (_split-pos-kw tokens (+ i 1) (append pos (list tok)) kw)
              ;; Keyword token: pair it with the next token (or "").
              (let ((val (if (< (+ i 1) (len tokens))
                             (nth tokens (+ i 1))
                             "")))
                (_split-pos-kw tokens (+ i 2) pos
                               (append kw (list (list tok val))))))))))
|
||||
|
||||
(define _parse-relative-body :effects []
  (fn ((body :as string))
    ;; "slug.:page.4" -> {"positional" "slug" "keywords" ((:page 4))}.
    (if (not (empty? body))
        (_split-pos-kw (split body ".") 0 (list) (list))
        {"positional" "" "keywords" (list)})))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 9. Keyword operations on URL expressions
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Extract, find, and modify keyword arguments in the innermost expression.
|
||||
|
||||
(define _extract-innermost :effects []
  (fn ((url :as string))
    ;; Split url into {"before" "content" "suffix"} such that
    ;; before + content + suffix = url, with content the innermost
    ;; expression's dot-separated tokens.
    ;; Fix: reuse `stripped` for the suffix offset instead of stripping twice.
    (let ((stripped (_strip-trailing-close url)))
      (let ((suffix (slice url (len stripped))))
        (let ((last-dp (_last-index-of stripped ".(")))
          (if (nil? last-dp)
              ;; Single level: "/(content)".
              {"before" "/("
               "content" (slice stripped 2)
               "suffix" suffix}
              ;; Multi level: content follows the last ".(".
              {"before" (slice stripped 0 (+ last-dp 2))
               "content" (slice stripped (+ last-dp 2))
               "suffix" suffix}))))))
|
||||
|
||||
(define _find-kw-in-tokens :effects []
  (fn ((tokens :as list) (i :as number) (kw :as string))
    ;; Linear scan for kw; yields the token after it, nil when missing.
    (cond
      (>= i (len tokens)) nil
      (and (= (nth tokens i) kw) (< (+ i 1) (len tokens))) (nth tokens (+ i 1))
      :else (_find-kw-in-tokens tokens (+ i 1) kw))))
|
||||
|
||||
(define _find-keyword-value :effects []
  (fn ((content :as string) (kw :as string))
    ;; "explore.signals.:page.3" ":page" -> "3" (nil when absent).
    (let ((tokens (split content ".")))
      (_find-kw-in-tokens tokens 0 kw))))
|
||||
|
||||
(define _replace-kw-in-tokens :effects []
  (fn ((tokens :as list) (i :as number) (kw :as string) (value :as string))
    ;; Rebuild the token list with every kw's following value replaced.
    (cond
      (>= i (len tokens))
      (list)
      ;; Hit: emit kw + new value and skip the old value token.
      (and (= (nth tokens i) kw) (< (+ i 1) (len tokens)))
      (append (list kw value)
              (_replace-kw-in-tokens tokens (+ i 2) kw value))
      ;; Miss: keep the token and continue.
      :else
      (cons (nth tokens i)
            (_replace-kw-in-tokens tokens (+ i 1) kw value)))))
|
||||
|
||||
(define _set-keyword-in-content :effects []
  (fn ((content :as string) (kw :as string) (value :as string))
    ;; Replace kw's value when present, otherwise append ".kw.value":
    ;;   "a.b.:page.3" ":page" "4" -> "a.b.:page.4"
    ;;   "a.b"         ":page" "1" -> "a.b.:page.1"
    (if (nil? (_find-keyword-value content kw))
        (str content "." kw "." value)
        (join "." (_replace-kw-in-tokens (split content ".") 0 kw value)))))
|
||||
|
||||
(define _is-delta-value? :effects []
  (fn ((s :as string))
    ;; Signed numerics like "+1"/"-2"/"+10" are deltas; a lone sign is not.
    (and (> (len s) 1)
         (not (empty? s))
         (or (starts-with? s "+") (starts-with? s "-")))))
|
||||
|
||||
(define _apply-delta :effects []
  (fn ((current-str :as string) (delta-str :as string))
    ;; "3" "+1" -> "4"; "3" "-1" -> "2".
    ;; Non-numeric input falls back to delta-str as a literal value.
    (let ((base (parse-int current-str nil))
          (step (parse-int delta-str nil)))
      (if (and (not (nil? base)) (not (nil? step)))
          (str (+ base step))
          delta-str))))
|
||||
|
||||
(define _apply-kw-pairs :effects []
  (fn ((content :as string) (kw-pairs :as list))
    ;; Fold each (kw val) modification into content, left to right.
    ;; Deltas apply against the current value when one exists; otherwise
    ;; the delta string is stored literally.
    (if (empty? kw-pairs)
        content
        (let ((pair (first kw-pairs)))
          (let ((kw (first pair))
                (raw-val (nth pair 1)))
            (let ((actual-val
                   (if (not (_is-delta-value? raw-val))
                       raw-val
                       (let ((current (_find-keyword-value content kw)))
                         (if (nil? current)
                             raw-val
                             (_apply-delta current raw-val))))))
              (_apply-kw-pairs (_set-keyword-in-content content kw actual-val)
                               (rest kw-pairs))))))))
|
||||
|
||||
(define _apply-keywords-to-url :effects []
  (fn ((url :as string) (kw-pairs :as list))
    ;; Rewrite the innermost expression of url with the keyword changes;
    ;; no-op when there are no pairs.
    (if (empty? kw-pairs)
        url
        (let ((parts (_extract-innermost url)))
          (str (get parts "before")
               (_apply-kw-pairs (get parts "content") kw-pairs)
               (get parts "suffix"))))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 10. Public API: resolve-relative-url (structural + keywords)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(define _normalize-relative :effects []
  (fn ((url :as string))
    ;; Bare-dot shorthand gains parens: ".." -> "(..)", ".slug" -> "(.slug)",
    ;; ".:page.4" -> "(.:page.4)". Paren forms pass through unchanged.
    (if (not (starts-with? url "("))
        (str "(" url ")")
        url)))
|
||||
|
||||
(define resolve-relative-url :effects []
  (fn ((current :as string) (relative :as string))
    ;; Resolve a relative SX URL against the current absolute URL.
    ;;   current:  "/(geography.(hypermedia.(example)))"
    ;;   relative: "(.progress-bar)" | ".." | ".:page.+1" | ...
    ;; Dot count: 1 dot = current level; N dots = up N-1 levels.
    ;; Fix: reuse the `dots` binding for the body offset (was counted twice).
    (let ((canonical (_normalize-relative relative)))
      (let ((rel-inner (slice canonical 1 (- (len canonical) 1))))
        (let ((dots (_count-leading-dots rel-inner)))
          (let ((body (slice rel-inner dots)))
            (if (= dots 0)
                current ;; no leading dot — nothing to resolve
                (let ((parsed (_parse-relative-body body)))
                  (let ((pos-body (get parsed "positional"))
                        (kw-pairs (get parsed "keywords")))
                    ;; Step 1: structural navigation.
                    (let ((after-nav
                           (if (= dots 1)
                               ;; One dot: stay at the current level.
                               (if (empty? pos-body)
                                   current ;; keyword-only change
                                   (let ((stripped (_strip-trailing-close current)))
                                     (let ((suffix (slice current (len stripped))))
                                       (str stripped "." pos-body suffix))))
                               ;; Two+ dots: pop (dots - 1) levels first.
                               (let ((base (_pop-sx-url-levels current (- dots 1))))
                                 (cond
                                   (empty? pos-body) base ;; pure "cd .."
                                   (= base "/") (str "/(" pos-body ")")
                                   :else
                                   (let ((stripped (_strip-trailing-close base)))
                                     (let ((suffix (slice base (len stripped))))
                                       (str stripped ".(" pos-body ")" suffix))))))))
                      ;; Step 2: apply keyword modifications.
                      (_apply-keywords-to-url after-nav kw-pairs))))))))))
|
||||
|
||||
;; Check if a URL is relative (starts with ( but not /( , or starts with .)
|
||||
(define relative-sx-url? :effects []
  (fn ((url :as string))
    ;; Relative forms: a bare leading dot, or "(" that is not the
    ;; absolute "/(" prefix.
    (or (starts-with? url ".")
        (and (starts-with? url "(")
             (not (starts-with? url "/("))))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; 11. URL special forms (! prefix)
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Special forms are meta-operations on URL expressions.
|
||||
;; Distinguished by `!` prefix to avoid name collisions with sections/pages.
|
||||
;;
|
||||
;; Known forms:
|
||||
;; !source — show defcomp source code
|
||||
;; !inspect — deps, CSS footprint, render plan, IO
|
||||
;; !diff — side-by-side comparison of two expressions
|
||||
;; !search — grep within a page/spec
|
||||
;; !raw — skip ~sx-doc wrapping, return raw content
|
||||
;; !json — return content as JSON data
|
||||
;;
|
||||
;; URL examples:
|
||||
;; /(!source.(~essay-sx-sucks))
|
||||
;; /(!inspect.(language.(doc.primitives)))
|
||||
;; /(!diff.(language.(spec.signals)).(language.(spec.eval)))
|
||||
;; /(!search."define".:in.(language.(spec.signals)))
|
||||
;; /(!raw.(~some-component))
|
||||
;; /(!json.(language.(doc.primitives)))
|
||||
|
||||
(define _url-special-forms :effects []
  (fn ()
    ;; The closed set of recognized "!" meta-operation names.
    (list "!source"
          "!inspect"
          "!diff"
          "!search"
          "!raw"
          "!json")))
|
||||
|
||||
(define url-special-form? :effects []
  (fn ((name :as string))
    ;; True only for a known "!"-prefixed special form name.
    (if (starts-with? name "!")
        (contains? (_url-special-forms) name)
        false)))
|
||||
|
||||
(define parse-sx-url :effects []
  (fn ((url :as string))
    ;; Classify url into a descriptor dict; every result carries "raw" = url.
    ;;   "/"                    -> {"type" "home"}
    ;;   "(.slug)" / "..slug"   -> {"type" "relative"}
    ;;   "/(!form...)"          -> {"type" "special-form" "form" .. "inner" ..}
    ;;   "/(~name)"             -> {"type" "direct-component" "name" ..}
    ;;   "/(...)"               -> {"type" "absolute"}
    ;;   anything else          -> {"type" "path"}
    (cond
      (= url "/")
      {"type" "home" "raw" url}

      (relative-sx-url? url)
      {"type" "relative" "raw" url}

      (and (starts-with? url "/(!") (ends-with? url ")"))
      ;; Special form, e.g. "/(!source.(~essay))" — the form name runs up
      ;; to the first "." or "(" inside the wrapper.
      (let ((inner (slice url 2 (- (len url) 1))))
        (let ((dot-pos (_index-of-safe inner "."))
              (paren-pos (_index-of-safe inner "(")))
          (let ((end-pos (cond
                           (and (nil? dot-pos) (nil? paren-pos)) (len inner)
                           (nil? dot-pos) paren-pos
                           (nil? paren-pos) dot-pos
                           :else (min dot-pos paren-pos))))
            (let ((form-name (slice inner 0 end-pos))
                  (rest-part (slice inner end-pos)))
              {"type" "special-form"
               "form" form-name
               ;; Drop the "." separating the form name from its argument.
               "inner" (if (starts-with? rest-part ".")
                           (slice rest-part 1)
                           rest-part)
               "raw" url}))))

      (and (starts-with? url "/(~") (ends-with? url ")"))
      ;; Direct component: "/(~essay-sx-sucks)".
      {"type" "direct-component"
       "name" (slice url 2 (- (len url) 1))
       "raw" url}

      (and (starts-with? url "/(") (ends-with? url ")"))
      {"type" "absolute" "raw" url}

      :else
      {"type" "path" "raw" url})))
|
||||
|
||||
(define url-special-form-name :effects []
  (fn ((url :as string))
    ;; "/(!source.(~essay))" -> "!source"; nil for any other URL type.
    (let ((parsed (parse-sx-url url)))
      (if (not (= (get parsed "type") "special-form"))
          nil
          (get parsed "form")))))
|
||||
|
||||
(define url-special-form-inner :effects []
  (fn ((url :as string))
    ;; "/(!source.(~essay))" -> "(~essay)"; "/(!diff.(a).(b))" -> "(a).(b)".
    ;; nil for any other URL type.
    (let ((parsed (parse-sx-url url)))
      (if (not (= (get parsed "type") "special-form"))
          nil
          (get parsed "inner")))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Platform interface — none required
|
||||
;; --------------------------------------------------------------------------
|
||||
;; All functions use only pure primitives:
|
||||
;; split, slice, starts-with?, ends-with?, len, empty?, replace,
|
||||
;; map, filter, for-each, for-each-indexed, nth, get, dict-set!, merge,
|
||||
;; list, nil?, not, =, case, join, str, index-of, and, or, cons,
|
||||
;; first, rest, append, parse-int, contains?, min, cond
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
@@ -2881,6 +2881,330 @@ def build_affinity_analysis(demo_components, page_plans):
|
||||
return {'components': demo_components, 'page-plans': page_plans}
|
||||
|
||||
|
||||
# === Transpiled from router (client-side route matching) ===
|
||||
|
||||
# split-path-segments
def split_path_segments(path):
    # Drop one leading "/" so absolute and bare paths split identically.
    trimmed = slice(path, 1) if sx_truthy(starts_with_p(path, '/')) else path
    # Drop a single trailing "/" from a non-empty path
    # (transpiled (and (not (empty? trimmed)) (ends-with? trimmed "/"))).
    if (not sx_truthy(empty_p(trimmed))) and sx_truthy(ends_with_p(trimmed, '/')):
        trimmed2 = slice(trimmed, 0, (len(trimmed) - 1))
    else:
        trimmed2 = trimmed
    if sx_truthy(empty_p(trimmed2)):
        return []
    return split(trimmed2, '/')
|
||||
|
||||
# make-route-segment
def make_route_segment(seg):
    # "<name>" becomes a param descriptor; everything else is a literal.
    if sx_truthy(starts_with_p(seg, '<')) and sx_truthy(ends_with_p(seg, '>')):
        return {'type': 'param', 'value': slice(seg, 1, (len(seg) - 1))}
    return {'type': 'literal', 'value': seg}
|
||||
|
||||
# parse-route-pattern
def parse_route_pattern(pattern):
    # Each path segment of the pattern becomes a {'type','value'} descriptor.
    return map(make_route_segment, split_path_segments(pattern))
|
||||
|
||||
# match-route-segments
def match_route_segments(path_segs, parsed_segs):
    # Mismatched segment counts can never match.
    if not sx_truthy(len(path_segs) == len(parsed_segs)):
        return NIL
    params = {}
    _cells = {}
    _cells['matched'] = True

    def _check_segment(i, parsed_seg):
        # Skip further comparison once a mismatch has been recorded.
        if not sx_truthy(_cells['matched']):
            return NIL
        path_seg = nth(path_segs, i)
        seg_type = get(parsed_seg, 'type')
        if sx_truthy(seg_type == 'literal'):
            if not sx_truthy(path_seg == get(parsed_seg, 'value')):
                return _sx_cell_set(_cells, 'matched', False)
            return NIL
        if sx_truthy(seg_type == 'param'):
            # Capture the concrete segment under the parameter's name.
            return _sx_dict_set(params, get(parsed_seg, 'value'), path_seg)
        # Unknown segment type: treat as a mismatch.
        return _sx_cell_set(_cells, 'matched', False)

    for_each_indexed(_check_segment, parsed_segs)
    if sx_truthy(_cells['matched']):
        return params
    return NIL
|
||||
|
||||
# match-route
def match_route(path, pattern):
    # Convenience wrapper: split the path, parse the pattern, then match.
    return match_route_segments(split_path_segments(path),
                                parse_route_pattern(pattern))
|
||||
|
||||
# find-matching-route
def find_matching_route(path, routes):
    # SX URLs ("/(...") are converted to a plain path when possible;
    # the original path is kept when conversion yields a falsy value.
    if sx_truthy(starts_with_p(path, '/(')):
        converted = sx_url_to_path(path)
        match_path = converted if sx_truthy(converted) else path
    else:
        match_path = path
    path_segs = split_path_segments(match_path)
    result = NIL
    # First matching route wins; later routes are visited but ignored.
    for route in routes:
        if sx_truthy(is_nil(result)):
            params = match_route_segments(path_segs, get(route, 'parsed'))
            if not sx_truthy(is_nil(params)):
                matched = merge(route, {})
                matched['params'] = params
                result = matched
    return result
|
||||
|
||||
# _fn-to-segment
def _fn_to_segment(name):
    """Map an SX URL function name to its path segment (e.g. doc -> docs).

    Unknown names pass through unchanged.
    """
    segment_for = {
        'doc': 'docs',
        'spec': 'specs',
        'bootstrapper': 'bootstrappers',
        'test': 'testing',
        'example': 'examples',
        'protocol': 'protocols',
        'essay': 'essays',
        'plan': 'plans',
        'reference-detail': 'reference',
    }
    return segment_for.get(name, name)
|
||||
|
||||
# sx-url-to-path
def sx_url_to_path(url):
    """Convert an SX expression URL like "/(a.(doc.b))" to "/a/docs/b".

    Returns NIL for anything that is not a "/(...)" URL.
    """
    # Guard clause: must be wrapped as "/(" ... ")".
    if not (sx_truthy(starts_with_p(url, '/(')) and sx_truthy(ends_with_p(url, ')'))):
        return NIL
    inner = slice(url, 2, (len(url) - 1))
    # Flatten: dots become slashes, all parens are dropped.
    flattened = replace(replace(replace(inner, '.', '/'), '(', ''), ')', '')
    segs = filter(lambda seg: (not sx_truthy(empty_p(seg))), split(flattened, '/'))
    return sx_str('/', join('/', map(_fn_to_segment, segs)))
|
||||
|
||||
# _count-leading-dots
def _count_leading_dots(s):
    """Count the consecutive '.' characters at the start of s."""
    count = 0
    remaining = s
    # Iterative form of the original recursion.
    while (not sx_truthy(empty_p(remaining))) and sx_truthy(starts_with_p(remaining, '.')):
        count = count + 1
        remaining = slice(remaining, 1)
    return count
|
||||
|
||||
# _strip-trailing-close
def _strip_trailing_close(s):
    """Drop every trailing ')' from s."""
    out = s
    # Iterative form of the original recursion.
    while sx_truthy(ends_with_p(out, ')')):
        out = slice(out, 0, (len(out) - 1))
    return out
|
||||
|
||||
# _index-of-safe
def _index_of_safe(s, needle):
    """Portable index_of: both nil and negative "not found" become NIL."""
    idx = index_of(s, needle)
    # Check nil first so the < comparison never sees NIL.
    if sx_truthy(is_nil(idx)):
        return NIL
    return NIL if sx_truthy((idx < 0)) else idx
|
||||
|
||||
# _last-index-of
def _last_index_of(s, needle):
    """Index of the last occurrence of needle in s, or NIL when absent."""
    first_idx = _index_of_safe(s, needle)
    if sx_truthy(is_nil(first_idx)):
        return NIL
    # Recurse on the tail after the first hit; if the tail has no hit,
    # the first hit was also the last.
    tail_idx = _last_index_of(slice(s, (first_idx + 1)), needle)
    if sx_truthy(is_nil(tail_idx)):
        return first_idx
    return ((first_idx + 1) + tail_idx)
|
||||
|
||||
# _pop-sx-url-level
def _pop_sx_url_level(url):
    """Remove the innermost "(...)" level from an absolute SX URL.

    Popping a single-level URL (or "/") yields "/". The count of trailing
    ')' characters tells us the nesting depth.
    """
    stripped = _strip_trailing_close(url)
    # Reuse `stripped`: the original recomputed _strip_trailing_close(url).
    close_count = (len(url) - len(stripped))
    if sx_truthy((close_count <= 1)):
        return '/'
    last_dp = _last_index_of(stripped, '.(')
    if sx_truthy(is_nil(last_dp)):
        # No ".(" separator: already a single level.
        return '/'
    # Cut at the last ".(" and re-append one fewer closing paren.
    return sx_str(slice(stripped, 0, last_dp), slice(url, (len(url) - (close_count - 1))))
|
||||
|
||||
# _pop-sx-url-levels
def _pop_sx_url_levels(url, n):
    """Pop n innermost levels from an SX URL (no-op for n <= 0)."""
    result = url
    remaining = n
    # Iterative form of the original recursion.
    while not sx_truthy((remaining <= 0)):
        result = _pop_sx_url_level(result)
        remaining = remaining - 1
    return result
|
||||
|
||||
# _split-pos-kw
def _split_pos_kw(tokens, i, pos, kw):
    """Partition dot-tokens from index i into positional tokens and
    [keyword, value] pairs.

    Returns {'positional': dotted-string, 'keywords': list-of-pairs}.
    A ":kw" token consumes the following token as its value ('' at end).
    """
    if sx_truthy((i >= len(tokens))):
        return {'positional': join('.', pos), 'keywords': kw}
    tok = nth(tokens, i)
    if not sx_truthy(starts_with_p(tok, ':')):
        # Plain positional token.
        return _split_pos_kw(tokens, (i + 1), append(pos, [tok]), kw)
    val = (nth(tokens, (i + 1)) if sx_truthy(((i + 1) < len(tokens))) else '')
    return _split_pos_kw(tokens, (i + 2), pos, append(kw, [[tok, val]]))
|
||||
|
||||
# _parse-relative-body
def _parse_relative_body(body):
    """Parse the body of a relative URL (after its leading dots) into a
    positional string plus keyword pairs."""
    if sx_truthy(empty_p(body)):
        return {'positional': '', 'keywords': []}
    return _split_pos_kw(split(body, '.'), 0, [], [])
|
||||
|
||||
# _extract-innermost
def _extract_innermost(url):
    """Split an absolute SX URL around its innermost level.

    Returns {'before': prefix up to and incl. the innermost opener,
             'content': the innermost dotted content,
             'suffix': the run of trailing ')' characters}.
    """
    stripped = _strip_trailing_close(url)
    # Reuse `stripped`: the original recomputed _strip_trailing_close(url).
    suffix = slice(url, len(stripped))
    last_dp = _last_index_of(stripped, '.(')
    if sx_truthy(is_nil(last_dp)):
        # Single-level URL: everything after "/(" is the content.
        return {'before': '/(', 'content': slice(stripped, 2), 'suffix': suffix}
    return {'before': slice(stripped, 0, (last_dp + 2)),
            'content': slice(stripped, (last_dp + 2)),
            'suffix': suffix}
|
||||
|
||||
# _find-kw-in-tokens
def _find_kw_in_tokens(tokens, i, kw):
    """Scan tokens from index i for kw; return the token after it, or NIL."""
    j = i
    # Iterative form of the original recursion.
    while not sx_truthy((j >= len(tokens))):
        if sx_truthy((nth(tokens, j) == kw)) and sx_truthy(((j + 1) < len(tokens))):
            return nth(tokens, (j + 1))
        j = j + 1
    return NIL
|
||||
|
||||
# _find-keyword-value
def _find_keyword_value(content, kw):
    """Value following keyword kw in dotted content, or NIL when absent."""
    tokens = split(content, '.')
    return _find_kw_in_tokens(tokens, 0, kw)
|
||||
|
||||
# _replace-kw-in-tokens
def _replace_kw_in_tokens(tokens, i, kw, value):
    """Rebuild the token list from index i with every occurrence of kw's
    following value replaced by `value`."""
    if sx_truthy((i >= len(tokens))):
        return []
    hit = sx_truthy((nth(tokens, i) == kw)) and sx_truthy(((i + 1) < len(tokens)))
    if hit:
        # Emit the keyword with its new value, skipping the old value token.
        return append([kw, value], _replace_kw_in_tokens(tokens, (i + 2), kw, value))
    return cons(nth(tokens, i), _replace_kw_in_tokens(tokens, (i + 1), kw, value))
|
||||
|
||||
# _set-keyword-in-content
def _set_keyword_in_content(content, kw, value):
    """Set kw to value inside dotted content, appending ".kw.value" when
    the keyword is not yet present."""
    if sx_truthy(is_nil(_find_keyword_value(content, kw))):
        return sx_str(content, '.', kw, '.', value)
    return join('.', _replace_kw_in_tokens(split(content, '.'), 0, kw, value))
|
||||
|
||||
# _is-delta-value?
def _is_delta_value_p(s):
    """True when s is a signed delta token like "+1" or "-10":
    nonempty, longer than one char, starting with '+' or '-'."""
    if sx_truthy(empty_p(s)):
        return False
    if not sx_truthy((len(s) > 1)):
        return False
    plus = starts_with_p(s, '+')
    return plus if sx_truthy(plus) else starts_with_p(s, '-')
|
||||
|
||||
# _apply-delta
def _apply_delta(current_str, delta_str):
    """Add numeric delta_str to numeric current_str, as strings.

    When either side fails to parse as an integer, the delta string is
    returned unchanged (literal fallback).
    """
    cur = parse_int(current_str, NIL)
    delta = parse_int(delta_str, NIL)
    if sx_truthy(is_nil(cur)) or sx_truthy(is_nil(delta)):
        return delta_str
    return sx_str((cur + delta))
|
||||
|
||||
# _apply-kw-pairs
def _apply_kw_pairs(content, kw_pairs):
    """Apply each [kw, value] pair to dotted content in order.

    Delta values (+N/-N) are resolved against the keyword's current value;
    a delta on a missing keyword is stored literally.
    """
    if sx_truthy(empty_p(kw_pairs)):
        return content
    pair = first(kw_pairs)
    kw = first(pair)
    raw_val = nth(pair, 1)
    if sx_truthy(_is_delta_value_p(raw_val)):
        current = _find_keyword_value(content, kw)
        actual_val = raw_val if sx_truthy(is_nil(current)) else _apply_delta(current, raw_val)
    else:
        actual_val = raw_val
    return _apply_kw_pairs(_set_keyword_in_content(content, kw, actual_val), rest(kw_pairs))
|
||||
|
||||
# _apply-keywords-to-url
def _apply_keywords_to_url(url, kw_pairs):
    """Apply keyword pairs to the innermost level of an absolute SX URL."""
    if sx_truthy(empty_p(kw_pairs)):
        return url
    parts = _extract_innermost(url)
    updated = _apply_kw_pairs(get(parts, 'content'), kw_pairs)
    return sx_str(get(parts, 'before'), updated, get(parts, 'suffix'))
|
||||
|
||||
# _normalize-relative
def _normalize_relative(url):
    """Canonicalize a relative URL: wrap bare-dot shorthand (".slug") in
    parens; paren-form passes through untouched."""
    if sx_truthy(starts_with_p(url, '(')):
        return url
    return sx_str('(', url, ')')
|
||||
|
||||
# resolve-relative-url
def resolve_relative_url(current, relative):
    """Resolve a relative SX URL against the current absolute SX URL.

    Leading dots select the target level: 1 dot appends at the current
    level, N+1 dots pop N levels first. The remaining body splits into a
    positional part (pushed as new content) and keyword pairs
    (":kw.value", with +N/-N deltas) applied to the innermost level.
    Zero dots returns `current` unchanged.
    """
    canonical = _normalize_relative(relative)
    rel_inner = slice(canonical, 1, (len(canonical) - 1))
    dots = _count_leading_dots(rel_inner)
    # Reuse `dots`: the original recomputed _count_leading_dots(rel_inner).
    body = slice(rel_inner, dots)
    if sx_truthy((dots == 0)):
        return current
    parsed = _parse_relative_body(body)
    pos_body = get(parsed, 'positional')
    kw_pairs = get(parsed, 'keywords')
    # Structural navigation, unpacked from the original one-line lambda
    # pyramid (which also recomputed _strip_trailing_close twice per arm).
    if sx_truthy((dots == 1)):
        if sx_truthy(empty_p(pos_body)):
            after_nav = current
        else:
            # Append positional body at the current level.
            stripped = _strip_trailing_close(current)
            suffix = slice(current, len(stripped))
            after_nav = sx_str(stripped, '.', pos_body, suffix)
    else:
        base = _pop_sx_url_levels(current, (dots - 1))
        if sx_truthy(empty_p(pos_body)):
            after_nav = base
        elif sx_truthy((base == '/')):
            # Pushing onto the root starts a fresh top-level expression.
            after_nav = sx_str('/(', pos_body, ')')
        else:
            # Push positional body as a new nested call on the base.
            stripped = _strip_trailing_close(base)
            suffix = slice(base, len(stripped))
            after_nav = sx_str(stripped, '.(', pos_body, ')', suffix)
    return _apply_keywords_to_url(after_nav, kw_pairs)
|
||||
|
||||
# relative-sx-url?
def relative_sx_url_p(url):
    """True for relative SX URLs: paren-form "(...)" (but not absolute
    "/(...)") or bare-dot shorthand like ".slug" / ".."."""
    if sx_truthy(starts_with_p(url, '(')) and (not sx_truthy(starts_with_p(url, '/('))):
        return True
    return starts_with_p(url, '.')
|
||||
|
||||
# _url-special-forms
def _url_special_forms():
    """Return the list of recognized URL ! special-form names."""
    return ['!source', '!inspect', '!diff', '!search', '!raw', '!json']
|
||||
|
||||
# url-special-form?
def url_special_form_p(name):
    """True when name is a recognized ! special form (e.g. "!source")."""
    bang = starts_with_p(name, '!')
    if not sx_truthy(bang):
        return bang
    return contains_p(_url_special_forms(), name)
|
||||
|
||||
# parse-sx-url
def parse_sx_url(url):
    """Classify an SX URL into a tagged dict.

    Types: 'home' ("/"), 'relative' ("(.x)" / ".x" / ".."),
    'special-form' ("/(!form...)"), 'direct-component' ("/(~name)"),
    'absolute' ("/(...)"), or 'path' (anything else). The raw URL is
    always echoed under 'raw'.
    """
    if sx_truthy((url == '/')):
        return {'type': 'home', 'raw': url}
    elif sx_truthy(relative_sx_url_p(url)):
        return {'type': 'relative', 'raw': url}
    elif sx_truthy((starts_with_p(url, '/(!') if not sx_truthy(starts_with_p(url, '/(!')) else ends_with_p(url, ')'))):
        # Special form: "/(!form.inner)". Strip "/(" and trailing ")".
        inner = slice(url, 2, (len(url) - 1))
        dot_pos = _index_of_safe(inner, '.')
        paren_pos = _index_of_safe(inner, '(')
        # The form name ends at the first '.' or '(' — whichever comes
        # first — or at end-of-string when neither occurs.
        end_pos = (len(inner) if sx_truthy((is_nil(dot_pos) if not sx_truthy(is_nil(dot_pos)) else is_nil(paren_pos))) else (paren_pos if sx_truthy(is_nil(dot_pos)) else (dot_pos if sx_truthy(is_nil(paren_pos)) else min(dot_pos, paren_pos))))
        form_name = slice(inner, 0, end_pos)
        rest_part = slice(inner, end_pos)
        # Drop the separating dot before the inner expression, if present.
        inner_expr = (slice(rest_part, 1) if sx_truthy(starts_with_p(rest_part, '.')) else rest_part)
        return {'type': 'special-form', 'form': form_name, 'inner': inner_expr, 'raw': url}
    elif sx_truthy((starts_with_p(url, '/(~') if not sx_truthy(starts_with_p(url, '/(~')) else ends_with_p(url, ')'))):
        # Direct component: "/(~name)" — name keeps its leading '~'.
        name = slice(url, 2, (len(url) - 1))
        return {'type': 'direct-component', 'name': name, 'raw': url}
    elif sx_truthy((starts_with_p(url, '/(') if not sx_truthy(starts_with_p(url, '/(')) else ends_with_p(url, ')'))):
        return {'type': 'absolute', 'raw': url}
    else:
        # Old-style slash path or anything unrecognized.
        return {'type': 'path', 'raw': url}
|
||||
|
||||
# url-special-form-name
def url_special_form_name(url):
    """The ! form name of a special-form URL (e.g. "!source"), or NIL."""
    parsed = parse_sx_url(url)
    if not sx_truthy((get(parsed, 'type') == 'special-form')):
        return NIL
    return get(parsed, 'form')
|
||||
|
||||
# url-special-form-inner
def url_special_form_inner(url):
    """The inner expression of a special-form URL, or NIL for other URLs."""
    parsed = parse_sx_url(url)
    if not sx_truthy((get(parsed, 'type') == 'special-form')):
        return NIL
    return get(parsed, 'inner')
|
||||
|
||||
|
||||
# === Transpiled from signals (reactive signal runtime) ===
|
||||
|
||||
# signal
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
;; ==========================================================================
|
||||
;; test-router.sx — Tests for client-side route matching
|
||||
;; test-router.sx — Tests for client-side route matching & SX URL algebra
|
||||
;;
|
||||
;; Requires: test-framework.sx loaded first.
|
||||
;; Modules tested: router.sx
|
||||
@@ -119,7 +119,6 @@
|
||||
(let ((routes (list
|
||||
{:pattern "/docs/" :parsed (parse-route-pattern "/docs/") :name "docs-index"}
|
||||
{:pattern "/docs/<slug>" :parsed (parse-route-pattern "/docs/<slug>") :name "docs-page"})))
|
||||
;; /docs/ should match docs-index, not docs-page
|
||||
(let ((result (find-matching-route "/docs/" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal "docs-index" (get result "name")))))
|
||||
@@ -144,18 +143,566 @@
|
||||
:has-data false})))
|
||||
(let ((result (find-matching-route "/about" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-nil (get result "stream")))))
|
||||
(assert-nil (get result "stream"))))))
|
||||
|
||||
(deftest "streaming route with params propagates all properties"
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; sx-url-to-path — SX expression URL → old-style path
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "sx-url-to-path"
|
||||
(deftest "simple two-level"
|
||||
(assert-equal "/language/docs/introduction"
|
||||
(sx-url-to-path "/(language.(doc.introduction))")))
|
||||
|
||||
(deftest "deep nesting"
|
||||
(assert-equal "/geography/hypermedia/reference/attributes"
|
||||
(sx-url-to-path "/(geography.(hypermedia.(reference.attributes)))")))
|
||||
|
||||
(deftest "section index"
|
||||
(assert-equal "/language"
|
||||
(sx-url-to-path "/(language)")))
|
||||
|
||||
(deftest "function name mapping — doc to docs"
|
||||
(assert-equal "/language/docs/getting-started"
|
||||
(sx-url-to-path "/(language.(doc.getting-started))")))
|
||||
|
||||
(deftest "function name mapping — spec to specs"
|
||||
(assert-equal "/language/specs/core"
|
||||
(sx-url-to-path "/(language.(spec.core))")))
|
||||
|
||||
(deftest "function name mapping — example to examples"
|
||||
(assert-equal "/geography/hypermedia/examples/click-to-load"
|
||||
(sx-url-to-path "/(geography.(hypermedia.(example.click-to-load)))")))
|
||||
|
||||
(deftest "function name mapping — essay to essays"
|
||||
(assert-equal "/etc/essays/sx-sucks"
|
||||
(sx-url-to-path "/(etc.(essay.sx-sucks))")))
|
||||
|
||||
(deftest "function name mapping — plan to plans"
|
||||
(assert-equal "/etc/plans/spec-explorer"
|
||||
(sx-url-to-path "/(etc.(plan.spec-explorer))")))
|
||||
|
||||
(deftest "function name mapping — test to testing"
|
||||
(assert-equal "/language/testing/eval"
|
||||
(sx-url-to-path "/(language.(test.eval))")))
|
||||
|
||||
(deftest "function name mapping — bootstrapper to bootstrappers"
|
||||
(assert-equal "/language/bootstrappers/python"
|
||||
(sx-url-to-path "/(language.(bootstrapper.python))")))
|
||||
|
||||
(deftest "function name mapping — protocol to protocols"
|
||||
(assert-equal "/applications/protocols/wire-format"
|
||||
(sx-url-to-path "/(applications.(protocol.wire-format))")))
|
||||
|
||||
(deftest "function name mapping — reference-detail to reference"
|
||||
(assert-equal "/geography/hypermedia/reference/attributes"
|
||||
(sx-url-to-path "/(geography.(hypermedia.(reference-detail.attributes)))")))
|
||||
|
||||
(deftest "non-SX URL returns nil"
|
||||
(assert-nil (sx-url-to-path "/language/docs/introduction"))
|
||||
(assert-nil (sx-url-to-path "https://example.com"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; find-matching-route with SX URLs
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "find-matching-route-sx-urls"
|
||||
(deftest "SX URL auto-converts for matching"
|
||||
(let ((routes (list
|
||||
{:pattern "/stream/<id>"
|
||||
:parsed (parse-route-pattern "/stream/<id>")
|
||||
:name "stream-page"
|
||||
:stream true
|
||||
:has-data true
|
||||
:content "expr"})))
|
||||
(let ((result (find-matching-route "/stream/fast" routes)))
|
||||
{:pattern "/language/docs/<slug>"
|
||||
:parsed (parse-route-pattern "/language/docs/<slug>")
|
||||
:name "docs-page"})))
|
||||
(let ((result (find-matching-route "/(language.(doc.introduction))" routes)))
|
||||
(assert-true (not (nil? result)))
|
||||
(assert-equal true (get result "stream"))
|
||||
(assert-equal "fast" (get (get result "params") "id"))
|
||||
(assert-equal "expr" (get result "content"))))))
|
||||
(assert-equal "docs-page" (get result "name"))
|
||||
(assert-equal "introduction" (get (get result "params") "slug"))))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; SX URL Resolution — Structural Navigation
|
||||
;; ==========================================================================
|
||||
|
||||
(defsuite "relative-sx-url?"
|
||||
(deftest "paren-form relative"
|
||||
(assert-true (relative-sx-url? "(.slug)"))
|
||||
(assert-true (relative-sx-url? "(..)"))
|
||||
(assert-true (relative-sx-url? "(..reactive.demo)")))
|
||||
|
||||
(deftest "bare-dot relative"
|
||||
(assert-true (relative-sx-url? ".slug"))
|
||||
(assert-true (relative-sx-url? ".."))
|
||||
(assert-true (relative-sx-url? "..."))
|
||||
(assert-true (relative-sx-url? ".:page.4")))
|
||||
|
||||
(deftest "absolute URLs are not relative"
|
||||
(assert-false (relative-sx-url? "/(language.(doc.intro))"))
|
||||
(assert-false (relative-sx-url? "/"))
|
||||
(assert-false (relative-sx-url? "/language/docs/intro")))
|
||||
|
||||
(deftest "special form URLs are not relative"
|
||||
(assert-false (relative-sx-url? "/(!source.(~essay))"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Structural: append at current level (1 dot)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: append (.slug)"
|
||||
(deftest "append to deep URL"
|
||||
(assert-equal "/(geography.(hypermedia.(example.progress-bar)))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(.progress-bar)")))
|
||||
|
||||
(deftest "append to single-level URL"
|
||||
(assert-equal "/(language.intro)"
|
||||
(resolve-relative-url "/(language)" "(.intro)")))
|
||||
|
||||
(deftest "append with multi-token body"
|
||||
(assert-equal "/(geography.(hypermedia.(example.progress-bar.v2)))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(.progress-bar.v2)")))
|
||||
|
||||
(deftest "bare-dot shorthand"
|
||||
(assert-equal "/(geography.(hypermedia.(example.progress-bar)))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
".progress-bar"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Structural: go up one level (2 dots)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: up one (..slug)"
|
||||
(deftest "sibling call"
|
||||
(assert-equal "/(geography.(hypermedia.(reactive.demo)))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(..reactive.demo)")))
|
||||
|
||||
(deftest "just go up — no new content"
|
||||
(assert-equal "/(geography.(hypermedia))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(..)")))
|
||||
|
||||
(deftest "bare-dot shorthand for up"
|
||||
(assert-equal "/(geography.(hypermedia))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"..")))
|
||||
|
||||
(deftest "up from two-level URL"
|
||||
(assert-equal "/(language)"
|
||||
(resolve-relative-url "/(language.(doc))" "(..)")))
|
||||
|
||||
(deftest "up from single-level pops to root"
|
||||
(assert-equal "/"
|
||||
(resolve-relative-url "/(language)" "(..)"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Structural: go up two levels (3 dots)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: up two (...slug)"
|
||||
(deftest "up two and push"
|
||||
(assert-equal "/(geography.(marshes))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(...marshes)")))
|
||||
|
||||
(deftest "just up two — no content"
|
||||
(assert-equal "/(geography)"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"(...)")))
|
||||
|
||||
(deftest "bare-dot shorthand for up two"
|
||||
(assert-equal "/(geography)"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example)))"
|
||||
"...")))
|
||||
|
||||
(deftest "up two from two-level pops to root"
|
||||
(assert-equal "/"
|
||||
(resolve-relative-url "/(language.(doc))" "(...)")))
|
||||
|
||||
(deftest "up two and push from deep URL"
|
||||
;; 4-level URL, ... = 3 dots = pop 2 levels → at hypermedia level
|
||||
(assert-equal "/(geography.(hypermedia.(reactive.demo)))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(reference.(attributes))))"
|
||||
"(...reactive.demo)"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Structural: up N levels (N+1 dots)
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: up N"
|
||||
(deftest "up three levels (4 dots) from 4-level URL"
|
||||
;; 4-level URL, .... = 4 dots = pop 3 levels → at geography level
|
||||
(assert-equal "/(geography)"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(reference.(attributes))))"
|
||||
"(....)")))
|
||||
|
||||
(deftest "up three and push from 4-level URL"
|
||||
;; 4 dots = pop 3 → at geography, then push new-section
|
||||
(assert-equal "/(geography.(new-section))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(reference.(attributes))))"
|
||||
"(....new-section)")))
|
||||
|
||||
(deftest "up four levels (5 dots) pops to root"
|
||||
(assert-equal "/"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(reference.(attributes))))"
|
||||
"(.....)"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Structural: current (1 dot, no body) = no-op
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: current level no-op"
|
||||
(deftest "dot with no body is identity"
|
||||
;; (.): dots=1, body="" → no positional, no keywords → current unchanged
|
||||
(assert-equal "/(language.(doc.intro))"
|
||||
(resolve-relative-url "/(language.(doc.intro))" "(.)")))
|
||||
|
||||
(deftest "bare dot shorthand"
|
||||
(assert-equal "/(language.(doc.intro))"
|
||||
(resolve-relative-url "/(language.(doc.intro))" "."))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; SX URL Resolution — Keyword Operations
|
||||
;; ==========================================================================
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Keyword set: absolute value
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: keyword set"
|
||||
(deftest "set keyword on URL without keywords"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals)))"
|
||||
"(.:page.4)")))
|
||||
|
||||
(deftest "replace existing keyword"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:page.4)")))
|
||||
|
||||
(deftest "set keyword with bare-dot shorthand"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
".:page.4")))
|
||||
|
||||
(deftest "set keyword on single-level URL"
|
||||
(assert-equal "/(language.:page.1)"
|
||||
(resolve-relative-url "/(language)" "(.:page.1)")))
|
||||
|
||||
(deftest "set multiple keywords"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4.:section.batch)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:page.4.:section.batch)")))
|
||||
|
||||
(deftest "add new keyword preserving existing"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.3.:section.batch)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:section.batch)"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Keyword delta: +N / -N
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: keyword delta"
|
||||
(deftest "increment by 1"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:page.+1)")))
|
||||
|
||||
(deftest "decrement by 1"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.2)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:page.-1)")))
|
||||
|
||||
(deftest "increment by larger amount"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.13)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(.:page.+10)")))
|
||||
|
||||
(deftest "delta with bare-dot shorthand"
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.4)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
".:page.+1")))
|
||||
|
||||
(deftest "delta on missing keyword uses literal"
|
||||
;; If :page doesn't exist, +1 is used as-is (not numeric delta)
|
||||
(assert-equal "/(language.(spec.(explore.signals.:page.+1)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals)))"
|
||||
"(.:page.+1)"))))
|
||||
|
||||
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Composed: structural + keyword
|
||||
;; --------------------------------------------------------------------------
|
||||
|
||||
(defsuite "resolve-relative-url: composed structural + keyword"
|
||||
(deftest "append slug + set keyword"
|
||||
(assert-equal "/(language.(spec.(explore.signals.batch.:page.1)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals)))"
|
||||
"(.batch.:page.1)")))
|
||||
|
||||
(deftest "sibling + set keyword"
|
||||
(assert-equal "/(language.(spec.(eval.:page.1)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"(..eval.:page.1)")))
|
||||
|
||||
(deftest "up two + set keyword"
|
||||
(assert-equal "/(geography.(reactive.demo.:page.1))"
|
||||
(resolve-relative-url
|
||||
"/(geography.(hypermedia.(example.progress-bar)))"
|
||||
"(...reactive.demo.:page.1)")))
|
||||
|
||||
(deftest "bare-dot composed"
|
||||
(assert-equal "/(language.(spec.(eval.:page.1)))"
|
||||
(resolve-relative-url
|
||||
"/(language.(spec.(explore.signals.:page.3)))"
|
||||
"..eval.:page.1"))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; SX URL Parsing — parse-sx-url
|
||||
;; ==========================================================================
|
||||
|
||||
(defsuite "parse-sx-url"
|
||||
(deftest "home URL"
|
||||
(let ((parsed (parse-sx-url "/")))
|
||||
(assert-equal "home" (get parsed "type"))
|
||||
(assert-equal "/" (get parsed "raw"))))
|
||||
|
||||
(deftest "absolute SX URL"
|
||||
(let ((parsed (parse-sx-url "/(language.(doc.intro))")))
|
||||
(assert-equal "absolute" (get parsed "type"))))
|
||||
|
||||
(deftest "relative paren-form"
|
||||
(let ((parsed (parse-sx-url "(.slug)")))
|
||||
(assert-equal "relative" (get parsed "type"))))
|
||||
|
||||
(deftest "relative bare-dot"
|
||||
(let ((parsed (parse-sx-url ".slug")))
|
||||
(assert-equal "relative" (get parsed "type"))))
|
||||
|
||||
(deftest "relative double-dot"
|
||||
(let ((parsed (parse-sx-url "..")))
|
||||
(assert-equal "relative" (get parsed "type"))))
|
||||
|
||||
(deftest "direct component"
|
||||
(let ((parsed (parse-sx-url "/(~essay-sx-sucks)")))
|
||||
(assert-equal "direct-component" (get parsed "type"))
|
||||
(assert-equal "~essay-sx-sucks" (get parsed "name"))))
|
||||
|
||||
(deftest "old-style path"
|
||||
(let ((parsed (parse-sx-url "/language/docs/intro")))
|
||||
(assert-equal "path" (get parsed "type")))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; URL Special Forms (! prefix)
|
||||
;; ==========================================================================
|
||||
|
||||
(defsuite "url-special-form?"
|
||||
(deftest "known special forms"
|
||||
(assert-true (url-special-form? "!source"))
|
||||
(assert-true (url-special-form? "!inspect"))
|
||||
(assert-true (url-special-form? "!diff"))
|
||||
(assert-true (url-special-form? "!search"))
|
||||
(assert-true (url-special-form? "!raw"))
|
||||
(assert-true (url-special-form? "!json")))
|
||||
|
||||
(deftest "unknown bang-prefix is not a special form"
|
||||
(assert-false (url-special-form? "!unknown"))
|
||||
(assert-false (url-special-form? "!foo")))
|
||||
|
||||
(deftest "non-bang names are not special forms"
|
||||
(assert-false (url-special-form? "source"))
|
||||
(assert-false (url-special-form? "language"))
|
||||
(assert-false (url-special-form? "~essay"))))
|
||||
|
||||
|
||||
(defsuite "parse-sx-url: special forms"
|
||||
(deftest "source special form"
|
||||
(let ((parsed (parse-sx-url "/(!source.(~essay-sx-sucks))")))
|
||||
(assert-equal "special-form" (get parsed "type"))
|
||||
(assert-equal "!source" (get parsed "form"))
|
||||
(assert-equal "(~essay-sx-sucks)" (get parsed "inner"))))
|
||||
|
||||
(deftest "inspect special form"
|
||||
(let ((parsed (parse-sx-url "/(!inspect.(language.(doc.primitives)))")))
|
||||
(assert-equal "special-form" (get parsed "type"))
|
||||
(assert-equal "!inspect" (get parsed "form"))
|
||||
(assert-equal "(language.(doc.primitives))" (get parsed "inner"))))
|
||||
|
||||
(deftest "diff special form with two args"
|
||||
(let ((parsed (parse-sx-url "/(!diff.(language.(spec.signals)).(language.(spec.eval)))")))
|
||||
(assert-equal "special-form" (get parsed "type"))
|
||||
(assert-equal "!diff" (get parsed "form"))
|
||||
(assert-equal "(language.(spec.signals)).(language.(spec.eval))" (get parsed "inner"))))
|
||||
|
||||
(deftest "raw special form"
|
||||
(let ((parsed (parse-sx-url "/(!raw.(~some-component))")))
|
||||
(assert-equal "special-form" (get parsed "type"))
|
||||
(assert-equal "!raw" (get parsed "form"))
|
||||
(assert-equal "(~some-component)" (get parsed "inner"))))
|
||||
|
||||
(deftest "json special form"
|
||||
(let ((parsed (parse-sx-url "/(!json.(language.(doc.primitives)))")))
|
||||
(assert-equal "special-form" (get parsed "type"))
|
||||
(assert-equal "!json" (get parsed "form"))
|
||||
(assert-equal "(language.(doc.primitives))" (get parsed "inner")))))
|
||||
|
||||
|
||||
(defsuite "url-special-form-name"
|
||||
(deftest "extracts form name"
|
||||
(assert-equal "!source"
|
||||
(url-special-form-name "/(!source.(~essay))")))
|
||||
|
||||
(deftest "returns nil for non-special-form"
|
||||
(assert-nil (url-special-form-name "/(language.(doc.intro))"))
|
||||
(assert-nil (url-special-form-name "/"))
|
||||
(assert-nil (url-special-form-name "(.slug)"))))
|
||||
|
||||
|
||||
(defsuite "url-special-form-inner"
|
||||
(deftest "extracts inner expression"
|
||||
(assert-equal "(~essay)"
|
||||
(url-special-form-inner "/(!source.(~essay))")))
|
||||
|
||||
(deftest "extracts multi-arg inner"
|
||||
(assert-equal "(a).(b)"
|
||||
(url-special-form-inner "/(!diff.(a).(b))")))
|
||||
|
||||
(deftest "returns nil for non-special-form"
|
||||
(assert-nil (url-special-form-inner "/(language.(doc.intro))"))))
|
||||
|
||||
|
||||
;; ==========================================================================
|
||||
;; Internal helpers — additional edge cases
|
||||
;; ==========================================================================
|
||||
|
||||
(defsuite "internal: _pop-sx-url-level"
|
||||
(deftest "pop three-level"
|
||||
(assert-equal "/(a.(b))"
|
||||
(_pop-sx-url-level "/(a.(b.(c)))")))
|
||||
|
||||
(deftest "pop two-level"
|
||||
(assert-equal "/(a)"
|
||||
(_pop-sx-url-level "/(a.(b))")))
|
||||
|
||||
(deftest "pop single-level to root"
|
||||
(assert-equal "/"
|
||||
(_pop-sx-url-level "/(a)")))
|
||||
|
||||
(deftest "pop root stays root"
|
||||
(assert-equal "/"
|
||||
(_pop-sx-url-level "/"))))
|
||||
|
||||
(defsuite "internal: _extract-innermost"
|
||||
(deftest "single-level URL"
|
||||
(let ((parts (_extract-innermost "/(language)")))
|
||||
(assert-equal "/(" (get parts "before"))
|
||||
(assert-equal "language" (get parts "content"))
|
||||
(assert-equal ")" (get parts "suffix"))))
|
||||
|
||||
(deftest "two-level URL"
|
||||
(let ((parts (_extract-innermost "/(language.(doc.intro))")))
|
||||
(assert-equal "/(language.(" (get parts "before"))
|
||||
(assert-equal "doc.intro" (get parts "content"))
|
||||
(assert-equal "))" (get parts "suffix"))))
|
||||
|
||||
(deftest "three-level URL with keywords"
|
||||
(let ((parts (_extract-innermost "/(a.(b.(c.d.:page.3)))")))
|
||||
(assert-equal "/(a.(b.(" (get parts "before"))
|
||||
(assert-equal "c.d.:page.3" (get parts "content"))
|
||||
(assert-equal ")))" (get parts "suffix")))))
|
||||
|
||||
(defsuite "internal: _find-keyword-value"
|
||||
(deftest "finds keyword"
|
||||
(assert-equal "3"
|
||||
(_find-keyword-value "explore.signals.:page.3" ":page")))
|
||||
|
||||
(deftest "returns nil when not found"
|
||||
(assert-nil (_find-keyword-value "explore.signals" ":page")))
|
||||
|
||||
(deftest "finds among multiple keywords"
|
||||
(assert-equal "batch"
|
||||
(_find-keyword-value "explore.signals.:page.3.:section.batch" ":section"))))
|
||||
|
||||
(defsuite "internal: _set-keyword-in-content"
|
||||
(deftest "replace existing"
|
||||
(assert-equal "a.b.:page.4"
|
||||
(_set-keyword-in-content "a.b.:page.3" ":page" "4")))
|
||||
|
||||
(deftest "append when missing"
|
||||
(assert-equal "a.b.:page.1"
|
||||
(_set-keyword-in-content "a.b" ":page" "1")))
|
||||
|
||||
(deftest "replace with multiple keywords present"
|
||||
(assert-equal "a.:page.4.:section.batch"
|
||||
(_set-keyword-in-content "a.:page.3.:section.batch" ":page" "4"))))
|
||||
|
||||
(defsuite "internal: _is-delta-value?"
|
||||
(deftest "positive delta"
|
||||
(assert-true (_is-delta-value? "+1"))
|
||||
(assert-true (_is-delta-value? "+10")))
|
||||
|
||||
(deftest "negative delta"
|
||||
(assert-true (_is-delta-value? "-1"))
|
||||
(assert-true (_is-delta-value? "-10")))
|
||||
|
||||
(deftest "bare minus is not delta"
|
||||
(assert-false (_is-delta-value? "-")))
|
||||
|
||||
(deftest "bare plus is not delta"
|
||||
(assert-false (_is-delta-value? "+")))
|
||||
|
||||
(deftest "plain number is not delta"
|
||||
(assert-false (_is-delta-value? "3"))
|
||||
(assert-false (_is-delta-value? "0")))
|
||||
|
||||
(deftest "empty string is not delta"
|
||||
(assert-false (_is-delta-value? ""))))
|
||||
|
||||
(defsuite "internal: _apply-delta"
|
||||
(deftest "increment"
|
||||
(assert-equal "4" (_apply-delta "3" "+1")))
|
||||
|
||||
(deftest "decrement"
|
||||
(assert-equal "2" (_apply-delta "3" "-1")))
|
||||
|
||||
(deftest "large increment"
|
||||
(assert-equal "13" (_apply-delta "3" "+10")))
|
||||
|
||||
(deftest "non-numeric current falls back"
|
||||
(assert-equal "+1" (_apply-delta "abc" "+1"))))
|
||||
|
||||
@@ -422,19 +422,52 @@ env["append!"] = _append_mut
|
||||
def _load_router_from_bootstrap(env):
|
||||
"""Load router functions from the bootstrapped sx_ref.py.
|
||||
|
||||
The hand-written evaluator can't run router.sx faithfully because
|
||||
set! inside lambda closures doesn't propagate to outer scopes
|
||||
(the evaluator uses dict copies, not cells). The bootstrapped code
|
||||
compiles set! to cell-based mutation, so we import from there.
|
||||
The hand-written evaluator can't run router.sx faithfully because:
|
||||
1. set! inside lambda closures doesn't propagate to outer scopes
|
||||
2. Deep recursive function chains exceed Python stack depth
|
||||
The bootstrapped code compiles to native Python, avoiding both issues.
|
||||
|
||||
Build sx_ref.py with --spec-modules=router to include these functions.
|
||||
"""
|
||||
try:
|
||||
from shared.sx.ref.sx_ref import (
|
||||
# Original route matching
|
||||
split_path_segments,
|
||||
parse_route_pattern,
|
||||
match_route_segments,
|
||||
match_route,
|
||||
find_matching_route,
|
||||
make_route_segment,
|
||||
# SX URL conversion
|
||||
sx_url_to_path,
|
||||
_fn_to_segment,
|
||||
# Relative URL resolution
|
||||
resolve_relative_url,
|
||||
relative_sx_url_p,
|
||||
_normalize_relative,
|
||||
_count_leading_dots,
|
||||
_strip_trailing_close,
|
||||
_last_index_of,
|
||||
_pop_sx_url_level,
|
||||
_pop_sx_url_levels,
|
||||
# Keyword operations
|
||||
_extract_innermost,
|
||||
_find_keyword_value,
|
||||
_find_kw_in_tokens,
|
||||
_set_keyword_in_content,
|
||||
_replace_kw_in_tokens,
|
||||
_is_delta_value_p,
|
||||
_apply_delta,
|
||||
_apply_kw_pairs,
|
||||
_apply_keywords_to_url,
|
||||
_parse_relative_body,
|
||||
_split_pos_kw,
|
||||
# URL special forms
|
||||
parse_sx_url,
|
||||
url_special_form_p,
|
||||
url_special_form_name,
|
||||
url_special_form_inner,
|
||||
_url_special_forms,
|
||||
)
|
||||
env["split-path-segments"] = split_path_segments
|
||||
env["parse-route-pattern"] = parse_route_pattern
|
||||
@@ -442,8 +475,34 @@ def _load_router_from_bootstrap(env):
|
||||
env["match-route"] = match_route
|
||||
env["find-matching-route"] = find_matching_route
|
||||
env["make-route-segment"] = make_route_segment
|
||||
env["sx-url-to-path"] = sx_url_to_path
|
||||
env["_fn-to-segment"] = _fn_to_segment
|
||||
env["resolve-relative-url"] = resolve_relative_url
|
||||
env["relative-sx-url?"] = relative_sx_url_p
|
||||
env["_normalize-relative"] = _normalize_relative
|
||||
env["_count-leading-dots"] = _count_leading_dots
|
||||
env["_strip-trailing-close"] = _strip_trailing_close
|
||||
env["_last-index-of"] = _last_index_of
|
||||
env["_pop-sx-url-level"] = _pop_sx_url_level
|
||||
env["_pop-sx-url-levels"] = _pop_sx_url_levels
|
||||
env["_extract-innermost"] = _extract_innermost
|
||||
env["_find-keyword-value"] = _find_keyword_value
|
||||
env["_find-kw-in-tokens"] = _find_kw_in_tokens
|
||||
env["_set-keyword-in-content"] = _set_keyword_in_content
|
||||
env["_replace-kw-in-tokens"] = _replace_kw_in_tokens
|
||||
env["_is-delta-value?"] = _is_delta_value_p
|
||||
env["_apply-delta"] = _apply_delta
|
||||
env["_apply-kw-pairs"] = _apply_kw_pairs
|
||||
env["_apply-keywords-to-url"] = _apply_keywords_to_url
|
||||
env["_parse-relative-body"] = _parse_relative_body
|
||||
env["_split-pos-kw"] = _split_pos_kw
|
||||
env["parse-sx-url"] = parse_sx_url
|
||||
env["url-special-form?"] = url_special_form_p
|
||||
env["url-special-form-name"] = url_special_form_name
|
||||
env["url-special-form-inner"] = url_special_form_inner
|
||||
env["_url-special-forms"] = _url_special_forms
|
||||
except ImportError:
|
||||
# Fallback: eval router.sx directly (may fail on set! scoping)
|
||||
# Fallback: eval router.sx directly (may fail on set!/recursion)
|
||||
eval_file("router.sx", env)
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user