- Add spec-explorer-data-by-slug helper with _SPEC_SLUG_MAP - _find_spec_file searches spec/, web/, shared/sx/ref/ directories - defpage specs-explore-page uses :data for server-side data fetch - test_evaluator_renders_in_browser: failing test for client-side rendering (client re-evaluates defpage content, find-spec unavailable — pre-existing) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1720 lines
62 KiB
Python
1720 lines
62 KiB
Python
"""Page helper registration for sx docs.
|
|
|
|
All helpers return data values (dicts, lists) — no sx_call(), no SxExpr.
|
|
Markup composition lives entirely in .sx files.
|
|
"""
|
|
from __future__ import annotations
|
|
|
|
|
|
def _register_sx_helpers() -> None:
    """Register the Python data helpers with the sx page-helper registry."""
    from shared.sx.pages import register_page_helpers
    from content.highlight import highlight as _highlight

    # Build the full helper table first, then register it in a single call.
    helper_table = {
        "highlight": _highlight,
        "component-source": _component_source,
        "primitives-data": _primitives_data,
        "special-forms-data": _special_forms_data,
        "reference-data": _reference_data,
        "attr-detail-data": _attr_detail_data,
        "header-detail-data": _header_detail_data,
        "event-detail-data": _event_detail_data,
        "read-spec-file": _read_spec_file,
        "bootstrapper-data": _bootstrapper_data,
        "bundle-analyzer-data": _bundle_analyzer_data,
        "routing-analyzer-data": _routing_analyzer_data,
        "data-test-data": _data_test_data,
        "run-spec-tests": _run_spec_tests,
        "run-modular-tests": _run_modular_tests,
        "streaming-demo-data": _streaming_demo_data,
        "affinity-demo-data": _affinity_demo_data,
        "optimistic-demo-data": _optimistic_demo_data,
        "action:add-demo-item": _add_demo_item,
        "offline-demo-data": _offline_demo_data,
        "prove-data": _prove_data,
        "page-helpers-demo-data": _page_helpers_demo_data,
        "spec-explorer-data": _spec_explorer_data,
        "spec-explorer-data-by-slug": _spec_explorer_data_by_slug,
        "handler-source": _handler_source,
    }
    register_page_helpers("sx", helper_table)
|
|
|
|
|
|
def _component_source(name: str) -> str:
    """Return the pretty-printed defcomp/defisland source for a named component.

    Looks *name* up in the shared component environment and delegates markup
    generation to ``build_component_source``.  Unknown names produce a
    "not-found" payload rather than raising.

    The three payload shapes (island / component / not-found) share the same
    keys, so they are built in one place instead of three duplicated calls.
    """
    from shared.sx.jinja_bridge import get_component_env
    from shared.sx.parser import serialize
    from shared.sx.types import Component, Island
    from shared.sx.ref.sx_ref import build_component_source

    comp = get_component_env().get(name)
    # Island is checked first (as in the original): islands carry no affinity.
    if isinstance(comp, Island):
        payload = {
            "type": "island", "name": name,
            "params": list(comp.params) if comp.params else [],
            "has-children": comp.has_children,
            "body-sx": serialize(comp.body, pretty=True),
            "affinity": None,
        }
    elif isinstance(comp, Component):
        payload = {
            "type": "component", "name": name,
            "params": list(comp.params),
            "has-children": comp.has_children,
            "body-sx": serialize(comp.body, pretty=True),
            "affinity": comp.affinity,
        }
    else:
        payload = {
            "type": "not-found", "name": name,
            "params": [], "has-children": False, "body-sx": "", "affinity": None,
        }
    return build_component_source(payload)
|
|
|
|
|
|
def _handler_source(name: str) -> str:
    """Return the pretty-printed defhandler source for a named handler.

    Reconstructs the ``(defhandler ...)`` form from the registered handler
    definition.  Options (:path, :method, :csrf, :returns) are emitted only
    when they differ from their defaults (no path, "get", csrf on,
    "element"); the parameter list and indented body follow.
    """
    from shared.sx.handlers import get_handler
    from shared.sx.parser import serialize

    hdef = get_handler("sx", name)
    if not hdef:
        return f";;; Handler not found: {name}"

    parts = [f"(defhandler {hdef.name}"]
    if hdef.path:
        parts.append(f' :path "{hdef.path}"')
    if hdef.method != "get":
        parts.append(f" :method :{hdef.method}")
    if not hdef.csrf:
        parts.append(" :csrf false")
    if hdef.returns != "element":
        parts.append(f' :returns "{hdef.returns}"')
    # The conditional binds the whole right-hand side: with no params this
    # yields [] and the " ()" branch below is taken.
    param_strs = ["&key"] + list(hdef.params) if hdef.params else []
    parts.append(f" ({' '.join(param_strs)})" if param_strs else " ()")
    body_sx = serialize(hdef.body, pretty=True)
    # Indent body by 2 spaces
    body_lines = body_sx.split("\n")
    parts.append(" " + body_lines[0])
    for line in body_lines[1:]:
        parts.append(" " + line)
    return "\n".join(parts) + ")"
|
|
|
|
|
|
def _primitives_data() -> dict:
    """Expose the PRIMITIVES table to the primitives docs page."""
    import content.pages as _content_pages
    return _content_pages.PRIMITIVES
|
|
|
|
|
|
def _special_forms_data() -> dict:
    """Parse special-forms.sx and return categorized form data."""
    import os
    from shared.sx.parser import parse_all
    from shared.sx.ref.sx_ref import categorize_special_forms

    spec_path = os.path.join(_ref_dir(), "special-forms.sx")
    with open(spec_path) as fh:
        spec_source = fh.read()
    return categorize_special_forms(parse_all(spec_source))
|
|
|
|
|
|
def _reference_data(slug: str) -> dict:
    """Return reference table data for a given slug.

    Known slugs: "attributes", "headers", "events", "js-api".  Any other
    slug falls back to the attributes tables — the original code duplicated
    the attributes branch verbatim in its ``else``; the two are merged here
    with identical behavior.
    """
    from content.pages import (
        REQUEST_ATTRS, BEHAVIOR_ATTRS, SX_UNIQUE_ATTRS,
        REQUEST_HEADERS, RESPONSE_HEADERS,
        EVENTS, JS_API, ATTR_DETAILS, HEADER_DETAILS,
    )
    from shared.sx.ref.sx_ref import build_reference_data

    if slug == "headers":
        raw = {
            "req-headers": [list(t) for t in REQUEST_HEADERS],
            "resp-headers": [list(t) for t in RESPONSE_HEADERS],
        }
        detail_keys = list(HEADER_DETAILS.keys())
    elif slug == "events":
        from content.pages import EVENT_DETAILS
        raw = {"events-list": [list(t) for t in EVENTS]}
        detail_keys = list(EVENT_DETAILS.keys())
    elif slug == "js-api":
        raw = {"js-api-list": [list(t) for t in JS_API]}
        detail_keys = []
    else:
        # "attributes" and unknown slugs share the attribute tables.
        raw = {
            "req-attrs": [list(t) for t in REQUEST_ATTRS],
            "beh-attrs": [list(t) for t in BEHAVIOR_ATTRS],
            "uniq-attrs": [list(t) for t in SX_UNIQUE_ATTRS],
        }
        detail_keys = list(ATTR_DETAILS.keys())

    return build_reference_data(slug, raw, detail_keys)
|
|
|
|
|
|
def _find_spec_file(filename: str) -> str | None:
|
|
"""Find a spec .sx file across spec/, web/, shared/sx/ref/ directories."""
|
|
import os
|
|
base = os.path.join(os.path.dirname(__file__), "..", "..")
|
|
search_dirs = [
|
|
os.path.join(base, "spec"),
|
|
os.path.join(base, "web"),
|
|
os.path.join(base, "shared", "sx", "ref"),
|
|
"/app/spec",
|
|
"/app/web",
|
|
"/app/shared/sx/ref",
|
|
]
|
|
for d in search_dirs:
|
|
path = os.path.join(d, filename)
|
|
if os.path.isfile(path):
|
|
return path
|
|
return None
|
|
|
|
|
|
def _read_spec_file(filename: str) -> str:
    """Read a spec .sx file. Pure I/O — metadata lives in .sx."""
    not_found = ";; spec file not found: " + filename
    filepath = _find_spec_file(filename)
    if not filepath:
        return not_found
    try:
        with open(filepath, encoding="utf-8") as fh:
            return fh.read()
    except (FileNotFoundError, TypeError):
        # File vanished between the existence check and the open.
        return not_found
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Spec explorer — translation + cross-reference helpers
|
|
# ---------------------------------------------------------------------------
|
|
|
|
_JS_SX_ENV = None  # cached js.sx evaluator env (populated lazily by _js_translate_define)
|
|
|
|
def _js_translate_define(expr: list, name: str) -> str | None:
    """Translate a single define expression to JavaScript via js.sx.

    Loads js.sx into an evaluator environment once (cached module-level in
    _JS_SX_ENV), then evaluates (js-translate-file _defines) against a copy
    of that env with a single [name, expr] pair bound.  Returns the stripped
    JS text, or None when translation produced nothing usable.
    """
    global _JS_SX_ENV
    if _JS_SX_ENV is None:
        from shared.sx.ref.run_js_sx import load_js_sx
        _JS_SX_ENV = load_js_sx()
    from shared.sx.ref.sx_ref import evaluate
    from shared.sx.types import Symbol
    # Copy the cached env so the per-call _defines binding doesn't leak.
    env = dict(_JS_SX_ENV)
    env["_defines"] = [[name, expr]]
    result = evaluate([Symbol("js-translate-file"), Symbol("_defines")], env)
    if result and isinstance(result, str) and result.strip():
        return result.strip()
    return None
|
|
|
|
|
|
def _z3_translate_define(expr: list) -> str | None:
    """Translate a single define expression to SMT-LIB via z3.sx."""
    from shared.sx.ref.reader_z3 import z3_translate

    translated = z3_translate(expr)
    if not isinstance(translated, str):
        return None
    stripped = translated.strip()
    # Empty / whitespace-only output counts as no translation.
    return stripped if stripped else None
|
|
|
|
|
|
_SPEC_INDEX: dict[str, str] | None = None  # function name → spec slug (lazy cache; see _build_spec_index)
|
|
|
|
def _build_spec_index() -> dict[str, str]:
    """Build (and cache) a global index mapping define names to spec slugs.

    Scans every non-test *.sx file under shared/sx/ref and records, for each
    top-level (define ...) / (define-async ...) form, the slug of the file
    that defines it.  Unparseable files are skipped; the result is cached in
    the module-level _SPEC_INDEX.

    Fix: the original also imported ``Keyword`` which was never used here.
    """
    global _SPEC_INDEX
    if _SPEC_INDEX is not None:
        return _SPEC_INDEX

    import os
    import glob as globmod
    from shared.sx.parser import parse_all
    from shared.sx.types import Symbol

    ref_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
    if not os.path.isdir(ref_dir):
        ref_dir = "/app/shared/sx/ref"  # container layout fallback

    index: dict[str, str] = {}
    for fp in globmod.glob(os.path.join(ref_dir, "*.sx")):
        basename = os.path.basename(fp)
        if basename.startswith("test-"):
            continue  # test files define suites, not spec functions
        slug = basename.replace(".sx", "")
        try:
            with open(fp, encoding="utf-8") as f:
                content = f.read()
            for expr in parse_all(content):
                if not isinstance(expr, list) or len(expr) < 2:
                    continue
                if not isinstance(expr[0], Symbol):
                    continue
                head = expr[0].name
                if head in ("define", "define-async"):
                    name = expr[1].name if isinstance(expr[1], Symbol) else str(expr[1])
                    index[name] = slug
        except Exception:
            continue  # best-effort: one broken spec file must not break the index

    _SPEC_INDEX = index
    return _SPEC_INDEX
|
|
|
|
|
|
# Test file → spec file mapping
# Maps a spec filename to the test file whose defsuite/deftest forms exercise
# it (consumed by _extract_tests_for_spec).  Spec files without an entry
# simply report no tests.
_SPEC_TO_TEST = {
    "signals.sx": "test-signals.sx",
    "eval.sx": "test-eval.sx",
    "parser.sx": "test-parser.sx",
    "render.sx": "test-render.sx",
    "engine.sx": "test-engine.sx",
    "orchestration.sx": "test-orchestration.sx",
    "router.sx": "test-router.sx",
    "deps.sx": "test-deps.sx",
    # NOTE(review): "test-aser.sx" breaks the test-<slug>.sx pattern — looks
    # like a typo for "test-adapter-sx.sx"; confirm against the ref directory.
    "adapter-sx.sx": "test-aser.sx",
    "types.sx": "test-types.sx",
}
|
|
|
|
|
|
def _extract_tests_for_spec(filename: str) -> list[dict]:
    """Extract test suites/cases from the corresponding test file."""
    import os
    from shared.sx.parser import parse_all
    from shared.sx.types import Symbol

    test_file = _SPEC_TO_TEST.get(filename)
    if not test_file:
        return []

    ref_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
    if not os.path.isdir(ref_dir):
        ref_dir = "/app/shared/sx/ref"
    test_path = os.path.join(ref_dir, test_file)

    try:
        with open(test_path, encoding="utf-8") as fh:
            parsed = parse_all(fh.read())
    except Exception:
        return []

    def _is_form(node, form_name: str, min_len: int) -> bool:
        # A well-formed (form-name ...) list of at least min_len elements.
        return (
            isinstance(node, list)
            and len(node) >= min_len
            and isinstance(node[0], Symbol)
            and node[0].name == form_name
        )

    suites: list[dict] = []
    for form in parsed:
        if not _is_form(form, "defsuite", 3):
            continue
        suite_name = form[1] if isinstance(form[1], str) else str(form[1])
        case_names = [
            child[1] if isinstance(child[1], str) else str(child[1])
            for child in form[2:]
            if _is_form(child, "deftest", 2)
        ]
        suites.append({"suite": suite_name, "tests": case_names})
    return suites
|
|
|
|
|
|
def _match_tests_to_function(fn_name: str, all_tests: list[dict]) -> list[dict]:
|
|
"""Match test suites to a function by fuzzy name matching."""
|
|
matched = []
|
|
fn_lower = fn_name.lower().replace("-", " ").replace("!", "").replace("?", "")
|
|
fn_words = set(fn_lower.split())
|
|
for suite in all_tests:
|
|
suite_lower = suite["suite"].lower()
|
|
# Match if function name appears in suite name or suite name contains function
|
|
if fn_lower in suite_lower or any(w in suite_lower for w in fn_words if len(w) > 2):
|
|
matched.append(suite)
|
|
return matched
|
|
|
|
|
|
def _collect_symbols(expr) -> set[str]:
    """Recursively collect all Symbol names referenced in an expression."""
    from shared.sx.types import Symbol

    names: set[str] = set()
    # Iterative worklist instead of recursion; visit order doesn't matter
    # because the result is a set.
    pending = [expr]
    while pending:
        node = pending.pop()
        if isinstance(node, Symbol):
            names.add(node.name)
        elif isinstance(node, list):
            pending.extend(node)
        elif isinstance(node, dict):
            pending.extend(node.values())
    return names
|
|
|
|
|
|
# slug → (spec filename, display title, one-line description) for the spec
# explorer routes; consumed by _spec_explorer_data_by_slug.
_SPEC_SLUG_MAP = {
    "parser": ("parser.sx", "Parser", "Tokenization and parsing"),
    "evaluator": ("evaluator.sx", "Evaluator", "CEK machine evaluator"),
    "primitives": ("primitives.sx", "Primitives", "Built-in pure functions"),
    "render": ("render.sx", "Renderer", "Three rendering modes"),
    "special-forms": ("special-forms.sx", "Special Forms", "Special form dispatch"),
    "signals": ("signals.sx", "Signals", "Fine-grained reactive primitives"),
    "adapter-dom": ("adapter-dom.sx", "DOM Adapter", "Client-side DOM rendering"),
    "adapter-html": ("adapter-html.sx", "HTML Adapter", "Server-side HTML rendering"),
    "adapter-sx": ("adapter-sx.sx", "SX Adapter", "SX wire format serialization"),
    "engine": ("engine.sx", "SxEngine", "Pure logic for the browser engine"),
    "orchestration": ("orchestration.sx", "Orchestration", "Browser lifecycle"),
    "boot": ("boot.sx", "Boot", "Browser initialization"),
    "router": ("router.sx", "Router", "URL parsing and route matching"),
    "boundary": ("boundary.sx", "Boundary", "Language/platform boundary"),
    "continuations": ("continuations.sx", "Continuations", "Delimited continuations"),
    "types": ("types.sx", "Types", "Optional gradual type system"),
}
|
|
|
|
|
|
def _spec_explorer_data_by_slug(slug: str) -> dict | None:
    """Look up spec by slug and return explorer data (None for unknown slugs)."""
    if slug not in _SPEC_SLUG_MAP:
        return None
    filename, title, desc = _SPEC_SLUG_MAP[slug]
    return _spec_explorer_data(filename, title, desc)
|
|
|
|
|
|
def _spec_explorer_data(filename: str, title: str = "", desc: str = "") -> dict | None:
    """Parse a spec file into structured metadata for the spec explorer.

    Receives filename/title/desc from the SX routing layer (via find-spec).
    Returns sections with defines, effects, params, source, and translations,
    or None when the file can't be located/read.

    Pipeline: (1) split the raw source into comment-banner sections,
    (2) parse the AST, (3) walk top-level (define ...) forms collecting
    effects/params/source plus Python/JS/Z3 translations and cross-refs,
    then assign defines to sections by line number, attach matching tests,
    and aggregate stats.
    """
    import os
    import re
    from shared.sx.parser import parse_all
    from shared.sx.types import Symbol, Keyword

    if not filename:
        return None

    # Read the raw source
    filepath = _find_spec_file(filename)
    if not filepath:
        return None
    try:
        with open(filepath, encoding="utf-8") as f:
            source = f.read()
    except FileNotFoundError:
        return None

    lines = source.split("\n")

    # --- 1. Section splitting ---
    sections: list[dict] = []
    current_section: dict | None = None
    i = 0
    while i < len(lines):
        line = lines[i]
        # Detect section dividers: ;; ---...
        if re.match(r"^;; -{10,}", line):
            # Look for title in following comment lines
            title_lines = []
            j = i + 1
            while j < len(lines) and lines[j].startswith(";;"):
                content = lines[j][2:].strip()
                if re.match(r"^-{10,}", content):
                    # Closing divider of the banner — stop after consuming it.
                    j += 1
                    break
                if content:
                    title_lines.append(content)
                j += 1
            if title_lines:
                section_title = title_lines[0]
                # Collect comment block after section header
                comment_lines = []
                k = j
                while k < len(lines) and lines[k].startswith(";;"):
                    c = lines[k][2:].strip()
                    if re.match(r"^-{5,}", c) or re.match(r"^={5,}", c):
                        break
                    if c:
                        comment_lines.append(c)
                    k += 1
                current_section = {
                    "title": section_title,
                    "comment": " ".join(comment_lines) if comment_lines else None,
                    "defines": [],
                }
                sections.append(current_section)
            i = j
            continue
        i += 1

    # If no sections found, create a single implicit one
    if not sections:
        current_section = {"title": filename, "comment": None, "defines": []}
        sections.append(current_section)

    # --- 2. Parse AST ---
    try:
        exprs = parse_all(source)
    except Exception:
        # Unparseable source still renders: sections exist, defines are empty.
        exprs = []

    # --- 3. Process each top-level define ---
    # Build a line-number index: find where each top-level form starts
    def _find_source_block(name: str, form: str = "define") -> tuple[str, int]:
        """Find the source text of a define form by scanning raw source."""
        patterns = [
            f"({form} {name} ",
            f"({form} {name}\n",
        ]
        for pat in patterns:
            idx = source.find(pat)
            if idx >= 0:
                # Count balanced parens from idx
                # NOTE(review): paren counting ignores parens inside string
                # literals — assumes spec sources don't contain unbalanced
                # parens in strings; confirm if that ever changes.
                depth = 0
                end = idx
                for ci, ch in enumerate(source[idx:], idx):
                    if ch == "(":
                        depth += 1
                    elif ch == ")":
                        depth -= 1
                        if depth == 0:
                            end = ci + 1
                            break
                line_num = source[:idx].count("\n") + 1
                return source[idx:end], line_num
        return "", 0

    def _extract_effects(expr: list) -> list[str]:
        """Extract :effects [...] from a define form."""
        if len(expr) >= 4 and isinstance(expr[2], Keyword) and expr[2].name == "effects":
            eff_list = expr[3]
            if isinstance(eff_list, list):
                return [s.name if isinstance(s, Symbol) else str(s) for s in eff_list]
        return []

    def _extract_params(expr: list) -> list[dict]:
        """Extract params from the fn/lambda body of a define."""
        # Find the fn/lambda form: value sits at index 4 when an :effects
        # annotation is present, otherwise at index 2.
        val_expr = expr[4] if (len(expr) >= 5 and isinstance(expr[2], Keyword)
                              and expr[2].name == "effects") else expr[2] if len(expr) >= 3 else None
        if not isinstance(val_expr, list) or not val_expr:
            return []
        if not isinstance(val_expr[0], Symbol):
            return []
        if val_expr[0].name not in ("fn", "lambda"):
            return []
        if len(val_expr) < 2 or not isinstance(val_expr[1], list):
            return []
        params_list = val_expr[1]
        result = []
        i = 0
        while i < len(params_list):
            p = params_list[i]
            if isinstance(p, Symbol) and p.name in ("&rest", "&key"):
                result.append({"name": p.name, "type": None})
                i += 1
                continue
            if isinstance(p, Symbol):
                result.append({"name": p.name, "type": None})
            elif isinstance(p, list) and len(p) == 3:
                # (name :as type)
                name_s, kw, type_s = p
                if isinstance(name_s, Symbol) and isinstance(kw, Keyword) and kw.name == "as":
                    type_str = type_s.name if isinstance(type_s, Symbol) else str(type_s)
                    result.append({"name": name_s.name, "type": type_str})
                else:
                    result.append({"name": str(p), "type": None})
            else:
                result.append({"name": str(p), "type": None})
            i += 1
        return result

    # Process defines
    all_defines: list[dict] = []
    py_emitter = None  # lazily constructed once; reused across defines

    for expr in exprs:
        if not isinstance(expr, list) or len(expr) < 2:
            continue
        if not isinstance(expr[0], Symbol):
            continue

        head = expr[0].name
        if head not in ("define", "define-async"):
            continue

        name_node = expr[1]
        name = name_node.name if isinstance(name_node, Symbol) else str(name_node)

        effects = _extract_effects(expr)
        params = _extract_params(expr)
        src, line_num = _find_source_block(name, head)

        kind = "function"
        # Check if it's a constant (no fn/lambda body)
        val_idx = 4 if (len(expr) >= 5 and isinstance(expr[2], Keyword)
                        and expr[2].name == "effects") else 2
        if val_idx < len(expr):
            val = expr[val_idx]
            if isinstance(val, list) and val and isinstance(val[0], Symbol) and val[0].name in ("fn", "lambda"):
                kind = "async-function" if head == "define-async" else "function"
            else:
                kind = "constant"
        # define-async always wins, even for constant-shaped values.
        if head == "define-async":
            kind = "async-function"

        # --- Python translation ---
        py_code = None
        try:
            if py_emitter is None:
                from shared.sx.ref.bootstrap_py import PyEmitter
                py_emitter = PyEmitter()
            if head == "define-async":
                py_code = py_emitter._emit_define_async(expr)
            else:
                py_code = py_emitter._emit_define(expr)
        except Exception:
            pass  # translations are best-effort; None renders as "unavailable"

        # --- JavaScript translation ---
        js_code = None
        try:
            js_code = _js_translate_define(expr, name)
        except Exception:
            pass

        # --- Z3/SMT-LIB translation ---
        z3_code = None
        try:
            z3_code = _z3_translate_define(expr)
        except Exception:
            pass

        # --- Cross-references ---
        refs = []
        platform_deps = []  # NOTE(review): collected nowhere and unused — dead variable
        try:
            spec_index = _build_spec_index()
            body_symbols = _collect_symbols(expr)
            own_names = {name}
            for sym in body_symbols - own_names:
                if sym in spec_index:
                    refs.append(sym)
            # Symbols not in any spec file might be platform primitives
        except Exception:
            pass

        define_entry = {
            "name": name,
            "kind": kind,
            "effects": effects,
            "params": params,
            "source": src,
            "line": line_num,
            "python": py_code,
            "javascript": js_code,
            "z3": z3_code,
            "refs": refs,
            "tests": [],
            "test-count": 0,
        }
        all_defines.append(define_entry)

    # --- Assign defines to sections ---
    # Match by line number: each define belongs to the section whose header
    # precedes it in the source
    section_line_map: list[tuple[int, dict]] = []
    for s in sections:
        # Find the line where section title appears
        t = s["title"]
        for li, line in enumerate(lines, 1):
            if t in line:
                section_line_map.append((li, s))
                break
    section_line_map.sort(key=lambda x: x[0])

    for d in all_defines:
        dl = d.get("line", 0)
        target_section = sections[0]
        # Last section whose header line is at or before the define wins.
        for sl, s in section_line_map:
            if dl >= sl:
                target_section = s
        target_section["defines"].append(d)

    # --- Test matching ---
    all_tests = _extract_tests_for_spec(filename)
    test_total = 0
    for d in all_defines:
        matched = _match_tests_to_function(d["name"], all_tests)
        if matched:
            test_names = []
            for suite in matched:
                for t in suite["tests"]:
                    test_names.append({"name": t, "suite": suite["suite"]})
            d["tests"] = test_names
            d["test-count"] = len(test_names)
            test_total += len(test_names)

    # --- Stats ---
    pure_count = sum(1 for d in all_defines if not d["effects"])
    mutation_count = sum(1 for d in all_defines if "mutation" in d["effects"])
    io_count = sum(1 for d in all_defines if "io" in d["effects"])
    render_count = sum(1 for d in all_defines if "render" in d["effects"])

    # --- Platform interface ---
    # Parses doc lines of the shape ";; (name params) → returns — doc".
    platform_items = []
    for line in lines:
        m = re.match(r"^;;\s+\((\S+)\s+(.*?)\)\s+→\s+(\S+)\s+—\s+(.+)", line)
        if m:
            platform_items.append({
                "name": m.group(1),
                "params": m.group(2),
                "returns": m.group(3),
                "doc": m.group(4).strip(),
            })

    # Filter out empty sections
    sections = [s for s in sections if s["defines"]]

    return {
        "filename": filename,
        "title": title,
        "desc": desc,
        "sections": sections,
        "platform-interface": platform_items,
        "stats": {
            "total-defines": len(all_defines),
            "pure-count": pure_count,
            "mutation-count": mutation_count,
            "io-count": io_count,
            "render-count": render_count,
            "lines": len(lines),
            "test-total": test_total,
        },
    }
|
|
|
|
|
|
def _bootstrapper_data(target: str) -> dict:
    """Return bootstrapper source and generated output for a target.

    Returns a dict whose keys become SX env bindings:
    - bootstrapper-source: the Python bootstrapper source code
    - bootstrapped-output: the generated JavaScript
    - bootstrapper-not-found: truthy if target unknown
    """
    import os

    if target not in ("javascript", "python", "self-hosting", "self-hosting-js"):
        return {"bootstrapper-not-found": True}

    ref_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
    if not os.path.isdir(ref_dir):
        ref_dir = "/app/shared/sx/ref"

    # The self-hosting targets have their own dedicated builders.
    if target == "self-hosting":
        return _self_hosting_data(ref_dir)
    if target == "self-hosting-js":
        return _js_self_hosting_data(ref_dir)

    def _read_bootstrapper(basename: str) -> str:
        # Read the bootstrapper's own Python source for display.
        try:
            with open(os.path.join(ref_dir, basename), encoding="utf-8") as fh:
                return fh.read()
        except FileNotFoundError:
            return "# bootstrapper source not found"

    if target == "javascript":
        bootstrapper_source = _read_bootstrapper("bootstrap_js.py")
        # Run the bootstrap to generate JS.
        from shared.sx.ref.bootstrap_js import compile_ref_to_js
        try:
            bootstrapped_output = compile_ref_to_js(
                adapters=["dom", "engine", "orchestration", "boot", "cssx"]
            )
        except Exception as e:
            bootstrapped_output = f"// bootstrap error: {e}"
    else:  # target == "python"
        bootstrapper_source = _read_bootstrapper("bootstrap_py.py")
        from shared.sx.ref.bootstrap_py import compile_ref_to_py
        try:
            bootstrapped_output = compile_ref_to_py()
        except Exception as e:
            bootstrapped_output = f"# bootstrap error: {e}"

    return {
        "bootstrapper-not-found": None,
        "bootstrapper-source": bootstrapper_source,
        "bootstrapped-output": bootstrapped_output,
    }
|
|
|
|
|
|
def _self_hosting_data(ref_dir: str) -> dict:
    """Run py.sx live: load into evaluator, translate spec files, diff against G0.

    G0 is the hand-written Python bootstrapper (bootstrap_py); G1 is the
    self-hosted translator defined in py.sx.  Every define in the listed
    spec files is translated by both and the emitted statements are
    compared verbatim.

    Args:
        ref_dir: directory containing py.sx and the spec .sx files.

    Returns:
        Env-binding dict with sources, outputs, match counts, and a
        verification-status of "identical" / "mismatch" / "error".
    """
    import os
    from shared.sx.parser import parse_all
    from shared.sx.types import Symbol
    from shared.sx.ref.sx_ref import evaluate, make_env
    from shared.sx.ref.bootstrap_py import extract_defines, compile_ref_to_py, PyEmitter

    try:
        # Read py.sx source
        py_sx_path = os.path.join(ref_dir, "py.sx")
        with open(py_sx_path, encoding="utf-8") as f:
            py_sx_source = f.read()

        # Load py.sx into evaluator
        exprs = parse_all(py_sx_source)
        env = make_env()
        for expr in exprs:
            evaluate(expr, env)

        # Generate G0 (hand-written bootstrapper)
        g0_output = compile_ref_to_py()

        # Generate G1 (py.sx) — translate each spec file
        sx_files = [
            ("eval.sx", "eval"), ("forms.sx", "forms (server definition forms)"),
            ("render.sx", "render (core)"),
            ("adapter-html.sx", "adapter-html"), ("adapter-sx.sx", "adapter-sx"),
            ("deps.sx", "deps (component dependency analysis)"),
            ("signals.sx", "signals (reactive signal runtime)"),
        ]
        emitter = PyEmitter()
        total = 0
        matched = 0
        for filename, _label in sx_files:
            filepath = os.path.join(ref_dir, filename)
            if not os.path.exists(filepath):
                continue  # missing spec files are skipped, not treated as errors
            with open(filepath, encoding="utf-8") as f:
                src = f.read()
            defines = extract_defines(src)
            for name, expr in defines:
                # Translate the same define with both generations and compare.
                g0_stmt = emitter.emit_statement(expr)
                g1_stmt = evaluate(
                    [Symbol("py-statement"), [Symbol("quote"), expr], 0], env
                )
                total += 1
                if g0_stmt == g1_stmt:
                    matched += 1

        g0_lines = len(g0_output.splitlines())
        g0_bytes = len(g0_output)
        status = "identical" if matched == total else "mismatch"

    except Exception as e:
        # Any failure collapses to an "error" payload rather than raising.
        py_sx_source = f";; error loading py.sx: {e}"
        g0_output = f"# error: {e}"
        matched, total = 0, 0
        g0_lines, g0_bytes = 0, 0
        status = "error"

    return {
        "bootstrapper-not-found": None,
        "py-sx-source": py_sx_source,
        "g0-output": g0_output,
        # When every define matched, G1's output is G0's by construction.
        "g1-output": g0_output if status == "identical" else "# differs from G0",
        "defines-matched": str(matched),
        "defines-total": str(total),
        "g0-lines": str(g0_lines),
        "g0-bytes": str(g0_bytes),
        "verification-status": status,
    }
|
|
|
|
|
|
def _js_self_hosting_data(ref_dir: str) -> dict:
    """Run js.sx live: load into evaluator, translate all spec defines.

    Unlike _self_hosting_data there is no G0 diff here — the check is only
    that (js-statement ...) evaluates cleanly for every define in every
    .sx file, so defines-matched always equals defines-total on success.
    """
    import os
    from shared.sx.types import Symbol
    from shared.sx.ref.sx_ref import evaluate
    from shared.sx.ref.run_js_sx import load_js_sx
    from shared.sx.ref.platform_js import extract_defines

    try:
        js_sx_path = os.path.join(ref_dir, "js.sx")
        with open(js_sx_path, encoding="utf-8") as f:
            js_sx_source = f.read()

        env = load_js_sx()

        # All spec files
        all_files = sorted(
            f for f in os.listdir(ref_dir) if f.endswith(".sx")
        )
        total = 0
        for filename in all_files:
            filepath = os.path.join(ref_dir, filename)
            with open(filepath, encoding="utf-8") as f:
                src = f.read()
            defines = extract_defines(src)
            for name, expr in defines:
                # Bind the expression into the env so js-statement can see it;
                # the binding is overwritten on every iteration.
                env["_def_expr"] = expr
                evaluate(
                    [Symbol("js-statement"), Symbol("_def_expr")], env
                )
                total += 1

        status = "ok"

    except Exception as e:
        # First failure aborts the whole run and reports "error".
        js_sx_source = f";; error loading js.sx: {e}"
        total = 0
        status = "error"

    return {
        "bootstrapper-not-found": None,
        "js-sx-source": js_sx_source,
        "defines-matched": str(total),
        "defines-total": str(total),
        "js-sx-lines": str(len(js_sx_source.splitlines())),
        "verification-status": status,
    }
|
|
|
|
|
|
def _bundle_analyzer_data() -> dict:
    """Compute per-page component bundle analysis for the sx-docs app.

    I/O edge: walks the page registry and component env, serializing each
    needed component once; the pure aggregation happens in the SX spec
    (build_bundle_analysis).  Pages come back sorted by needed-count desc.
    """
    from shared.sx.jinja_bridge import get_component_env
    from shared.sx.pages import get_all_pages
    from shared.sx.deps import components_needed, scan_components_from_sx
    from shared.sx.parser import serialize
    from shared.sx.types import Component, Macro
    from shared.sx.ref.sx_ref import build_bundle_analysis

    env = get_component_env()
    total_components = sum(1 for v in env.values() if isinstance(v, Component))
    total_macros = sum(1 for v in env.values() if isinstance(v, Macro))
    pure_count = sum(1 for v in env.values() if isinstance(v, Component) and v.is_pure)
    io_count = total_components - pure_count

    # Extract raw data at I/O edge — Python accesses Component objects, serializes bodies
    pages_raw = []
    components_raw: dict[str, dict] = {}
    for name, page_def in sorted(get_all_pages("sx").items()):
        content_sx = serialize(page_def.content_expr)
        direct = scan_components_from_sx(content_sx)
        needed = sorted(components_needed(content_sx, env))

        for comp_name in needed:
            # Serialize each component at most once across all pages.
            if comp_name not in components_raw:
                val = env.get(comp_name)
                if isinstance(val, Component):
                    param_strs = ["&key"] + list(val.params)
                    if val.has_children:
                        param_strs.extend(["&rest", "children"])
                    params_sx = "(" + " ".join(param_strs) + ")"
                    body_sx = serialize(val.body, pretty=True)
                    components_raw[comp_name] = {
                        "is-pure": val.is_pure,
                        "affinity": val.affinity,
                        "render-target": val.render_target,
                        "io-refs": sorted(val.io_refs),
                        "deps": sorted(val.deps),
                        "source": f"(defcomp ~{val.name} {params_sx}\n {body_sx})",
                    }

        pages_raw.append({
            "name": name,
            "path": page_def.path,
            "direct": len(direct),
            "needed-names": needed,
        })

    # Pure data transformation in SX spec
    result = build_bundle_analysis(
        pages_raw, components_raw,
        total_components, total_macros, pure_count, io_count,
    )
    # Sort pages by needed count (descending) — SX has no sort primitive
    result["pages"] = sorted(result["pages"], key=lambda p: p["needed"], reverse=True)
    return result
|
|
|
|
|
|
def _routing_analyzer_data() -> dict:
    """Compute per-page routing classification for the sx-docs app.

    Extracts name/path/has-data/content source for every registered page,
    classifies them in the SX spec (build_routing_analysis), then renders a
    registry sample from the first three pages alphabetically.
    """
    from shared.sx.pages import get_all_pages
    from shared.sx.parser import serialize as sx_serialize
    from shared.sx.helpers import _sx_literal
    from shared.sx.ref.sx_ref import build_routing_analysis

    # I/O edge: extract page data from page registry
    pages_raw = []
    full_content: list[tuple[str, str, bool]] = []
    for name, page_def in sorted(get_all_pages("sx").items()):
        has_data = page_def.data_expr is not None
        content_src = ""
        if page_def.content_expr is not None:
            try:
                content_src = sx_serialize(page_def.content_expr)
            except Exception:
                pass  # unserializable content is treated as empty source
        pages_raw.append({
            "name": name, "path": page_def.path,
            "has-data": has_data, "content-src": content_src,
        })
        full_content.append((name, content_src, has_data))

    # Pure classification in SX spec
    result = build_routing_analysis(pages_raw)
    # Sort: client pages first, then server (SX has no sort primitive)
    result["pages"] = sorted(
        result["pages"],
        key=lambda p: (0 if p["mode"] == "client" else 1, p["name"]),
    )

    # Build registry sample (uses _sx_literal which is Python string escaping)
    sample_entries = []
    sorted_full = sorted(full_content, key=lambda x: x[0])
    for name, csrc, hd in sorted_full[:3]:
        page_def = get_all_pages("sx").get(name)
        if not page_def:
            continue
        # NOTE(review): :auth is hard-coded to "public" — presumably pages
        # carry no auth metadata here; confirm against the page registry.
        entry = (
            "{:name " + _sx_literal(name)
            + "\n :path " + _sx_literal(page_def.path)
            + "\n :auth " + _sx_literal("public")
            + " :has-data " + ("true" if hd else "false")
            + "\n :content " + _sx_literal(csrc)
            + "\n :closure {}}"
        )
        sample_entries.append(entry)
    result["registry-sample"] = "\n\n".join(sample_entries)

    return result
|
|
|
|
|
|
def _attr_detail_data(slug: str) -> dict:
    """Return attribute detail data for a specific attribute slug."""
    from content.pages import ATTR_DETAILS
    from shared.sx.helpers import sx_call
    from shared.sx.ref.sx_ref import build_attr_detail

    result = build_attr_detail(slug, ATTR_DETAILS.get(slug))
    # When the detail names a demo component, wrap it as a callable ref.
    demo = result.get("attr-demo")
    if demo:
        result["attr-demo"] = sx_call(demo)
    return result
|
|
|
|
|
|
def _header_detail_data(slug: str) -> dict:
    """Return header detail data for a specific header slug."""
    from content.pages import HEADER_DETAILS
    from shared.sx.helpers import sx_call
    from shared.sx.ref.sx_ref import build_header_detail

    detail = HEADER_DETAILS.get(slug)
    result = build_header_detail(slug, detail)
    # Convert a named demo component into a callable sx reference.
    name = result.get("header-demo")
    if name:
        result["header-demo"] = sx_call(name)
    return result
|
|
|
|
|
|
def _event_detail_data(slug: str) -> dict:
    """Return event detail data for a specific event slug."""
    from content.pages import EVENT_DETAILS
    from shared.sx.helpers import sx_call
    from shared.sx.ref.sx_ref import build_event_detail

    result = build_event_detail(slug, EVENT_DETAILS.get(slug))
    # Wrap a named demo component as a callable sx reference when present.
    if result.get("event-demo"):
        result["event-demo"] = sx_call(result["event-demo"])
    return result
|
|
|
|
|
|
def _run_spec_tests() -> dict:
    """Run test.sx against the Python SX evaluator and return results."""
    import os
    import time
    from shared.sx.parser import parse_all
    from shared.sx.ref.sx_ref import eval_expr as _eval, trampoline as _trampoline

    # Locate the ref directory: in-repo first, container path as fallback.
    spec_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
    if not os.path.isdir(spec_dir):
        spec_dir = "/app/shared/sx/ref"
    with open(os.path.join(spec_dir, "test.sx"), encoding="utf-8") as fh:
        source = fh.read()

    # Mutable tallies shared by the reporter callbacks below.
    counts = {"passed": 0, "failed": 0, "num": 0}
    stack: list[str] = []
    tap_lines: list[str] = []

    def try_call(thunk):
        # Invoke a zero-arg test thunk; capture any raised error.
        try:
            _trampoline(_eval([thunk], {}))
            return {"ok": True}
        except Exception as exc:
            return {"ok": False, "error": str(exc)}

    def report_pass(name):
        counts["num"] += 1
        counts["passed"] += 1
        label = " > ".join(stack + [name])
        tap_lines.append("ok " + str(counts["num"]) + " - " + label)

    def report_fail(name, error):
        counts["num"] += 1
        counts["failed"] += 1
        label = " > ".join(stack + [name])
        tap_lines.append("not ok " + str(counts["num"]) + " - " + label)
        tap_lines.append(" # " + str(error))

    def push_suite(name):
        stack.append(name)

    def pop_suite():
        stack.pop()

    # Test-framework callbacks exposed to the SX spec.
    env = {
        "try-call": try_call,
        "report-pass": report_pass,
        "report-fail": report_fail,
        "push-suite": push_suite,
        "pop-suite": pop_suite,
    }

    started = time.monotonic()
    for form in parse_all(source):
        _trampoline(_eval(form, env))
    took_ms = round((time.monotonic() - started) * 1000)

    return {
        "passed": counts["passed"],
        "failed": counts["failed"],
        "total": counts["passed"] + counts["failed"],
        "elapsed-ms": took_ms,
        "output": "\n".join(tap_lines),
    }
|
|
|
|
|
|
def _run_modular_tests(spec_name: str) -> dict:
    """Run modular test specs against the Python SX evaluator.

    spec_name: "eval", "parser", "router", "render", "deps", "engine",
    "orchestration", or "all" to run every spec.

    Returns a dict with a "server-results" key (TAP-style pass/fail
    counts and output) plus the spec source text(s) so the page
    :content can render them.
    """
    import os
    import time
    from shared.sx.parser import parse_all
    from shared.sx.ref.sx_ref import eval_expr as _eval, trampoline as _trampoline
    from shared.sx.types import Symbol, Keyword, Lambda, NIL

    # Locate the ref directory: in-repo first, container path as fallback.
    ref_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
    if not os.path.isdir(ref_dir):
        ref_dir = "/app/shared/sx/ref"

    suite_stack: list[str] = []
    passed = 0
    failed = 0
    test_num = 0
    lines: list[str] = []

    def try_call(thunk):
        # Invoke a zero-arg test thunk inside the shared env; capture errors.
        try:
            _trampoline(_eval([thunk], env))
            return {"ok": True}
        except Exception as e:
            return {"ok": False, "error": str(e)}

    def report_pass(name):
        nonlocal passed, test_num
        test_num += 1
        passed += 1
        lines.append("ok " + str(test_num) + " - " + " > ".join(suite_stack + [name]))

    def report_fail(name, error):
        nonlocal failed, test_num
        test_num += 1
        failed += 1
        full = " > ".join(suite_stack + [name])
        lines.append("not ok " + str(test_num) + " - " + full)
        lines.append(" # " + str(error))

    def push_suite(name):
        suite_stack.append(name)

    def pop_suite():
        suite_stack.pop()

    def sx_parse(source):
        return parse_all(source)

    def sx_serialize(val):
        # Minimal SX writer used by the parser round-trip tests.
        if val is None or val is NIL:
            return "nil"
        if isinstance(val, bool):
            return "true" if val else "false"
        if isinstance(val, (int, float)):
            return str(val)
        if isinstance(val, str):
            escaped = val.replace("\\", "\\\\").replace('"', '\\"')
            return f'"{escaped}"'
        if isinstance(val, Symbol):
            return val.name
        if isinstance(val, Keyword):
            return f":{val.name}"
        if isinstance(val, list):
            inner = " ".join(sx_serialize(x) for x in val)
            return f"({inner})"
        if isinstance(val, dict):
            parts = []
            for k, v in val.items():
                parts.append(f":{k}")
                parts.append(sx_serialize(v))
            return "{" + " ".join(parts) + "}"
        return str(val)

    def render_html(sx_source):
        # Render SX markup to an HTML string via the Python renderer;
        # degrades to a placeholder comment when it is unavailable.
        try:
            from shared.sx.ref.sx_ref import render_to_html as _render_to_html
        except ImportError:
            return "<!-- render-to-html not available -->"
        exprs = parse_all(sx_source)
        render_env = dict(env)
        result = ""
        for expr in exprs:
            result += _render_to_html(expr, render_env)
        return result

    def _for_each_indexed(fn, coll):
        # NOTE(review): for Lambda callbacks this writes the loop bindings
        # directly into fn.closure, so they persist after the loop ends —
        # kept as-is because the test specs exercise current behavior.
        if isinstance(fn, Lambda):
            closure = fn.closure
            for i, item in enumerate(coll or []):
                for p, v in zip(fn.params, [i, item]):
                    closure[p] = v
                _trampoline(_eval(fn.body, closure))
        else:
            for i, item in enumerate(coll or []):
                fn(i, item)
        return NIL

    # Test-framework callbacks plus small data primitives exposed to specs.
    env = {
        "try-call": try_call,
        "report-pass": report_pass,
        "report-fail": report_fail,
        "push-suite": push_suite,
        "pop-suite": pop_suite,
        "sx-parse": sx_parse,
        "sx-serialize": sx_serialize,
        "make-symbol": lambda name: Symbol(name),
        "make-keyword": lambda name: Keyword(name),
        "symbol-name": lambda sym: sym.name if isinstance(sym, Symbol) else str(sym),
        "keyword-name": lambda kw: kw.name if isinstance(kw, Keyword) else str(kw),
        "render-html": render_html,
        "for-each-indexed": _for_each_indexed,
        "dict-set!": lambda d, k, v: (d.__setitem__(k, v), NIL)[-1] if isinstance(d, dict) else NIL,
        "dict-has?": lambda d, k: isinstance(d, dict) and k in d,
        "dict-get": lambda d, k: d.get(k, NIL) if isinstance(d, dict) else NIL,
        "append!": lambda lst, item: (lst.append(item), NIL)[-1] if isinstance(lst, list) else NIL,
        "inc": lambda n: n + 1,
    }

    def eval_file(filename):
        # Evaluate an .sx file from ref_dir into env; missing files are skipped.
        filepath = os.path.join(ref_dir, filename)
        if not os.path.exists(filepath):
            return
        with open(filepath) as f:
            src = f.read()
        exprs = parse_all(src)
        for expr in exprs:
            _trampoline(_eval(expr, env))

    def load_engine_helpers():
        # Bind the engine parsing helpers into env — prefer the Python
        # implementations, fall back to evaluating the SX spec itself.
        # (Shared by the "engine" and "orchestration" branches below,
        # which previously duplicated this block verbatim.)
        try:
            from shared.sx.ref.sx_ref import (
                parse_time, parse_trigger_spec, default_trigger,
                parse_swap_spec, parse_retry_spec, filter_params,
            )
            env["parse-time"] = parse_time
            env["parse-trigger-spec"] = parse_trigger_spec
            env["default-trigger"] = default_trigger
            env["parse-swap-spec"] = parse_swap_spec
            env["parse-retry-spec"] = parse_retry_spec
            env["next-retry-ms"] = lambda cur, cap: min(cur * 2, cap)
            env["filter-params"] = filter_params
        except ImportError:
            eval_file("engine.sx")

    SPECS = {
        "eval": {"file": "test-eval.sx", "needs": []},
        "parser": {"file": "test-parser.sx", "needs": ["sx-parse"]},
        "router": {"file": "test-router.sx", "needs": []},
        "render": {"file": "test-render.sx", "needs": ["render-html"]},
        "deps": {"file": "test-deps.sx", "needs": []},
        "engine": {"file": "test-engine.sx", "needs": []},
        "orchestration": {"file": "test-orchestration.sx", "needs": []},
    }

    specs_to_run = list(SPECS.keys()) if spec_name == "all" else [spec_name]

    t0 = time.monotonic()

    # Load framework
    eval_file("test-framework.sx")

    for sn in specs_to_run:
        spec = SPECS.get(sn)
        if not spec:
            continue

        # Load module functions from bootstrap
        if sn == "router":
            try:
                from shared.sx.ref.sx_ref import (
                    split_path_segments,
                    parse_route_pattern,
                    match_route_segments,
                    match_route,
                    find_matching_route,
                    make_route_segment,
                )
                env["split-path-segments"] = split_path_segments
                env["parse-route-pattern"] = parse_route_pattern
                env["match-route-segments"] = match_route_segments
                env["match-route"] = match_route
                env["find-matching-route"] = find_matching_route
                env["make-route-segment"] = make_route_segment
            except ImportError:
                eval_file("router.sx")
        elif sn == "deps":
            try:
                from shared.sx.ref.sx_ref import (
                    scan_refs, scan_components_from_source,
                    transitive_deps, compute_all_deps,
                    components_needed, page_component_bundle,
                    page_css_classes, scan_io_refs,
                    transitive_io_refs, compute_all_io_refs,
                    component_pure_p,
                )
                env["scan-refs"] = scan_refs
                env["scan-components-from-source"] = scan_components_from_source
                env["transitive-deps"] = transitive_deps
                env["compute-all-deps"] = compute_all_deps
                env["components-needed"] = components_needed
                env["page-component-bundle"] = page_component_bundle
                env["page-css-classes"] = page_css_classes
                env["scan-io-refs"] = scan_io_refs
                env["transitive-io-refs"] = transitive_io_refs
                env["compute-all-io-refs"] = compute_all_io_refs
                env["component-pure?"] = component_pure_p
                env["test-env"] = lambda: env
            except ImportError:
                eval_file("deps.sx")
                env["test-env"] = lambda: env
        elif sn == "engine":
            load_engine_helpers()
        elif sn == "orchestration":
            # Mock platform functions for orchestration tests
            _mock_ts = [1000]
            env["now-ms"] = lambda: _mock_ts[0]
            env["log-info"] = lambda *_a: NIL
            env["log-warn"] = lambda *_a: NIL
            env["execute-action"] = lambda act, pay, ok_cb, _err: (ok_cb(pay), NIL)[-1]
            env["try-rerender-page"] = lambda *_a: NIL
            env["persist-offline-data"] = lambda *_a: NIL
            env["retrieve-offline-data"] = lambda: NIL
            env["dict-delete!"] = lambda d, k: (d.pop(k, None), NIL)[-1] if isinstance(d, dict) else NIL
            # DOM/browser stubs (never called by tests)
            _noop = lambda *_a: NIL
            for stub in [
                "try-parse-json", "dom-dispatch", "dom-query-selector",
                "dom-get-attribute", "dom-set-attribute", "dom-set-text-content",
                "dom-append", "dom-insert-html-adjacent", "dom-remove",
                "dom-outer-html", "dom-inner-html", "dom-create-element",
                "dom-set-inner-html", "dom-morph", "dom-get-tag",
                "dom-query-selector-all", "dom-add-event-listener",
                "dom-set-timeout", "dom-prevent-default", "dom-closest",
                "dom-matches", "dom-get-id", "dom-set-id", "dom-form-data",
                "dom-is-form", "browser-location-href", "browser-push-state",
                "browser-replace-state", "sx-hydrate-elements", "render-to-dom",
                "hoist-head-elements-full", "url-pathname",
            ]:
                if stub not in env:
                    env[stub] = _noop
            # Load engine (orchestration depends on it)
            load_engine_helpers()
            eval_file("orchestration.sx")

        eval_file(spec["file"])

    elapsed = round((time.monotonic() - t0) * 1000)

    result = {
        "server-results": {
            "passed": passed,
            "failed": failed,
            "total": passed + failed,
            "elapsed-ms": elapsed,
            "output": "\n".join(lines),
            "spec": spec_name,
        },
        "framework-source": _read_spec_file("test-framework.sx"),
    }

    # Include spec sources so :content can reference them from data.
    # NOTE(review): the "all" branch does not include an orchestration
    # source — presumably intentional; confirm against the docs page.
    if spec_name == "all":
        result["eval-source"] = _read_spec_file("test-eval.sx")
        result["parser-source"] = _read_spec_file("test-parser.sx")
        result["router-source"] = _read_spec_file("test-router.sx")
        result["render-source"] = _read_spec_file("test-render.sx")
        result["deps-source"] = _read_spec_file("test-deps.sx")
        result["engine-source"] = _read_spec_file("test-engine.sx")
    else:
        spec = SPECS.get(spec_name)
        if spec:
            result["spec-source"] = _read_spec_file(spec["file"])

    return result
|
|
|
|
|
|
def _data_test_data() -> dict:
|
|
"""Return test data for the client-side data rendering test page.
|
|
|
|
This exercises the Phase 4 data endpoint: server evaluates this
|
|
helper, serializes the result as SX, the client fetches and parses
|
|
it, then renders the page content with these bindings.
|
|
"""
|
|
from datetime import datetime, timezone
|
|
|
|
return {
|
|
"server-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
|
"items": [
|
|
{"label": "Eval", "detail": "Server evaluates :data expression"},
|
|
{"label": "Serialize", "detail": "Result serialized as SX wire format"},
|
|
{"label": "Fetch", "detail": "Client calls resolve-page-data"},
|
|
{"label": "Parse", "detail": "Client parses SX response to dict"},
|
|
{"label": "Render", "detail": "Client merges data into env, renders content"},
|
|
],
|
|
"phase": "Phase 4 — Client Async & IO Bridge",
|
|
"transport": "SX wire format (text/sx)",
|
|
}
|
|
|
|
|
|
async def _streaming_demo_data():
    """Multi-stream demo — yields three chunks at staggered intervals.

    Each yield is a dict with _stream_id (matching a ~shared:pages/suspense :id in the
    shell) plus bindings for the :content expression. The streaming
    infrastructure detects the async generator and resolves each suspense
    placeholder as each chunk arrives.
    """
    import asyncio
    from datetime import datetime, timezone

    # (delay-before-yield, id, label, color, message); cumulative
    # delays land the chunks at ~1s, ~3s, and ~5s.
    schedule = [
        (1, "stream-fast", "Fast API", "green", "Responded in ~1 second"),
        (2, "stream-medium", "Database Query", "blue", "Query completed in ~3 seconds"),
        (2, "stream-slow", "ML Inference", "amber", "Model inference completed in ~5 seconds"),
    ]
    for delay, stream_id, label, color, message in schedule:
        await asyncio.sleep(delay)
        yield {
            "stream-id": stream_id,
            "stream-label": label,
            "stream-color": color,
            "stream-message": message,
            "stream-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
        }
|
|
|
|
|
|
def _affinity_demo_data() -> dict:
    """Return affinity analysis for the demo components + page render plans."""
    from shared.sx.jinja_bridge import get_component_env
    from shared.sx.types import Component
    from shared.sx.pages import get_all_pages
    from shared.sx.ref.sx_ref import build_affinity_analysis

    # I/O edge: pull demo component metadata out of the component env.
    component_env = get_component_env()
    demo_names = [
        "~affinity-demo/aff-demo-auto", "~affinity-demo/aff-demo-client", "~affinity-demo/aff-demo-server",
        "~affinity-demo/aff-demo-io-auto", "~affinity-demo/aff-demo-io-client",
    ]
    components = []
    for comp_name in demo_names:
        comp = component_env.get(comp_name)
        if not isinstance(comp, Component):
            continue
        components.append({
            "name": comp_name, "affinity": comp.affinity,
            "render-target": comp.render_target,
            "io-refs": sorted(comp.io_refs), "is-pure": comp.is_pure,
        })

    # I/O edge: collect each page's precomputed render plan (when set).
    page_plans = []
    for page_def in get_all_pages("sx").values():
        plan = page_def.render_plan
        if not plan:
            continue
        page_plans.append({
            "name": page_def.name, "path": page_def.path,
            "server-count": len(plan.get("server", [])),
            "client-count": len(plan.get("client", [])),
            "server": plan.get("server", []),
            "client": plan.get("client", []),
            "io-deps": plan.get("io-deps", []),
        })

    return build_affinity_analysis(components, page_plans)
|
|
|
|
|
|
def _optimistic_demo_data() -> dict:
|
|
"""Return demo data for the optimistic update test page."""
|
|
from datetime import datetime, timezone
|
|
|
|
return {
|
|
"items": [
|
|
{"id": 1, "label": "First item", "status": "confirmed"},
|
|
{"id": 2, "label": "Second item", "status": "confirmed"},
|
|
{"id": 3, "label": "Third item", "status": "confirmed"},
|
|
],
|
|
"server-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
|
}
|
|
|
|
|
|
def _add_demo_item(**kwargs) -> dict:
|
|
"""Action: add a demo item. Returns confirmation with new item."""
|
|
from datetime import datetime, timezone
|
|
import random
|
|
|
|
label = kwargs.get("label", "Untitled")
|
|
return {
|
|
"id": random.randint(100, 9999),
|
|
"label": label,
|
|
"status": "confirmed",
|
|
"added-at": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
|
}
|
|
|
|
|
|
def _prove_data() -> dict:
    """Run prove.sx against the SX spec — both phases.

    Phase 1: Translate all define-* forms via z3.sx, verify satisfiability.
    Phase 2: Evaluate algebraic properties via bounded model checking.
    Returns results for the docs page to render.
    """
    import os
    import time
    from shared.sx.parser import parse_all
    from shared.sx.primitives import all_primitives
    from shared.sx.ref.sx_ref import (
        evaluate,
        trampoline as _trampoline,
        call_lambda as _call_lambda,
    )

    env = all_primitives()

    # Load the prover libraries into the environment.
    # (Previously used inline __import__("os") calls; a normal import
    # at the top of the function is the idiomatic equivalent.)
    ref_dir = _ref_dir()
    for lib in ("z3.sx", "prove.sx"):
        with open(os.path.join(ref_dir, lib), encoding="utf-8") as f:
            for expr in parse_all(f.read()):
                evaluate(expr, env)

    # Phase 1: definitional satisfiability
    with open(os.path.join(ref_dir, "primitives.sx"), encoding="utf-8") as f:
        prim_exprs = parse_all(f.read())

    t0 = time.monotonic()
    phase1 = _trampoline(_call_lambda(env["prove-file"], [prim_exprs], env))
    phase1_ms = round((time.monotonic() - t0) * 1000)

    # Phase 2: property-based constraint solving
    t1 = time.monotonic()
    phase2 = _trampoline(_call_lambda(env["prove-all-properties"], [], env))
    phase2_ms = round((time.monotonic() - t1) * 1000)

    # Flatten Phase 1 results for rendering
    phase1_results = []
    for r in phase1.get("results", []):
        phase1_results.append({
            "name": r.get("name", "?"),
            "status": r.get("status", "?"),
        })

    # Flatten Phase 2 results for rendering
    phase2_results = []
    total_tested = 0
    for r in phase2.get("results", []):
        tested = r.get("tested", 0)
        skipped = r.get("skipped", 0)
        total_tested += tested
        entry = {
            "name": r.get("name", "?"),
            "status": r.get("status", "?"),
            "tested": tested,
            "skipped": skipped,
        }
        ce = r.get("counterexample")
        if ce:
            entry["counterexample"] = str(ce)
        phase2_results.append(entry)

    # Generate SMT-LIB sample for a few properties
    props = env["sx-properties"]
    smtlib_samples = []
    for p in props[:3]:
        smt = _trampoline(_call_lambda(env["prove-property-smtlib"], [p], env))
        smtlib_samples.append(smt)
    # One with precondition
    for p in props:
        if p.get("given-expr"):
            smt = _trampoline(_call_lambda(env["prove-property-smtlib"], [p], env))
            smtlib_samples.append(smt)
            break

    return {
        "phase1-total": phase1.get("total", 0),
        "phase1-sat": phase1.get("sat", 0),
        "phase1-all-sat": phase1.get("all-sat", False),
        "phase1-ms": phase1_ms,
        "phase1-results": phase1_results,
        "phase2-total": phase2.get("total", 0),
        "phase2-verified": phase2.get("verified", 0),
        "phase2-falsified": phase2.get("falsified", 0),
        "phase2-all-verified": phase2.get("all-verified", False),
        "phase2-ms": phase2_ms,
        "phase2-results": phase2_results,
        "phase2-total-tested": total_tested,
        "smtlib-sample": "\n".join(smtlib_samples),
        "prove-source": _read_spec_file("prove.sx"),
        "z3-source": _read_spec_file("z3.sx"),
    }
|
|
|
|
|
|
def _ref_dir() -> str:
|
|
"""Return the path to the SX ref directory."""
|
|
import os
|
|
# Same resolution as _read_spec_file
|
|
ref_dir = os.path.join(os.path.dirname(__file__), "..", "..", "shared", "sx", "ref")
|
|
if not os.path.isdir(ref_dir):
|
|
ref_dir = "/app/shared/sx/ref"
|
|
return ref_dir
|
|
|
|
|
|
def _offline_demo_data() -> dict:
|
|
"""Return demo data for the offline data layer test page."""
|
|
from datetime import datetime, timezone
|
|
|
|
return {
|
|
"notes": [
|
|
{"id": 1, "text": "First note", "created": "2026-03-08T10:00:00Z"},
|
|
{"id": 2, "text": "Second note", "created": "2026-03-08T11:30:00Z"},
|
|
{"id": 3, "text": "Third note", "created": "2026-03-08T14:15:00Z"},
|
|
],
|
|
"server-time": datetime.now(timezone.utc).isoformat(timespec="seconds"),
|
|
}
|
|
|
|
|
|
def _page_helpers_demo_data() -> dict:
    """Run page-helpers.sx functions server-side, return results for comparison with client.

    Times each helper individually and returns both the server results
    and the raw inputs the client-side island needs to repeat the work.
    """
    import os
    import time
    from shared.sx.parser import parse_all
    from shared.sx.ref.sx_ref import (
        categorize_special_forms, build_reference_data,
        build_attr_detail, build_component_source,
        build_routing_analysis,
    )

    ref_dir = _ref_dir()
    results = {}

    # 1. categorize-special-forms
    # Read the source once; the same text is also passed to the client
    # below (previously re-opened without closing the file handle).
    t0 = time.monotonic()
    with open(os.path.join(ref_dir, "special-forms.sx"), encoding="utf-8") as f:
        sf_source = f.read()
    sf_exprs = parse_all(sf_source)
    sf_result = categorize_special_forms(sf_exprs)
    sf_ms = round((time.monotonic() - t0) * 1000, 1)
    sf_summary = {cat: len(forms) for cat, forms in sf_result.items()}
    results["sf-categories"] = sf_summary
    results["sf-total"] = sum(sf_summary.values())
    results["sf-ms"] = sf_ms

    # 2. build-reference-data
    from content.pages import REQUEST_ATTRS, ATTR_DETAILS
    t1 = time.monotonic()
    ref_result = build_reference_data("attributes", {
        "req-attrs": [list(t) for t in REQUEST_ATTRS[:5]],
        "beh-attrs": [], "uniq-attrs": [],
    }, list(ATTR_DETAILS.keys()))
    ref_ms = round((time.monotonic() - t1) * 1000, 1)
    results["ref-sample"] = ref_result.get("req-attrs", [])[:3]
    results["ref-ms"] = ref_ms

    # 3. build-attr-detail
    t2 = time.monotonic()
    detail = ATTR_DETAILS.get("sx-get")
    attr_result = build_attr_detail("sx-get", detail)
    attr_ms = round((time.monotonic() - t2) * 1000, 1)
    results["attr-result"] = attr_result
    results["attr-ms"] = attr_ms

    # 4. build-component-source
    t3 = time.monotonic()
    comp_result = build_component_source({
        "type": "component", "name": "~demo-card",
        "params": ["title", "subtitle"],
        "has-children": True,
        "body-sx": "(div :class \"card\"\n  (h2 title)\n  (when subtitle (p subtitle))\n  children)",
        "affinity": "auto",
    })
    comp_ms = round((time.monotonic() - t3) * 1000, 1)
    results["comp-source"] = comp_result
    results["comp-ms"] = comp_ms

    # 5. build-routing-analysis
    t4 = time.monotonic()
    routing_result = build_routing_analysis([
        {"name": "home", "path": "/", "has-data": False, "content-src": "(~home-content)"},
        {"name": "dashboard", "path": "/dash", "has-data": True, "content-src": "(~dashboard)"},
        {"name": "about", "path": "/about", "has-data": False, "content-src": "(~about-content)"},
        {"name": "settings", "path": "/settings", "has-data": True, "content-src": "(~settings)"},
    ])
    routing_ms = round((time.monotonic() - t4) * 1000, 1)
    results["routing-result"] = routing_result
    results["routing-ms"] = routing_ms

    # Total
    results["server-total-ms"] = round(sf_ms + ref_ms + attr_ms + comp_ms + routing_ms, 1)

    # Pass raw inputs for client-side island (serialized as data-sx-state)
    results["sf-source"] = sf_source
    results["attr-detail"] = detail
    results["req-attrs"] = [list(t) for t in REQUEST_ATTRS[:5]]
    results["attr-keys"] = list(ATTR_DETAILS.keys())

    return results
|