The htmx-boot-subtree! function (defined in lib/hyperscript/htmx.sx) was never loaded in the browser because hs-htmx.sx wasn't in the bundle or compile-modules lists. Added to: - bundle.sh: copy htmx.sx as hs-htmx.sx to dist - compile-modules.js: compile to hs-htmx.sxbc, add to deps and lazy list This was the root cause of "Load Content" button not working — hx-* attributes were never activated because htmx-boot-subtree! was undefined. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
511 lines
18 KiB
JavaScript
#!/usr/bin/env node
|
|
/**
|
|
* compile-modules.js — Pre-compile .sx files to bytecode s-expressions.
|
|
*
|
|
* Uses the native OCaml sx_server binary for compilation (~5x faster than
|
|
* the js_of_ocaml kernel). Sends source via the blob protocol, receives
|
|
* compiled bytecode as SX text.
|
|
*
|
|
* Usage: node compile-modules.js [dist-dir]
|
|
*/
|
|
|
|
const fs = require('fs');
|
|
const path = require('path');
|
|
const crypto = require('crypto');
|
|
const { execSync, spawnSync } = require('child_process');
|
|
|
|
// Output layout: dist/sx/ holds the synced .sx sources plus the compiled
// .sxbc artifacts. The dist directory is overridable via argv[1] for
// out-of-tree builds; defaults to ./dist next to this script.
const distDir = process.argv[2] || path.join(__dirname, 'dist');
const sxDir = path.join(distDir, 'sx');
// Repository root — three levels above this script's directory.
const projectRoot = path.resolve(__dirname, '..', '..', '..');

// Fail fast when the expected dist layout is missing (nothing to compile into).
if (!fs.existsSync(sxDir)) {
  console.error('sx dir not found:', sxDir);
  process.exit(1);
}
|
|
|
|
// Sync source .sx files to dist/sx/ before compiling.
// Source locations: spec/ for core, lib/ for compiler/vm, web/ and web/lib/
// for the web stack. Keys are dist filenames, values are repo-relative paths.
const SOURCE_MAP = {
  // spec/
  'render.sx': 'spec/render.sx',
  'core-signals.sx': 'spec/signals.sx',
  // lib/
  'bytecode.sx': 'lib/bytecode.sx',
  'compiler.sx': 'lib/compiler.sx',
  'vm.sx': 'lib/vm.sx',
  'freeze.sx': 'lib/freeze.sx',
  'highlight.sx': 'lib/highlight.sx',
  // web/lib/
  'dom.sx': 'web/lib/dom.sx',
  'browser.sx': 'web/lib/browser.sx',
  // web/
  'signals.sx': 'web/signals.sx',
  'deps.sx': 'web/deps.sx',
  'router.sx': 'web/router.sx',
  'page-helpers.sx': 'web/page-helpers.sx',
  'adapter-html.sx': 'web/adapter-html.sx',
  'adapter-sx.sx': 'web/adapter-sx.sx',
  'adapter-dom.sx': 'web/adapter-dom.sx',
  'boot-helpers.sx': 'web/lib/boot-helpers.sx',
  'hypersx.sx': 'web/hypersx.sx',
  'harness.sx': 'spec/harness.sx',
  'harness-reactive.sx': 'web/harness-reactive.sx',
  'harness-web.sx': 'web/harness-web.sx',
  'engine.sx': 'web/engine.sx',
  'orchestration.sx': 'web/orchestration.sx',
  'boot.sx': 'web/boot.sx',
  'tw-layout.sx': 'web/tw-layout.sx',
  'tw-type.sx': 'web/tw-type.sx',
  'tw.sx': 'web/tw.sx',
  'text-layout.sx': 'lib/text-layout.sx',
};

// Copy each mapped source into dist/sx/, but only when the bytes actually
// differ — avoids dirtying mtimes and keeps the sync log meaningful.
let synced = 0;
for (const [distName, relSource] of Object.entries(SOURCE_MAP)) {
  const fromPath = path.join(projectRoot, relSource);
  const toPath = path.join(sxDir, distName);
  if (!fs.existsSync(fromPath)) continue; // missing sources are tolerated
  const fresh = fs.readFileSync(fromPath);
  const current = fs.existsSync(toPath) ? fs.readFileSync(toPath) : null;
  if (current === null || !current.equals(fresh)) {
    fs.writeFileSync(toPath, fresh);
    synced += 1;
  }
}
if (synced > 0) console.log('Synced ' + synced + ' source files to dist/sx/');
|
|
|
|
// Locate the native OCaml sx_server binary: prefer the local dune build
// output, fall back to the container install path.
const binPaths = [
  path.join(__dirname, '..', '_build', 'default', 'bin', 'sx_server.exe'),
  '/app/bin/sx_server',
];
const binPath = binPaths.find((candidate) => fs.existsSync(candidate));
if (binPath === undefined) {
  // No compiler available — nothing we can do; report every path we tried.
  console.error('sx_server binary not found at:', binPaths.join(', '));
  process.exit(1);
}
|
|
|
|
// Every module to load and compile, in one flat list. NOTE(review): the list
// order appears significant — modules are loaded into the server environment
// sequentially (see the eval-blob loop below), so dependencies presumably must
// precede dependents; confirm before reordering.
const FILES = [
  'render.sx', 'core-signals.sx', 'signals.sx', 'deps.sx', 'router.sx',
  'page-helpers.sx', 'freeze.sx', 'bytecode.sx', 'compiler.sx', 'vm.sx',
  'dom.sx', 'browser.sx', 'adapter-html.sx', 'adapter-sx.sx', 'adapter-dom.sx',
  'tw-layout.sx', 'tw-type.sx', 'tw.sx',
  'text-layout.sx',
  'boot-helpers.sx', 'hypersx.sx', 'harness.sx', 'harness-reactive.sx',
  'harness-web.sx', 'engine.sx', 'orchestration.sx',
  // Hyperscript modules — loaded on demand via transparent lazy loader
  'hs-tokenizer.sx', 'hs-parser.sx', 'hs-compiler.sx', 'hs-runtime.sx',
  'hs-integration.sx', 'hs-htmx.sx',
  'boot.sx',
];
|
|
|
|
|
|
// ---------------------------------------------------------------------------
// Build the full input script — all commands in one batch
// ---------------------------------------------------------------------------

const t0 = Date.now();
console.log('Building compilation script...');

// `epoch` tags every command so responses can be matched back to requests;
// `script` accumulates the entire batch sent to the server on stdin.
let epoch = 1;
let script = '';

// Load compiler
script += `(epoch ${epoch++})\n(load "lib/compiler.sx")\n`;

// JIT pre-compile the compiler (skipped: vm-compile-adapter hangs with
// define-library wrappers in some lambda JIT paths. Compilation still
// works via CEK — just ~2x slower per file.)
// script += `(epoch ${epoch++})\n(vm-compile-adapter)\n`;

// Load all modules into env
for (const file of FILES) {
  const src = fs.readFileSync(path.join(sxDir, file), 'utf8');
  // The (blob N) header must carry the UTF-8 *byte* count, not the JS string
  // length — hence the Buffer round-trip before appending the raw source.
  const buf = Buffer.from(src, 'utf8');
  script += `(epoch ${epoch++})\n(eval-blob)\n(blob ${buf.length})\n`;
  script += src + '\n';
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Strip define-library wrapper for bytecode compilation.
|
|
//
|
|
// Keeps (import ...) forms — the compiler emits OP_PERFORM for these, enabling
|
|
// lazy loading: when the VM hits an import for an unloaded library, it suspends
|
|
// to the JS platform which fetches the library on demand.
|
|
//
|
|
// Strips define-library header (name, export) and (begin ...) wrapper, leaving
|
|
// the body defines + import instructions as top-level forms.
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/**
 * Strip the (define-library ...) wrapper from a module source string.
 *
 * Extracts the body of the inner (begin ...) form and re-appends anything
 * that followed the define-library form (e.g. trailing top-level imports),
 * so the result is plain top-level forms suitable for compile-blob.
 * Returns the source unchanged when no define-library/(begin ...) is found.
 *
 * @param {string} source - full module source text
 * @returns {string} unwrapped body + anything after the define-library form
 */
function stripLibraryWrapper(source) {
  // Paren-aware stripping: find (begin ...) inside (define-library ...), extract body.
  // Keep top-level (import ...) forms outside the define-library.

  // Find (define-library at the start
  // (lazy [\s\S]*? means the FIRST occurrence anywhere in the file is used)
  const dlMatch = source.match(/^[\s\S]*?\(define-library\b/);
  if (!dlMatch) return source; // no define-library, return as-is

  // Find the (begin that opens the body — skip past (export ...) using paren counting
  const afterDL = dlMatch[0].length;
  let pos = afterDL;
  let foundBegin = -1;

  while (pos < source.length) {
    // Skip whitespace and comments
    while (pos < source.length && /[\s]/.test(source[pos])) pos++;
    if (pos >= source.length) break;
    if (source[pos] === ';') { // skip comment line
      while (pos < source.length && source[pos] !== '\n') pos++;
      continue;
    }

    // Check for (begin
    // NOTE(review): startsWith('(begin') also matches e.g. "(begin-foo" —
    // presumably no library body opens with such a form; confirm.
    if (source.startsWith('(begin', pos)) {
      foundBegin = pos;
      break;
    }

    // Skip balanced sexp (the library name and export list)
    if (source[pos] === '(') {
      let depth = 1;
      pos++;
      while (pos < source.length && depth > 0) {
        if (source[pos] === '(') depth++;
        else if (source[pos] === ')') depth--;
        else if (source[pos] === '"') { // skip strings
          pos++;
          while (pos < source.length && source[pos] !== '"') {
            if (source[pos] === '\\') pos++; // honor escaped chars inside strings
            pos++;
          }
        } else if (source[pos] === ';') { // skip comments
          while (pos < source.length && source[pos] !== '\n') pos++;
          continue; // newline handled by the pos++ below on next iteration
        }
        pos++;
      }
    } else {
      // Skip atom
      while (pos < source.length && !/[\s()]/.test(source[pos])) pos++;
    }
  }

  if (foundBegin === -1) return source; // no (begin found

  // Find the body inside (begin ...) — skip "(begin" + optional whitespace
  let bodyStart = foundBegin + 6; // len("(begin") = 6
  // Skip optional newline/whitespace after (begin
  while (bodyStart < source.length && /[\s]/.test(source[bodyStart])) bodyStart++;

  // Find matching close of (begin ...) using paren counting from foundBegin
  pos = foundBegin + 1; // after opening (
  let depth = 1;
  while (pos < source.length && depth > 0) {
    if (source[pos] === '(') depth++;
    else if (source[pos] === ')') depth--;
    else if (source[pos] === '"') {
      pos++;
      while (pos < source.length && source[pos] !== '"') {
        if (source[pos] === '\\') pos++;
        pos++;
      }
    } else if (source[pos] === ';') {
      while (pos < source.length && source[pos] !== '\n') pos++;
      continue;
    }
    // Stop advancing once depth hits 0 so pos lands ON the closing paren.
    if (depth > 0) pos++;
  }
  const beginClose = pos; // position of closing ) for (begin ...)

  // Extract body (everything between (begin and its closing paren)
  const body = source.slice(bodyStart, beginClose);

  // Find any (import ...) forms AFTER the define-library
  // The define-library's closing paren is right after begin's
  // NOTE(review): this scan treats ';' as starting a comment even if it were
  // inside a string literal between begin's close and define-library's close —
  // assumed to never occur in practice; verify against real modules.
  let dlClose = beginClose + 1;
  while (dlClose < source.length && source[dlClose] !== ')') {
    if (source[dlClose] === ';') {
      while (dlClose < source.length && source[dlClose] !== '\n') dlClose++;
    }
    dlClose++;
  }
  dlClose++; // past the closing )

  const afterDLForm = source.slice(dlClose);

  return body + '\n' + afterDLForm;
}
|
|
|
|
// Queue a compile-blob command per module, with the define-library wrapper
// removed so the compiler sees plain top-level forms. `compileEpochs` maps
// each command's epoch back to its filename for response matching.
const compileEpochs = {};
for (const file of FILES) {
  const original = fs.readFileSync(path.join(sxDir, file), 'utf8');
  const stripped = stripLibraryWrapper(original);
  const payload = Buffer.from(stripped, 'utf8'); // byte length for (blob N)
  const thisEpoch = epoch++;
  compileEpochs[thisEpoch] = file;
  script += `(epoch ${thisEpoch})\n(compile-blob)\n(blob ${payload.length})\n`;
  script += stripped + '\n';
}
|
|
|
|
// Persist the batch script to a temp file (kept around for debugging failed
// runs; deleted at the very end of a successful run).
const tmpFile = '/tmp/sx-compile-script.txt';
fs.writeFileSync(tmpFile, script);

console.log('Running native OCaml compiler (' + FILES.length + ' files)...');
const t1 = Date.now();

const result = spawnSync(binPath, [], {
  // Feed the in-memory script directly — no need to re-read from disk the
  // bytes we just wrote (the temp file exists purely for debugging).
  input: Buffer.from(script, 'utf8'),
  maxBuffer: 100 * 1024 * 1024, // 100MB
  timeout: 600000, // 10 min
  stdio: ['pipe', 'pipe', 'pipe'],
});

// spawn-level failure (binary missing, timeout, etc.) — abort the build.
if (result.error) {
  console.error('Server error:', result.error);
  process.exit(1);
}

// Relay compiler diagnostics verbatim to our own stderr.
const stderr = result.stderr.toString();
process.stderr.write(stderr);

// Use latin1 to preserve byte positions (UTF-8 multi-byte chars stay as-is in length)
const stdoutBuf = result.stdout;
const stdout = stdoutBuf.toString('latin1');
const dt = Date.now() - t1;
console.log('Server finished in ' + Math.round(dt / 1000) + 's');
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Parse responses — extract compiled bytecode for each file
|
|
// ---------------------------------------------------------------------------
|
|
|
|
// Parse responses — stdout is latin1 so byte positions match string positions
|
|
// Parse state: counters for the summary line and a cursor into `stdout`.
let compiled = 0;
let skipped = 0;
let pos = 0;

// Consume and return the next newline-terminated line from `stdout`,
// advancing the shared cursor past the newline; null once input is exhausted.
function nextLine() {
  const newlineAt = stdout.indexOf('\n', pos);
  if (newlineAt < 0) return null;
  const text = stdout.substring(pos, newlineAt);
  pos = newlineAt + 1;
  return text;
}
|
|
|
|
// Walk the server's responses. Observed response shapes:
//   (ok-len EPOCH LEN)  followed by LEN bytes of payload — compiled value
//   (ok EPOCH ...)      — simple ack, no payload
//   (error EPOCH ...)   — failure for that epoch
while (pos < stdout.length) {
  const line = nextLine();
  if (line === null) break;
  const trimmed = line.trim();

  // ok-len EPOCH LEN — read LEN bytes as value
  const lenMatch = trimmed.match(/^\(ok-len (\d+) (\d+)\)$/);
  if (lenMatch) {
    const ep = parseInt(lenMatch[1], 10); // always pass radix
    const len = parseInt(lenMatch[2], 10);
    // Read exactly len bytes — latin1 encoding preserves byte positions
    const rawValue = stdout.slice(pos, pos + len);
    // Re-encode to proper UTF-8
    const value = Buffer.from(rawValue, 'latin1').toString('utf8');
    pos += len;
    // skip trailing newline
    if (pos < stdout.length && stdout.charCodeAt(pos) === 10) pos++;

    const file = compileEpochs[ep];
    if (file) {
      // `nil` or an (error ...) payload means compilation failed for this file.
      if (value === 'nil' || value.startsWith('(error')) {
        console.error('  SKIP', file, '—', value.slice(0, 60));
        skipped++;
      } else {
        // Content hash of the *source* ties the .sxbc to the input it was
        // built from (first 16 hex chars is enough for staleness checks).
        const hash = crypto.createHash('sha256')
          .update(fs.readFileSync(path.join(sxDir, file), 'utf8'))
          .digest('hex').slice(0, 16);

        // Wrap raw bytecode (braces stripped) in the sxbc v1 container.
        const sxbc = '(sxbc 1 "' + hash + '"\n  (code\n    ' +
          value.replace(/^\{/, '').replace(/\}$/, '').trim() + '))\n';

        const outPath = path.join(sxDir, file.replace(/\.sx$/, '.sxbc'));
        fs.writeFileSync(outPath, sxbc);

        const size = fs.statSync(outPath).size;
        console.log('  ok', file, '→', Math.round(size / 1024) + 'K');
        compiled++;
      }
    }
    continue;
  }

  // Simple ok or error — skip (but report errors tied to a compile epoch)
  if (trimmed.match(/^\(ok \d+/) || trimmed.match(/^\(error \d+/)) {
    if (trimmed.match(/^\(error/)) {
      const epMatch = trimmed.match(/^\(error (\d+)/);
      if (epMatch) {
        const ep = parseInt(epMatch[1], 10);
        const file = compileEpochs[ep];
        if (file) {
          console.error('  SKIP', file, '—', trimmed.slice(0, 80));
          skipped++;
        }
      }
    }
    continue;
  }
}
|
|
|
|
// Copy compiled files to shared/static/wasm/sx/ for web serving
const staticSxDir = path.resolve(__dirname, '..', '..', '..', 'shared', 'static', 'wasm', 'sx');
if (fs.existsSync(staticSxDir)) {
  let copied = 0;
  for (const file of FILES) {
    // Bytecode and its optional JSON sidecar.
    for (const ext of ['.sxbc', '.sxbc.json']) {
      const artifact = file.replace(/\.sx$/, ext);
      const from = path.join(sxDir, artifact);
      if (fs.existsSync(from)) {
        fs.copyFileSync(from, path.join(staticSxDir, artifact));
        copied += 1;
      }
    }
    // Plain .sx source too (fallback when bytecode is missing); never
    // follow/copy symlinked sources.
    const sxFrom = path.join(sxDir, file);
    if (fs.existsSync(sxFrom) && !fs.lstatSync(sxFrom).isSymbolicLink()) {
      fs.copyFileSync(sxFrom, path.join(staticSxDir, file));
      copied += 1;
    }
  }
  console.log('Copied', copied, 'files to', staticSxDir);
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Generate module-manifest.json — dependency graph for lazy loading
|
|
// ---------------------------------------------------------------------------
|
|
|
|
console.log('Generating module manifest...');

// Pull the library name list, e.g. "(sx dom)", out of a
// (define-library (namespace name) ...) form; null when absent.
function extractLibraryName(source) {
  const match = /\(define-library\s+(\([^)]+\))/.exec(source);
  return match === null ? null : match[1];
}
|
|
|
|
// Extract top-level (import (namespace name)) deps from source.
// Only imports appearing BEFORE define-library count — those are dependency
// declarations; anything after the wrapper opens would be a self-import.
function extractImportDeps(source) {
  const found = [];
  for (const line of source.split('\n')) {
    // Stop at define-library — imports after that are self-imports
    if (line.startsWith('(define-library')) break;
    const match = /^\(import\s+(\([^)]+\))\)/.exec(line);
    if (match !== null) found.push(match[1]);
  }
  return found;
}
|
|
|
|
// Extract exported symbol names from the (export name1 name2 ...) clause.
// Returns [] when the source has no export clause.
function extractExports(source) {
  const m = source.match(/\(export\s+([\s\S]*?)\)\s*\(/);
  if (!m) return [];
  // Walk the whitespace-separated tokens of the export list, dropping
  // keywords and paren debris, and keep the bare symbol names.
  const names = [];
  for (const token of m[1].split(/\s+/)) {
    if (!token || token.startsWith(':') || token.startsWith('(') || token.startsWith(')')) {
      continue;
    }
    const clean = token.replace(/[()]/g, '');
    if (clean && !clean.startsWith(':')) names.push(clean);
  }
  return names;
}
|
|
|
|
// Flatten library spec: "(sx dom)" → "sx dom"
|
|
function libKey(spec) {
|
|
return spec.replace(/^\(/, '').replace(/\)$/, '');
|
|
}
|
|
|
|
// Extract top-level (define name ...) symbols from a non-library file.
// Only column-0 defines of plain symbols count; (define (fn args) ...) forms
// and keyword-looking names are skipped.
function extractDefines(source) {
  const names = [];
  const pattern = /^\(define\s+(\S+)/gm;
  for (let m = pattern.exec(source); m !== null; m = pattern.exec(source)) {
    const sym = m[1];
    if (sym && !sym.startsWith('(') && !sym.startsWith(':')) names.push(sym);
  }
  return names;
}
|
|
|
|
// Build the module manifest: maps a module key to { file, deps, exports }.
// `entryFile` captures the single entry point (imports but no define-library).
const manifest = {};
let entryFile = null;

// HS modules form a dependency chain — loading one loads all predecessors.
// (Hoisted out of the per-file loop: the table is loop-invariant and was
// previously rebuilt on every iteration.)
const HS_DEPS = {
  'hs-parser': ['hs-tokenizer'],
  'hs-compiler': ['hs-tokenizer', 'hs-parser'],
  'hs-runtime': ['hs-tokenizer', 'hs-parser', 'hs-compiler'],
  'hs-integration': ['hs-tokenizer', 'hs-parser', 'hs-compiler', 'hs-runtime'],
  'hs-htmx': ['hs-tokenizer', 'hs-parser', 'hs-compiler', 'hs-runtime', 'hs-integration'],
};

for (const file of FILES) {
  const srcPath = path.join(sxDir, file);
  if (!fs.existsSync(srcPath)) continue;
  const src = fs.readFileSync(srcPath, 'utf8');
  const libName = extractLibraryName(src);
  const deps = extractImportDeps(src);
  const sxbcFile = file.replace(/\.sx$/, '.sxbc');

  if (libName) {
    // Proper (define-library ...) module — keyed by flattened library name.
    const exports = extractExports(src);
    manifest[libKey(libName)] = {
      file: sxbcFile,
      deps: deps.map(libKey),
      exports: exports,
    };
  } else if (deps.length > 0) {
    // Entry point (no define-library, has imports)
    entryFile = { file: sxbcFile, deps: deps.map(libKey) };
  } else {
    // Non-library file (e.g. hyperscript modules) — extract top-level defines
    // as exports so the transparent lazy loader can resolve symbols to files.
    const defines = extractDefines(src);
    if (defines.length > 0) {
      const key = file.replace(/\.sx$/, '');
      manifest[key] = {
        file: sxbcFile,
        deps: HS_DEPS[key] || [],
        exports: defines,
      };
    }
  }
}
|
|
|
|
// Synthesize the special "_entry" manifest record for the boot entry point.
if (entryFile) {
  // Partition entry deps into eager (needed at boot) and lazy (loaded on demand).
  // Lazy deps are fetched by the suspension handler when the kernel requests them.
  const LAZY_ENTRY_DEPS = new Set([
    'sx bytecode', // JIT-only — enable-jit! runs after boot
  ]);
  const eagerDeps = entryFile.deps.filter(d => !LAZY_ENTRY_DEPS.has(d));
  const lazyDeps = entryFile.deps.filter(d => LAZY_ENTRY_DEPS.has(d));
  // Hyperscript modules aren't define-library, so not auto-detected as deps.
  // Load them lazily after boot — eager loading breaks the boot sequence.
  const HS_LAZY = ['hs-tokenizer', 'hs-parser', 'hs-compiler', 'hs-runtime', 'hs-integration', 'hs-htmx'];
  for (const m of HS_LAZY) {
    // Only list modules that actually produced a manifest entry above.
    if (manifest[m] && !lazyDeps.includes(m)) lazyDeps.push(m);
  }
  // Text layout library — loaded eagerly for Pretext island
  if (manifest['sx text-layout'] && !eagerDeps.includes('sx text-layout')) {
    eagerDeps.push('sx text-layout');
  }
  manifest['_entry'] = {
    file: entryFile.file,
    deps: eagerDeps,
  };
  // Emit lazy_deps only when non-empty so consumers can treat it as optional.
  if (lazyDeps.length > 0) {
    manifest['_entry'].lazy_deps = lazyDeps;
  }
}
|
|
|
|
// Write the manifest next to the compiled modules, mirror it to the static
// dir, print the summary, and clean up the temp script.
const manifestPath = path.join(sxDir, 'module-manifest.json');
fs.writeFileSync(manifestPath, `${JSON.stringify(manifest, null, 2)}\n`);
console.log('  Wrote', manifestPath, `(${Object.keys(manifest).length} modules)`);

// Copy manifest to static dir
if (fs.existsSync(staticSxDir)) {
  fs.copyFileSync(manifestPath, path.join(staticSxDir, 'module-manifest.json'));
  console.log('  Copied manifest to', staticSxDir);
}

const elapsedMs = Date.now() - t0;
console.log('Done:', compiled, 'compiled,', skipped, 'skipped in', Math.round(elapsedMs / 1000) + 's');

fs.unlinkSync(tmpFile);
|