Bytecode compiler now emits OP_PERFORM for (import ...) and compiles
(define-library ...) bodies. The VM stores the import request in
globals["__io_request"] and stops the run loop — no exceptions needed.
vm-execute-module returns a suspension dict, vm-resume-module continues.
Browser: sx_browser.ml detects suspension dicts from execute_module and
returns JS {suspended, op, request, resume} objects. The sx-platform.js
while loop handles cascading suspensions via handleImportSuspension.
13 modules load via .sxbc bytecode in 226ms (manifest-driven), both
islands hydrate, all handlers wired. 2650/2650 tests pass including
6 new vm-import-suspension tests.
Also: consolidated sx-platform-2.js → sx-platform.js, fixed
vm-execute-module missing code-from-value call, fixed bootstrap.py
protocol registry transpiler issues.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
340 lines
11 KiB
JavaScript
#!/usr/bin/env node
|
|
/**
|
|
* compile-modules.js — Pre-compile .sx files to bytecode s-expressions.
|
|
*
|
|
* Uses the native OCaml sx_server binary for compilation (~5x faster than
|
|
* the js_of_ocaml kernel). Sends source via the blob protocol, receives
|
|
* compiled bytecode as SX text.
|
|
*
|
|
* Usage: node compile-modules.js [dist-dir]
|
|
*/
|
|
|
|
const fs = require('fs');
|
|
const path = require('path');
|
|
const crypto = require('crypto');
|
|
const { execSync, spawnSync } = require('child_process');
|
|
|
|
// Resolve the output layout: dist dir from argv[1] (default ./dist),
// with the .sx module sources expected under dist/sx.
const distDir = process.argv[2] || path.join(__dirname, 'dist');
const sxDir = path.join(distDir, 'sx');

// Fail fast when the source directory is absent — nothing else can run.
const haveSxDir = fs.existsSync(sxDir);
if (!haveSxDir) {
  console.error('sx dir not found:', sxDir);
  process.exit(1);
}
|
|
|
|
// Locate the native OCaml compiler binary: local dev build first,
// then the container install path.
const binPaths = [
  path.join(__dirname, '..', '_build', 'default', 'bin', 'sx_server.exe'),
  '/app/bin/sx_server',
];
let binPath = null;
for (const candidate of binPaths) {
  if (fs.existsSync(candidate)) {
    binPath = candidate;
    break;
  }
}
if (!binPath) {
  console.error('sx_server binary not found at:', binPaths.join(', '));
  process.exit(1);
}
|
|
|
|
// Modules to process, listed explicitly rather than globbed.
// NOTE(review): the ordering looks dependency-sorted (primitives like
// render/signals first, boot.sx last) and the load loop evaluates them in
// this order into one shared environment — confirm before reordering.
const FILES = [
  'render.sx', 'core-signals.sx', 'signals.sx', 'deps.sx', 'router.sx',
  'page-helpers.sx', 'freeze.sx', 'bytecode.sx', 'compiler.sx', 'vm.sx',
  'dom.sx', 'browser.sx', 'adapter-html.sx', 'adapter-sx.sx', 'adapter-dom.sx',
  'tw-layout.sx', 'tw-type.sx', 'tw.sx',
  'boot-helpers.sx', 'hypersx.sx', 'harness.sx', 'harness-reactive.sx',
  'harness-web.sx', 'engine.sx', 'orchestration.sx', 'boot.sx',
];
|
|
|
|
|
|
// ---------------------------------------------------------------------------
// Build the full input script — every command for the server in one batch
// ---------------------------------------------------------------------------

const t0 = Date.now();
console.log('Building compilation script...');

let epoch = 1;
let script = '';

// First command: load the compiler into the server environment.
script += `(epoch ${epoch++})\n(load "lib/compiler.sx")\n`;

// JIT pre-compile of the compiler is deliberately disabled: vm-compile-adapter
// hangs with define-library wrappers in some lambda JIT paths. Compilation
// still works via CEK — just ~2x slower per file.
// script += `(epoch ${epoch++})\n(vm-compile-adapter)\n`;

// Evaluate every module source so later compile commands see its environment.
for (const moduleFile of FILES) {
  const source = fs.readFileSync(path.join(sxDir, moduleFile), 'utf8');
  const byteLen = Buffer.byteLength(source, 'utf8');
  script += `(epoch ${epoch++})\n(eval-blob)\n(blob ${byteLen})\n`;
  script += source + '\n';
}
|
|
|
|
// ---------------------------------------------------------------------------
// Strip define-library wrapper for bytecode compilation.
//
// Keeps (import ...) forms — the compiler emits OP_PERFORM for these, enabling
// lazy loading: when the VM hits an import for an unloaded library, it suspends
// to the JS platform which fetches the library on demand.
//
// Strips the define-library header (name, export) and (begin ...) wrapper,
// leaving the body defines + import instructions as top-level forms.
// ---------------------------------------------------------------------------

/**
 * Unwrap a `(define-library ... (begin BODY))` source file, line by line:
 *   - `(import ...)` lines are kept;
 *   - the define-library header line and `(export ...)` lines (everything up
 *     to and including the `(begin` line) are dropped;
 *   - BODY lines are kept verbatim;
 *   - column-0 lines consisting only of `)` characters (plus an optional
 *     trailing `;` comment) are dropped as the define-library closers —
 *     but ONLY once a define-library header has actually been seen, so a
 *     plain script (e.g. the entry file) keeps its own top-level closers;
 *   - structural marker comments (";; end define-library", ";; Re-export")
 *     are dropped.
 *
 * This is a heuristic, line-based transform: it assumes library bodies never
 * place a `)`-only closer in column 0.
 *
 * @param {string} source - full .sx file text
 * @returns {string} unwrapped source, newline-joined
 */
function stripLibraryWrapper(source) {
  const lines = source.split('\n');
  const result = [];
  let skip = false;       // inside the header region (define-library .. (begin)
  let sawLibrary = false; // a define-library header has been seen

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const trimmed = line.trim();

    // Header: drop from (define-library up to and including the (begin line.
    // \b instead of a literal trailing space also matches headers where the
    // name starts immediately or the line ends after the keyword.
    if (/^\(define-library\b/.test(trimmed)) { skip = true; sawLibrary = true; continue; }
    if (skip && trimmed.startsWith('(export')) { continue; }
    if (skip && trimmed.match(/^\(begin/)) { skip = false; continue; }
    if (skip) continue;

    // Drop the define-library closing parens: a column-0 line of only `)`
    // chars (optionally followed by a comment). Gated on sawLibrary so files
    // without a define-library keep their legitimate top-level closers.
    if (sawLibrary && trimmed.match(/^\)+(\s*;.*)?$/) && line.match(/^\)/)) {
      continue;
    }

    // Drop standalone comments that are just structural markers.
    if (trimmed.match(/^;;\s*(end define-library|Re-export)/)) continue;

    result.push(line);
  }

  return result.join('\n');
}
|
|
|
|
// Queue a compile command per module, with the define-library wrapper
// stripped. Remember which epoch maps to which file so responses can be
// matched back later.
const compileEpochs = {};
for (const moduleFile of FILES) {
  const original = fs.readFileSync(path.join(sxDir, moduleFile), 'utf8');
  const stripped = stripLibraryWrapper(original);
  const byteLen = Buffer.byteLength(stripped, 'utf8');
  const ep = epoch;
  epoch += 1;
  compileEpochs[ep] = moduleFile;
  script += `(epoch ${ep})\n(compile-blob)\n(blob ${byteLen})\n`;
  script += stripped + '\n';
}
|
|
|
|
// Persist the batch script, then feed it to the native server on stdin.
const tmpFile = '/tmp/sx-compile-script.txt';
fs.writeFileSync(tmpFile, script);

console.log('Running native OCaml compiler (' + FILES.length + ' files)...');
const t1 = Date.now();

const spawnOpts = {
  input: fs.readFileSync(tmpFile),
  maxBuffer: 100 * 1024 * 1024, // 100MB of stdout headroom for bytecode
  timeout: 600000,              // 10 min hard cap
  stdio: ['pipe', 'pipe', 'pipe'],
};
const result = spawnSync(binPath, [], spawnOpts);
|
|
|
|
// Abort on spawn-level failure (binary unrunnable, timeout, signal).
if (result.error) {
  console.error('Server error:', result.error);
  process.exit(1);
}

// Forward the server's diagnostics verbatim.
process.stderr.write(result.stderr.toString());

// Decode stdout as latin1 — one char per byte — so string offsets equal
// byte offsets even when the payload contains multi-byte UTF-8 sequences.
const stdout = result.stdout.toString('latin1');
const dt = Date.now() - t1;
console.log('Server finished in ' + Math.round(dt / 1000) + 's');
|
|
|
|
// ---------------------------------------------------------------------------
// Parse responses — extract compiled bytecode for each file.
// stdout was decoded as latin1, so string positions are byte positions.
// ---------------------------------------------------------------------------

let compiled = 0;
let skipped = 0;
let pos = 0; // cursor into stdout

// Consume and return the next line (without its newline); null at EOF.
function nextLine() {
  const newlineAt = stdout.indexOf('\n', pos);
  if (newlineAt < 0) return null;
  const text = stdout.slice(pos, newlineAt);
  pos = newlineAt + 1;
  return text;
}
|
|
|
|
// Walk the response stream. Two frame shapes matter:
//   (ok-len EPOCH LEN)\n<LEN bytes>\n — compiled bytecode payload
//   (ok EPOCH ...) / (error EPOCH ...) — status-only lines
while (pos < stdout.length) {
  const line = nextLine();
  if (line === null) break;
  const trimmed = line.trim();

  // ok-len EPOCH LEN — the next LEN bytes are the compiled value.
  const lenMatch = trimmed.match(/^\(ok-len (\d+) (\d+)\)$/);
  if (lenMatch) {
    const ep = parseInt(lenMatch[1], 10);
    const len = parseInt(lenMatch[2], 10);
    // Read exactly len bytes — latin1 encoding preserves byte positions.
    const rawValue = stdout.slice(pos, pos + len);
    // Re-encode the latin1 bytes back into proper UTF-8 text.
    const value = Buffer.from(rawValue, 'latin1').toString('utf8');
    pos += len;
    // Skip the newline that terminates the payload.
    if (pos < stdout.length && stdout.charCodeAt(pos) === 10) pos++;

    const file = compileEpochs[ep];
    if (file) {
      if (value === 'nil' || value.startsWith('(error')) {
        console.error(' SKIP', file, '—', value.slice(0, 60));
        skipped++;
      } else {
        // Hash the ORIGINAL source (not the stripped variant) so a .sxbc
        // can be validated against the .sx file it was built from.
        const hash = crypto.createHash('sha256')
          .update(fs.readFileSync(path.join(sxDir, file), 'utf8'))
          .digest('hex').slice(0, 16);

        // Wrap in the (sxbc VERSION HASH (code ...)) container, dropping the
        // {...} braces around the raw compiled value.
        const sxbc = '(sxbc 1 "' + hash + '"\n (code\n ' +
          value.replace(/^\{/, '').replace(/\}$/, '').trim() + '))\n';

        const outPath = path.join(sxDir, file.replace(/\.sx$/, '.sxbc'));
        fs.writeFileSync(outPath, sxbc);

        const size = fs.statSync(outPath).size;
        console.log(' ok', file, '→', Math.round(size / 1024) + 'K');
        compiled++;
      }
    }
    continue;
  }

  // Status-only frames: report compile errors for known epochs, ignore acks.
  if (trimmed.match(/^\(ok \d+/) || trimmed.match(/^\(error \d+/)) {
    if (trimmed.match(/^\(error/)) {
      const epMatch = trimmed.match(/^\(error (\d+)/);
      if (epMatch) {
        const ep = parseInt(epMatch[1], 10);
        const file = compileEpochs[ep];
        if (file) {
          console.error(' SKIP', file, '—', trimmed.slice(0, 80));
          skipped++;
        }
      }
    }
    continue;
  }
}
|
|
|
|
// Mirror build artifacts into shared/static/wasm/sx/ for web serving.
const staticSxDir = path.resolve(__dirname, '..', '..', '..', 'shared', 'static', 'wasm', 'sx');
if (fs.existsSync(staticSxDir)) {
  let copied = 0;
  for (const file of FILES) {
    const base = file.replace(/\.sx$/, '');
    // Bytecode plus its optional JSON sidecar.
    for (const ext of ['.sxbc', '.sxbc.json']) {
      const from = path.join(sxDir, base + ext);
      if (fs.existsSync(from)) {
        fs.copyFileSync(from, path.join(staticSxDir, base + ext));
        copied++;
      }
    }
    // Plain .sx source as a fallback when the .sxbc is missing; symlinks are
    // skipped so we never copy through a link.
    const sxSrc = path.join(sxDir, file);
    if (fs.existsSync(sxSrc) && !fs.lstatSync(sxSrc).isSymbolicLink()) {
      fs.copyFileSync(sxSrc, path.join(staticSxDir, file));
      copied++;
    }
  }
  console.log('Copied', copied, 'files to', staticSxDir);
}
|
|
|
|
// ---------------------------------------------------------------------------
// Generate module-manifest.json — dependency graph for lazy loading
// ---------------------------------------------------------------------------

console.log('Generating module manifest...');

/**
 * Pull the library name out of a `(define-library (ns name) ...)` form.
 * @param {string} source - full .sx file text
 * @returns {?string} the parenthesized name, e.g. "(sx dom)", or null
 */
function extractLibraryName(source) {
  const match = /\(define-library\s+(\([^)]+\))/.exec(source);
  return match === null ? null : match[1];
}
|
|
|
|
/**
 * Collect top-level `(import (ns name))` dependency declarations.
 * Only imports that appear BEFORE define-library count — any import after
 * that point is a self-import, not a dependency.
 * @param {string} source - full .sx file text
 * @returns {string[]} parenthesized specs, e.g. ["(sx dom)"]
 */
function extractImportDeps(source) {
  const deps = [];
  for (const line of source.split('\n')) {
    // Dependency declarations end where the library definition starts.
    if (line.startsWith('(define-library')) break;
    const match = /^\(import\s+(\([^)]+\))\)/.exec(line);
    if (match !== null) deps.push(match[1]);
  }
  return deps;
}
|
|
|
|
/**
 * Flatten a library spec string: "(sx dom)" → "sx dom".
 * Removes one leading "(" and one trailing ")" if present.
 * @param {string} spec
 * @returns {string}
 */
function libKey(spec) {
  let key = spec;
  if (key.startsWith('(')) key = key.slice(1);
  if (key.endsWith(')')) key = key.slice(0, -1);
  return key;
}
|
|
|
|
// Build the manifest: one record per library, plus the boot entry — the one
// file that has imports but no define-library of its own.
const manifest = {};
let entryFile = null;

for (const file of FILES) {
  const srcPath = path.join(sxDir, file);
  if (!fs.existsSync(srcPath)) continue;

  const source = fs.readFileSync(srcPath, 'utf8');
  const libName = extractLibraryName(source);
  const depKeys = extractImportDeps(source).map(libKey);
  const sxbcFile = file.replace(/\.sx$/, '.sxbc');

  if (libName) {
    manifest[libKey(libName)] = { file: sxbcFile, deps: depKeys };
  } else if (depKeys.length > 0) {
    // Entry point: no define-library, but declares imports.
    entryFile = { file: sxbcFile, deps: depKeys };
  }
}
|
|
|
|
if (entryFile) {
  // Split entry deps into eager (loaded at boot) and lazy (fetched by the
  // suspension handler only when the kernel actually requests them).
  const LAZY_ENTRY_DEPS = new Set([
    'sx bytecode', // JIT-only — enable-jit! runs after boot
  ]);
  const eagerDeps = [];
  const lazyDeps = [];
  for (const dep of entryFile.deps) {
    (LAZY_ENTRY_DEPS.has(dep) ? lazyDeps : eagerDeps).push(dep);
  }
  manifest['_entry'] = {
    file: entryFile.file,
    deps: eagerDeps,
  };
  if (lazyDeps.length > 0) {
    manifest['_entry'].lazy_deps = lazyDeps;
  }
}
|
|
|
|
// Write the manifest next to the bytecode, mirror it to the static dir,
// print the summary, and clean up the temp script.
const manifestPath = path.join(sxDir, 'module-manifest.json');
fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + '\n');
console.log(' Wrote', manifestPath, '(' + Object.keys(manifest).length + ' modules)');

if (fs.existsSync(staticSxDir)) {
  fs.copyFileSync(manifestPath, path.join(staticSxDir, 'module-manifest.json'));
  console.log(' Copied manifest to', staticSxDir);
}

const total = Date.now() - t0;
console.log('Done:', compiled, 'compiled,', skipped, 'skipped in', Math.round(total / 1000) + 's');

fs.unlinkSync(tmpFile);
|