Reset to last known-good state (908f4f80) where links, stepper, and
islands all work, then recovered all hyperscript implementation,
conformance tests, behavioral tests, Playwright specs, site sandbox,
IO-aware server loading, and upstream test suite from f271c88a.
Excludes runtime changes (VM resolve hook, VmSuspended browser handler,
sx_ref.ml guard recovery) that need careful re-integration.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
234 lines
7.4 KiB
JavaScript
// SX Site Renderer — OCaml subprocess driver for sandboxed page testing.
// Communicates via epoch protocol (stdin/stdout), no HTTP server needed.
//
// Usage:
//   const renderer = new SxRenderer(projectRoot);
//   await renderer.ready();
//   const urls = await renderer.navUrls();   // [["href","label"], ...]
//   const html = await renderer.render(url); // complete HTML string
//   renderer.close();
const { spawn } = require('child_process');
|
|
const path = require('path');
|
|
|
|
class SxRenderer {
  /**
   * Spawn the sx_server subprocess in --site mode and start buffering its
   * stdout for the epoch protocol.
   * @param {string} projectRoot - Repository root; the executable path is
   *   resolved relative to it and it is exported as SX_PROJECT_DIR.
   */
  constructor(projectRoot) {
    this.projectRoot = projectRoot;
    this.epoch = 0;        // monotonically increasing request id
    this.pending = null;   // { epoch, resolve, reject } for the in-flight request
    this.chunks = [];      // Buffer chunks — avoids O(n²) string concat
    this.bufferLen = 0;    // total bytes across chunks
    this.readyResolve = null;
    this.readyReject = null;
    this.isReady = false;  // set once "(ready)" has been consumed
    this.exitError = null; // sticky error if the subprocess died

    const exe = path.join(projectRoot, 'hosts/ocaml/_build/default/bin/sx_server.exe');
    this.proc = spawn(exe, ['--site'], {
      cwd: projectRoot,
      env: { ...process.env, SX_PROJECT_DIR: projectRoot },
      stdio: ['pipe', 'pipe', 'pipe'],
    });

    // Hand the raw Buffer to _onData. The previous chunk.toString() here
    // corrupted any multi-byte UTF-8 sequence that straddled two chunks,
    // and a string's .length counts characters (not bytes), skewing
    // bufferLen and the byte-offset math in _tryResolve.
    this.proc.stdout.on('data', (chunk) => this._onData(chunk));

    // Drain stderr to prevent pipe deadlock with large stdout writes.
    // OCaml writes JIT logs to stderr during render — if the stderr pipe
    // fills up, stdout writes block and we deadlock.
    this.stderrBuf = '';
    this.proc.stderr.on('data', (chunk) => {
      this.stderrBuf += chunk.toString();
    });

    this.proc.on('error', (err) => {
      this._fail(new Error(`subprocess error: ${err.message}`));
    });
    this.proc.on('exit', (code) => {
      this._fail(new Error(`subprocess exited with code ${code}`));
    });
  }

  /**
   * Record a fatal subprocess failure: reject the in-flight request (if any)
   * and any waiter blocked in ready(). Previously a crash during startup
   * left ready() hanging forever.
   * @param {Error} err
   */
  _fail(err) {
    this.exitError = err;
    if (this.pending) {
      const { reject } = this.pending;
      this.pending = null;
      reject(err);
    }
    if (this.readyReject) {
      const reject = this.readyReject;
      this.readyResolve = null;
      this.readyReject = null;
      reject(err);
    }
  }

  /**
   * Wait for the subprocess to finish loading all .sx files.
   * Resolves immediately if "(ready)" was already seen; rejects if the
   * subprocess exited or errored before becoming ready.
   * @returns {Promise<void>}
   */
  ready() {
    if (this.isReady) return Promise.resolve();
    if (this.exitError) return Promise.reject(this.exitError);
    return new Promise((resolve, reject) => {
      this.readyResolve = resolve;
      this.readyReject = reject;
    });
  }

  /** Escape backslashes and double quotes for an SX string literal. */
  _quote(s) {
    return s.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
  }

  /**
   * Render a page URL to complete HTML.
   * @param {string} urlPath
   * @returns {Promise<string>} full HTML document text
   */
  async render(urlPath) {
    return this._send(`(render-page "${this._quote(urlPath)}")`);
  }

  /**
   * Get all nav URLs.
   * @returns {Promise<Array<[string, string]>>} [[href, label], ...]
   */
  async navUrls() {
    const raw = await this._send('(nav-urls)');
    return this._parsePairList(raw);
  }

  /**
   * Evaluate an SX expression in the subprocess.
   * @param {string} expr - raw SX source; quoted/escaped before sending
   * @returns {Promise<string>} the printed result
   */
  async eval(expr) {
    return this._send(`(eval "${this._quote(expr)}")`);
  }

  /**
   * Get all page test specs as [[url, {has-text: [...], ...}], ...].
   * @returns {Promise<Map<string, {hasText?: string[], hasIsland?: string[]}>>}
   */
  async pageTestSpecs() {
    const raw = await this.eval('(map (fn (k) (list k (get page-test-specs k))) (keys page-test-specs))');
    return this._parsePageSpecs(raw);
  }

  /**
   * Parse page test specs from SX text.
   * @param {string} sx
   * @returns {Map<string, {hasText?: string[], hasIsland?: string[]}>}
   */
  _parsePageSpecs(sx) {
    const specs = new Map();
    // Each entry: ("url" {:has-text ("a" "b") :has-island ("c")})
    const entryRe = /\("([^"]+)"\s+\{([^}]*)\}\)/g;
    let m;
    while ((m = entryRe.exec(sx))) {
      const url = m[1];
      const body = m[2];
      const spec = {};
      const textMatch = body.match(/:has-text\s+\(([^)]*)\)/);
      if (textMatch) {
        spec.hasText = [...textMatch[1].matchAll(/"([^"]*)"/g)].map(x => x[1]);
      }
      const islandMatch = body.match(/:has-island\s+\(([^)]*)\)/);
      if (islandMatch) {
        spec.hasIsland = [...islandMatch[1].matchAll(/"([^"]*)"/g)].map(x => x[1]);
      }
      specs.set(url, spec);
    }
    return specs;
  }

  /** Kill the subprocess. Idempotent; pending requests are rejected via the
   *  'exit' handler. */
  close() {
    if (this.proc) {
      this.proc.kill();
      this.proc = null;
    }
  }

  // --- internal ---

  /**
   * Send one framed command and await its response. Only one request may be
   * in flight at a time (the protocol is strictly serial).
   * @param {string} command - a complete SX form
   * @returns {Promise<string>}
   */
  _send(command) {
    return new Promise((resolve, reject) => {
      if (this.pending) {
        reject(new Error('concurrent send not supported'));
        return;
      }
      if (this.exitError) {
        reject(this.exitError);
        return;
      }
      this.epoch++;
      this.pending = { epoch: this.epoch, resolve, reject };
      this.proc.stdin.write(`(epoch ${this.epoch})\n${command}\n`);
      // Response may already be in buffer from a previous stdout chunk
      process.nextTick(() => this._tryResolve());
    });
  }

  /**
   * Accumulate a stdout chunk (kept as raw bytes) and attempt to complete
   * the startup handshake and/or the pending request.
   * @param {Buffer|string} chunk
   */
  _onData(chunk) {
    const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
    this.chunks.push(buf);
    this.bufferLen += buf.length; // byte count, not character count

    // Check for (ready) — startup complete. Scan until seen (not only while
    // a waiter exists) so a late ready() call can resolve immediately.
    if (!this.isReady) {
      const merged = this._peekBuffer();
      const idx = merged.indexOf('(ready)'); // byte index
      if (idx !== -1) {
        this._consumeBytes(idx + 8); // skip "(ready)\n"
        this.isReady = true;
        const resolve = this.readyResolve;
        this.readyResolve = null;
        this.readyReject = null;
        if (resolve) resolve();
      }
    }
    if (this.pending) this._tryResolve();
  }

  /**
   * Merge all chunks into a single Buffer for parsing (cached back into
   * this.chunks so repeated peeks don't re-concatenate).
   * @returns {Buffer}
   */
  _peekBuffer() {
    if (this.chunks.length === 1) return this.chunks[0];
    const merged = Buffer.concat(this.chunks);
    this.chunks = [merged];
    return merged;
  }

  /**
   * Remove the first n bytes from the buffer.
   * @param {number} n
   */
  _consumeBytes(n) {
    const buf = this._peekBuffer();
    if (n >= buf.length) {
      this.chunks = [];
      this.bufferLen = 0;
    } else {
      this.chunks = [buf.slice(n)];
      this.bufferLen = buf.length - n;
    }
  }

  /**
   * Try to parse a complete response for the pending epoch out of the
   * buffer. No-op (waits for more data) while the frame is incomplete.
   * Frames: "(ok-len <ep> <bytes>)\n<data>\n", "(ok <ep> <line>)\n",
   * "(error <ep> <msg>)\n".
   */
  _tryResolve() {
    if (!this.pending) return;
    const ep = this.pending.epoch;
    const buf = this._peekBuffer();
    // Protocol headers are ASCII, so character offsets within this decoded
    // prefix equal byte offsets into buf.
    const str = buf.toString('utf8', 0, Math.min(buf.length, 40)); // just the header area

    // ok-len: length-prefixed binary response
    const lenPrefix = `(ok-len ${ep} `;
    const lenIdx = str.indexOf(lenPrefix);
    if (lenIdx !== -1) {
      const afterPrefix = lenIdx + lenPrefix.length;
      const closeParen = str.indexOf(')', afterPrefix);
      if (closeParen === -1) return; // incomplete header
      const n = parseInt(str.slice(afterPrefix, closeParen), 10);
      const dataStart = closeParen + 2; // skip ")\n"
      const dataEnd = dataStart + n;    // n is a byte length
      if (buf.length < dataEnd) return; // incomplete data — wait for more
      const data = buf.toString('utf8', dataStart, dataEnd);
      this._consumeBytes(dataEnd + 1); // skip trailing \n
      const { resolve } = this.pending;
      this.pending = null;
      resolve(data);
      return;
    }

    // ok: simple response (single line)
    const fullStr = buf.toString('utf8');
    const okPrefix = `(ok ${ep} `;
    const okIdx = fullStr.indexOf(okPrefix);
    if (okIdx !== -1) {
      const eol = fullStr.indexOf('\n', okIdx);
      if (eol === -1) return; // line not complete yet
      const line = fullStr.slice(okIdx + okPrefix.length, eol - 1); // drop ")"
      // Consume by byte length, not character index: the payload may contain
      // multi-byte UTF-8, and a char offset would desynchronize the buffer.
      this._consumeBytes(Buffer.byteLength(fullStr.slice(0, eol + 1), 'utf8'));
      const { resolve } = this.pending;
      this.pending = null;
      resolve(line);
      return;
    }

    // error
    const errPrefix = `(error ${ep} `;
    const errIdx = fullStr.indexOf(errPrefix);
    if (errIdx !== -1) {
      const eol = fullStr.indexOf('\n', errIdx);
      if (eol === -1) return;
      const msg = fullStr.slice(errIdx + errPrefix.length, eol - 1); // drop ")"
      this._consumeBytes(Buffer.byteLength(fullStr.slice(0, eol + 1), 'utf8'));
      const { reject } = this.pending;
      this.pending = null;
      reject(new Error(`SX error: ${msg}`));
      return;
    }
  }

  /**
   * Parse an SX list of string pairs: (("a" "b") ("c" "d")) → [["a","b"], ["c","d"]].
   * Unescapes \" and \\ inside each string.
   * @param {string} sx
   * @returns {Array<[string, string]>}
   */
  _parsePairList(sx) {
    const pairs = [];
    const re = /\("([^"\\]*(?:\\.[^"\\]*)*)"\s+"([^"\\]*(?:\\.[^"\\]*)*)"\)/g;
    let m;
    while ((m = re.exec(sx))) {
      pairs.push([m[1].replace(/\\"/g, '"').replace(/\\\\/g, '\\'),
                  m[2].replace(/\\"/g, '"').replace(/\\\\/g, '\\')]);
    }
    return pairs;
  }
}
|
|
|
|
// CommonJS export of the renderer class (named export only — no default).
module.exports = { SxRenderer };
|