Restore hyperscript work on stable site base (908f4f80)
Reset to last known-good state (908f4f80) where links, stepper, and
islands all work, then recovered all hyperscript implementation,
conformance tests, behavioral tests, Playwright specs, site sandbox,
IO-aware server loading, and upstream test suite from f271c88a.
Excludes runtime changes (VM resolve hook, VmSuspended browser handler,
sx_ref.ml guard recovery) that need careful re-integration.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
172
tests/playwright/pre-screen-sources.js
Normal file
172
tests/playwright/pre-screen-sources.js
Normal file
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env node
|
||||
// pre-screen-sources.js — Identify hyperscript sources that hang the WASM parser
|
||||
//
|
||||
// For each unique _="..." source in hs-behavioral-data.js, spawns a child process
|
||||
// that loads the WASM kernel + HS modules, then tries to compile. If it takes >3s,
|
||||
// it's marked as hanging.
|
||||
//
|
||||
// Output:
|
||||
// tests/playwright/hs-safe-sources.json — sources that compile OK (or error)
|
||||
// tests/playwright/hs-hanging-sources.json — sources that hang the parser/compiler
|
||||
//
|
||||
// Usage: node tests/playwright/pre-screen-sources.js
|
||||
|
||||
const { execFileSync } = require('child_process');
const fs = require('fs');
const path = require('path');

// This script lives in tests/playwright/, two directory levels below the repo root.
const PROJECT_ROOT = path.join(__dirname, '..', '..');
// Behavioral test data we mine for _="..." hyperscript sources.
const DATA_FILE = path.join(__dirname, 'hs-behavioral-data.js');
// Child-process entry point that compiles one batch of sources.
const WORKER_FILE = path.join(__dirname, '_pre-screen-worker-timed.js');
|
||||
|
||||
// Harvest every unique _="..." hyperscript source from the behavioral test data.
const testData = require(DATA_FILE);
const uniqueSources = new Set();
for (const testCase of testData) {
  for (const match of testCase.html.matchAll(/_=['"]([^'"]+)['"]/g)) {
    uniqueSources.add(match[1]);
  }
}
const allSources = [...uniqueSources];
console.log(`Found ${allSources.length} unique hyperscript sources`);
|
||||
|
||||
// Batching: every batch runs in a fresh child process, so a single hanging
// source cannot poison the compilation of later ones.
const BATCH_SIZE = 15;
const TIMEOUT_MS = 30000; // 30s for a batch of 15 (allows ~2s per source)

// Accumulators filled in by testBatch()/bisect() below.
const safe = [];    // sources that compile without hanging (compile errors included)
const errors = [];  // {source, detail} records for compile errors
const hanging = []; // sources that hang the parser/compiler
const timings = []; // {source, ms, status} — used for the timing summary at the end
|
||||
|
||||
/**
 * Compile a batch of hyperscript sources in a fresh child process.
 *
 * Results are recorded into the module-level `safe` / `errors` / `hanging` /
 * `timings` accumulators from the worker's "RESULT:<json>" stdout lines.
 *
 * @param {string[]} sources - hyperscript source strings to compile
 * @returns {boolean} true when the whole batch completed (results recorded);
 *   false when the batch timed out or crashed and must be bisected by the caller
 */
function testBatch(sources) {
  // Unique temp file (os.tmpdir() + pid) instead of a fixed /tmp path:
  // portable to non-Unix platforms and safe against concurrent runs.
  const os = require('os');
  const tmpFile = path.join(os.tmpdir(), `hs-batch-input-${process.pid}.json`);
  fs.writeFileSync(tmpFile, JSON.stringify(sources));

  try {
    const output = execFileSync(process.execPath, [WORKER_FILE], {
      timeout: TIMEOUT_MS,
      encoding: 'utf8',
      env: { ...process.env, HS_BATCH_FILE: tmpFile },
      cwd: PROJECT_ROOT,
    });

    // The worker emits one "RESULT:<json>" line per source on stdout.
    const lines = output.trim().split('\n');
    for (const line of lines) {
      if (!line.startsWith('RESULT:')) continue;
      const { source, status, detail, ms } = JSON.parse(line.slice(7));
      timings.push({ source, status, ms });
      if (status === 'ok') {
        safe.push(source);
      } else if (status === 'error') {
        errors.push({ source, detail });
        safe.push(source); // errors are still "safe" — they don't hang
      } else {
        hanging.push(source);
      }
    }
    return true;
  } catch (e) {
    if (e.killed || e.signal === 'SIGTERM') {
      // Whole batch timed out — caller bisects to isolate the hanging source(s).
      return false;
    }
    // Any other failure (worker crash, malformed RESULT line): hand the batch
    // back for bisection as well, so individual offenders can be isolated.
    console.error(`  Batch crashed: ${e.message.slice(0, 200)}`);
    return false;
  } finally {
    // Best-effort cleanup of the temp input file.
    try { fs.unlinkSync(tmpFile); } catch { /* already gone */ }
  }
}
|
||||
|
||||
/**
 * Recursively narrow a timed-out batch down to the individual source(s) that
 * hang the compiler. Halves that complete have their results recorded by
 * testBatch() as usual; a lone survivor of bisection is the hanging source.
 *
 * @param {string[]} sources - batch known (or suspected) to contain a hang
 */
function bisect(sources) {
  if (sources.length === 0) return;

  if (sources.length === 1) {
    // Bisected all the way down — this single source is the culprit.
    console.log(`  HANG: ${sources[0].slice(0, 80)}`);
    hanging.push(sources[0]);
    return;
  }

  console.log(`  Bisecting ${sources.length} sources...`);

  // Split and retry each half in order; recurse into any half that still fails.
  const splitAt = Math.ceil(sources.length / 2);
  for (const half of [sources.slice(0, splitAt), sources.slice(splitAt)]) {
    if (!testBatch(half)) {
      bisect(half);
    }
  }
}
|
||||
|
||||
// Drive the pre-screen: run every batch, bisecting any that time out.
const totalBatches = Math.ceil(allSources.length / BATCH_SIZE);
let batchNum = 0;
for (let offset = 0; offset < allSources.length; offset += BATCH_SIZE) {
  const batch = allSources.slice(offset, offset + BATCH_SIZE);
  batchNum += 1;
  process.stdout.write(`Batch ${batchNum}/${totalBatches} (${batch.length} sources)... `);

  if (testBatch(batch)) {
    console.log(`OK (${safe.length} safe, ${hanging.length} hanging so far)`);
    continue;
  }
  console.log(`TIMEOUT — bisecting`);
  bisect(batch);
  console.log(`  After bisect: ${safe.length} safe, ${hanging.length} hanging`);
}
|
||||
|
||||
// Persist the three result sets as pretty-printed JSON next to the test data.
const safeFile = path.join(__dirname, 'hs-safe-sources.json');
const hangFile = path.join(__dirname, 'hs-hanging-sources.json');
const errFile = path.join(__dirname, 'hs-error-sources.json');

// One small helper so all three files share the same formatting.
const writeJson = (file, value) => {
  fs.writeFileSync(file, JSON.stringify(value, null, 2) + '\n');
};
writeJson(safeFile, safe);
writeJson(hangFile, hanging);
writeJson(errFile, errors);

console.log(`\nDone!`);
console.log(`  Safe (no hang): ${safe.length} (written to ${path.relative(PROJECT_ROOT, safeFile)})`);
console.log(`  Errors: ${errors.length} (written to ${path.relative(PROJECT_ROOT, errFile)})`);
console.log(`  Hanging: ${hanging.length} (written to ${path.relative(PROJECT_ROOT, hangFile)})`);
|
||||
|
||||
// Echo the problem sources so they are visible in CI logs without opening files.
if (hanging.length > 0) {
  console.log(`\nHanging sources:`);
  hanging.forEach((s) => console.log(`  - ${s}`));
}

if (errors.length > 0) {
  console.log(`\nError sources:`);
  for (const { source, detail } of errors) {
    console.log(`  - ${source}`);
    console.log(`    ${detail}`);
  }
}
|
||||
|
||||
// Timing summary — surface the slowest sources and flag near-hangs (>3s).
if (timings.length > 0) {
  timings.sort((a, b) => b.ms - a.ms); // slowest first; nothing reads timings afterwards
  console.log(`\nSlowest sources (top 20):`);
  for (const t of timings.slice(0, 20)) {
    console.log(`  ${t.ms}ms [${t.status}] ${t.source.slice(0, 80)}`);
  }
  const total = timings.reduce((sum, t) => sum + t.ms, 0);
  const avg = Math.round(total / timings.length);
  console.log(`\nTotal: ${total}ms, Avg: ${avg}ms, Max: ${timings[0].ms}ms`);

  // Sources over 3s did not hang outright but sit close to the worker's limit.
  const slow = timings.filter((t) => t.ms > 3000);
  if (slow.length > 0) {
    console.log(`\nWARNING: ${slow.length} sources took >3s:`);
    for (const t of slow) {
      console.log(`  ${t.ms}ms ${t.source}`);
    }
  }
}
|
||||
Reference in New Issue
Block a user