more plans
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 12m0s
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 12m0s
This commit is contained in:
70
test-sx-web/Dockerfile
Normal file
70
test-sx-web/Dockerfile
Normal file
@@ -0,0 +1,70 @@
|
||||
# syntax=docker/dockerfile:1

FROM python:3.11-slim AS base

# APP_MODULE and APP_PORT are consumed by entrypoint.sh (hypercorn target).
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONPATH=/app \
    PIP_NO_CACHE_DIR=1 \
    APP_PORT=8000 \
    APP_MODULE=app:app

WORKDIR /app

# nodejs: presumably needed by the sx client tooling — TODO confirm.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates nodejs \
    && rm -rf /var/lib/apt/lists/*

# Shared requirements plus test-only tooling (runner.py needs pytest-json-report).
COPY shared/requirements.txt ./requirements.txt
RUN pip install -r requirements.txt && \
    pip install pytest pytest-json-report

# Shared code (including tests)
COPY shared/ ./shared/

# App code — test dashboard (from test-sx-web/ in build context)
COPY test-sx-web/ ./test-app-tmp/
# Flatten the app into /app; sxc/ is optional, hence the guarded copy.
RUN cp -r test-app-tmp/app.py test-app-tmp/path_setup.py \
    test-app-tmp/bp test-app-tmp/sx test-app-tmp/services \
    test-app-tmp/runner.py test-app-tmp/__init__.py ./ 2>/dev/null || true && \
    ([ -d test-app-tmp/sxc ] && cp -r test-app-tmp/sxc ./ || true) && \
    rm -rf test-app-tmp

# sx_docs app code (for its tests, if any) — every piece is optional.
COPY sx/ ./sx-app-tmp/
RUN mkdir -p sx_docs && \
    ([ -d sx-app-tmp/tests ] && cp -r sx-app-tmp/tests sx_docs/ || true) && \
    ([ -d sx-app-tmp/sx ] && cp -r sx-app-tmp/sx sx_docs/sx || true) && \
    ([ -d sx-app-tmp/sxc ] && cp -r sx-app-tmp/sxc sx_docs/sxc || true) && \
    ([ -d sx-app-tmp/content ] && cp -r sx-app-tmp/content sx_docs/content || true) && \
    ([ -f sx-app-tmp/__init__.py ] && cp sx-app-tmp/__init__.py sx_docs/ || true) && \
    rm -rf sx-app-tmp

# Sibling models for cross-domain SQLAlchemy imports
COPY blog/__init__.py ./blog/__init__.py
COPY blog/models/ ./blog/models/
COPY market/__init__.py ./market/__init__.py
COPY market/models/ ./market/models/
COPY cart/__init__.py ./cart/__init__.py
COPY cart/models/ ./cart/models/
COPY events/__init__.py ./events/__init__.py
COPY events/models/ ./events/models/
COPY federation/__init__.py ./federation/__init__.py
COPY federation/models/ ./federation/models/
COPY account/__init__.py ./account/__init__.py
COPY account/models/ ./account/models/
COPY relations/__init__.py ./relations/__init__.py
COPY relations/models/ ./relations/models/
COPY likes/__init__.py ./likes/__init__.py
COPY likes/models/ ./likes/models/
COPY orders/__init__.py ./orders/__init__.py
COPY orders/models/ ./orders/models/

COPY test-sx-web/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Run as an unprivileged user.
RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app
USER appuser

# NOTE(review): entrypoint.sh binds on ${PORT:-8000}, not APP_PORT — the two
# only agree via the shared default 8000. Confirm which variable the
# deployment platform actually sets.
EXPOSE ${APP_PORT}
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
0
test-sx-web/__init__.py
Normal file
0
test-sx-web/__init__.py
Normal file
46
test-sx-web/app.py
Normal file
46
test-sx-web/app.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from __future__ import annotations
|
||||
import path_setup # noqa: F401
|
||||
|
||||
from bp import register_dashboard
|
||||
from services import register_domain_services
|
||||
|
||||
|
||||
async def test_context() -> dict:
    """Context processor for the standalone test app.

    Starts from the shared base context and blanks out every cross-service
    fragment — the dashboard renders with no menus, cart, auth, or nav tree.
    """
    from shared.infrastructure.context import base_context

    ctx = await base_context()
    ctx.update(menu_items=[], cart_mini="", auth_menu="", nav_tree="")
    return ctx
|
||||
|
||||
|
||||
def create_app() -> "Quart":
    """Build the standalone test-dashboard app (no OAuth, no database).

    Order matters: sx components must be loaded before the dashboard
    blueprint renders anything, and the startup test run is scheduled as a
    background task so serving is not blocked.
    """
    from shared.infrastructure.factory import create_base_app
    app = create_base_app(
        "test",
        context_fn=test_context,
        domain_services_fn=register_domain_services,
        no_oauth=True,
        no_db=True,
    )

    # Load .sx components from this app's directory
    import os
    from shared.sx.jinja_bridge import load_service_components
    load_service_components(os.path.dirname(__file__))

    app.register_blueprint(register_dashboard(url_prefix="/"))

    # Run tests on startup — fire-and-forget so startup isn't blocked.
    @app.before_serving
    async def _run_tests_on_startup():
        import runner
        import asyncio
        asyncio.create_task(runner.run_tests())

    return app
|
||||
|
||||
|
||||
# Module-level ASGI app object — the hypercorn target named by APP_MODULE=app:app.
app = create_app()
|
||||
1
test-sx-web/bp/__init__.py
Normal file
1
test-sx-web/bp/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .dashboard.routes import register as register_dashboard
|
||||
0
test-sx-web/bp/dashboard/__init__.py
Normal file
0
test-sx-web/bp/dashboard/__init__.py
Normal file
102
test-sx-web/bp/dashboard/routes.py
Normal file
102
test-sx-web/bp/dashboard/routes.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""Test dashboard routes."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from quart import Blueprint, Response, make_response, request
|
||||
|
||||
|
||||
def register(url_prefix: str = "/") -> Blueprint:
    """Build the dashboard blueprint: index, run trigger, test detail, results partial.

    App-module imports are deferred into the handlers so the blueprint can be
    constructed before the sx component environment is fully loaded.
    """
    bp = Blueprint("dashboard", __name__, url_prefix=url_prefix)

    @bp.get("/")
    async def index():
        """Full page dashboard with last results."""
        from shared.sx.page import get_template_context
        from shared.browser.app.csrf import generate_csrf_token
        from sxc.pages.renders import render_dashboard_page_sx
        import runner

        ctx = await get_template_context()
        result = runner.get_results()
        running = runner.is_running()
        csrf = generate_csrf_token()
        # Optional query-string filters: ?filter=<outcome>&service=<name>
        active_filter = request.args.get("filter")
        active_service = request.args.get("service")

        html = await render_dashboard_page_sx(
            ctx, result, running, csrf,
            active_filter=active_filter,
            active_service=active_service,
        )
        return await make_response(html, 200)

    @bp.post("/run")
    async def run():
        """Trigger a test run, redirect to /."""
        import runner

        # Fire-and-forget: the run continues after this response is sent.
        if not runner.is_running():
            asyncio.create_task(runner.run_tests())

        # HX-Redirect for HTMX, regular redirect for non-HTMX
        if request.headers.get("SX-Request") or request.headers.get("HX-Request"):
            resp = Response("", status=200)
            resp.headers["HX-Redirect"] = "/"
            return resp

        from quart import redirect as qredirect
        return qredirect("/")

    @bp.get("/test/<path:nodeid>")
    async def test_detail(nodeid: str):
        """Test detail view — full page or sx wire format."""
        import runner

        test = runner.get_test(nodeid)
        if not test:
            from quart import abort
            abort(404)

        is_htmx = bool(request.headers.get("SX-Request") or request.headers.get("HX-Request"))

        if is_htmx:
            # S-expression wire format — sx.js renders client-side
            from shared.sx.helpers import sx_response
            from sxc.pages.renders import test_detail_sx
            return sx_response(await test_detail_sx(test))

        # Full page render (direct navigation / refresh)
        from shared.sx.page import get_template_context
        from sxc.pages.renders import render_test_detail_page_sx

        ctx = await get_template_context()
        html = await render_test_detail_page_sx(ctx, test)
        return await make_response(html, 200)

    @bp.get("/results")
    async def results():
        """HTMX partial — poll target for results table."""
        from shared.browser.app.csrf import generate_csrf_token
        from sxc.pages.renders import render_results_partial_sx
        import runner

        result = runner.get_results()
        running = runner.is_running()
        csrf = generate_csrf_token()
        active_filter = request.args.get("filter")
        active_service = request.args.get("service")

        html = await render_results_partial_sx(
            result, running, csrf,
            active_filter=active_filter,
            active_service=active_service,
        )

        resp = Response(html, status=200, content_type="text/html")
        # If still running, tell HTMX to keep polling
        if running:
            resp.headers["HX-Trigger-After-Swap"] = "test-still-running"
        return resp

    return bp
|
||||
25
test-sx-web/entrypoint.sh
Executable file
25
test-sx-web/entrypoint.sh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# No database — skip DB wait and migrations

# Clear Redis page cache on deploy (best-effort: failure is logged, not fatal)
if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
    python3 -c "
import redis, os
r = redis.from_url(os.environ['REDIS_URL'])
r.flushdb()
" || echo "Redis flush failed (non-fatal), continuing..."
fi

# Start the app. With RELOAD=true, also run the dev file watcher in the
# background and pass --reload to hypercorn.
RELOAD_FLAG=""
if [[ "${RELOAD:-}" == "true" ]]; then
    RELOAD_FLAG="--reload"
    python3 -m shared.dev_watcher &
fi
# NOTE(review): binds on PORT while the Dockerfile sets/EXPOSEs APP_PORT —
# confirm which variable deployments actually provide (defaults agree on 8000).
# RELOAD_FLAG is deliberately unquoted so an empty value expands to nothing.
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" \
    --bind 0.0.0.0:${PORT:-8000} \
    --workers ${WORKERS:-1} \
    --keep-alive 75 \
    ${RELOAD_FLAG}
|
||||
9
test-sx-web/path_setup.py
Normal file
9
test-sx-web/path_setup.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""Make the project root and this app directory importable.

Imported purely for its side effect on ``sys.path``.
"""
import os
import sys

_app_dir = os.path.dirname(os.path.abspath(__file__))
_project_root = os.path.dirname(_app_dir)

# Insert the root first, then the app dir, so the app dir ends up in front.
for _candidate in (_project_root, _app_dir):
    if _candidate in sys.path:
        continue
    sys.path.insert(0, _candidate)
|
||||
213
test-sx-web/runner.py
Normal file
213
test-sx-web/runner.py
Normal file
@@ -0,0 +1,213 @@
|
||||
"""Pytest subprocess runner + in-memory result storage."""
from __future__ import annotations

import asyncio
import json
import logging
import os
import time
from collections import OrderedDict
from pathlib import Path

log = logging.getLogger(__name__)

# In-memory state — single-process only, mutated from the event loop.
_last_result: dict | None = None  # most recent run_tests() payload; None before first run
_running: bool = False            # True while a run_tests() invocation is in flight

# Each service group runs in its own pytest subprocess with its own PYTHONPATH
_SERVICE_GROUPS: list[dict] = [
    {"name": "shared", "dirs": ["shared/tests/", "shared/sx/tests/"],
     "pythonpath": None},
    {"name": "sx_docs", "dirs": ["sx_docs/tests/"],
     "pythonpath": "/app/sx_docs"},
]

# Display/grouping order for services; per-group JSON report file template.
_SERVICE_ORDER = [g["name"] for g in _SERVICE_GROUPS]
_REPORT_PATH = "/tmp/test-report-{}.json"
|
||||
|
||||
def _parse_report(path: str) -> tuple[list[dict], dict]:
|
||||
"""Parse a pytest-json-report file."""
|
||||
rp = Path(path)
|
||||
if not rp.exists():
|
||||
return [], {}
|
||||
try:
|
||||
report = json.loads(rp.read_text())
|
||||
except (json.JSONDecodeError, OSError):
|
||||
return [], {}
|
||||
|
||||
summary = report.get("summary", {})
|
||||
tests_raw = report.get("tests", [])
|
||||
|
||||
tests = []
|
||||
for t in tests_raw:
|
||||
tests.append({
|
||||
"nodeid": t.get("nodeid", ""),
|
||||
"outcome": t.get("outcome", "unknown"),
|
||||
"duration": round(t.get("duration", 0), 4),
|
||||
"longrepr": (t.get("call", {}) or {}).get("longrepr", ""),
|
||||
})
|
||||
return tests, summary
|
||||
|
||||
|
||||
async def _run_group(group: dict) -> tuple[list[dict], dict, str]:
    """Run pytest for a single service group in a subprocess.

    Returns (tests, summary, combined stdout/stderr text); everything empty
    when none of the group's test directories exist under /app.
    """
    existing = [d for d in group["dirs"] if Path(f"/app/{d}").is_dir()]
    if not existing:
        return [], {}, ""

    report_file = _REPORT_PATH.format(group["name"])
    cmd = [
        "python3", "-m", "pytest",
        *existing,
        "--json-report",
        f"--json-report-file={report_file}",
        "-q",
        "--tb=short",
    ]
    env = {**os.environ}
    if group["pythonpath"]:
        # Prepend the group's path. Fix over the previous version: only join
        # with ":" when PYTHONPATH is already set — unconditionally appending
        # ":" left a trailing empty entry, which Python interprets as the
        # current working directory.
        current = env.get("PYTHONPATH", "")
        env["PYTHONPATH"] = (
            f"{group['pythonpath']}:{current}" if current else group["pythonpath"]
        )

    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT,  # interleave stderr into stdout
        cwd="/app",
        env=env,
    )
    stdout, _ = await proc.communicate()
    stdout_str = (stdout or b"").decode("utf-8", errors="replace")
    tests, summary = _parse_report(report_file)
    return tests, summary, stdout_str
|
||||
|
||||
|
||||
async def run_tests() -> dict:
    """Run pytest in subprocesses, parse JSON reports, store results.

    All service groups run concurrently; their summaries are aggregated into
    a single dict stored in module-level ``_last_result`` and returned.
    A call while a run is already active returns ``{"status": "already_running"}``
    without starting anything.
    """
    global _last_result, _running

    if _running:
        return {"status": "already_running"}

    _running = True
    started_at = time.time()

    try:
        # One subprocess per service group, run concurrently.
        tasks = [_run_group(g) for g in _SERVICE_GROUPS]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        all_tests: list[dict] = []
        total_passed = total_failed = total_errors = total_skipped = total_count = 0
        all_stdout: list[str] = []

        for i, res in enumerate(results):
            if isinstance(res, Exception):
                # A crashed group is logged and skipped; other groups still count.
                log.error("Group %s failed: %s", _SERVICE_GROUPS[i]["name"], res)
                continue
            tests, summary, stdout_str = res
            all_tests.extend(tests)
            total_passed += summary.get("passed", 0)
            total_failed += summary.get("failed", 0)
            total_errors += summary.get("error", 0)
            total_skipped += summary.get("skipped", 0)
            total_count += summary.get("total", len(tests))
            if stdout_str.strip():
                all_stdout.append(stdout_str)

        finished_at = time.time()
        status = "failed" if total_failed > 0 or total_errors > 0 else "passed"

        _last_result = {
            "status": status,
            "started_at": started_at,
            "finished_at": finished_at,
            "duration": round(finished_at - started_at, 2),
            "passed": total_passed,
            "failed": total_failed,
            "errors": total_errors,
            "skipped": total_skipped,
            "total": total_count,
            "tests": all_tests,
            # Keep only the tail of combined output to bound memory.
            "stdout": "\n".join(all_stdout)[-5000:],
        }

        log.info(
            "Test run complete: %s (%d passed, %d failed, %d errors, %.1fs)",
            status, total_passed, total_failed, total_errors,
            _last_result["duration"],
        )
        return _last_result

    except Exception:
        # Unexpected failure: record a synthetic "error" result so the UI has
        # something to show instead of stale or missing data.
        log.exception("Test run failed")
        finished_at = time.time()
        _last_result = {
            "status": "error",
            "started_at": started_at,
            "finished_at": finished_at,
            "duration": round(finished_at - started_at, 2),
            "passed": 0,
            "failed": 0,
            "errors": 1,
            "skipped": 0,
            "total": 0,
            "tests": [],
            "stdout": "",
        }
        return _last_result
    finally:
        _running = False
|
||||
|
||||
|
||||
def get_results() -> dict | None:
    """Return the most recent run payload, or None if nothing has run yet."""
    return _last_result
|
||||
|
||||
|
||||
def get_test(nodeid: str) -> dict | None:
    """Find a single test record by its pytest nodeid, or None if absent."""
    if not _last_result:
        return None
    return next(
        (t for t in _last_result["tests"] if t["nodeid"] == nodeid),
        None,
    )
|
||||
|
||||
|
||||
def is_running() -> bool:
    """Report whether a test run is currently in flight."""
    return bool(_running)
|
||||
|
||||
|
||||
def _service_from_nodeid(nodeid: str) -> str:
|
||||
"""Extract service name from a test nodeid."""
|
||||
parts = nodeid.split("/")
|
||||
return parts[0] if len(parts) >= 2 else "other"
|
||||
|
||||
|
||||
def group_tests_by_service(tests: list[dict]) -> list[dict]:
    """Group tests into ordered sections by service.

    Known services (from _SERVICE_ORDER) come first; unknown services are
    appended in first-seen order. Sections with no tests are dropped.
    """
    grouped: dict[str, list[dict]] = OrderedDict(
        (svc, []) for svc in _SERVICE_ORDER
    )
    for test in tests:
        grouped.setdefault(_service_from_nodeid(test["nodeid"]), []).append(test)

    def _tally(members: list[dict], outcome: str) -> int:
        # Count members with the given pytest outcome.
        return sum(1 for m in members if m["outcome"] == outcome)

    return [
        {
            "service": svc,
            "tests": members,
            "total": len(members),
            "passed": _tally(members, "passed"),
            "failed": _tally(members, "failed"),
            "errors": _tally(members, "error"),
            "skipped": _tally(members, "skipped"),
        }
        for svc, members in grouped.items()
        if members
    ]
|
||||
6
test-sx-web/services/__init__.py
Normal file
6
test-sx-web/services/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Test app service registration."""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
def register_domain_services() -> None:
    """No-op: the standalone test app requires no domain services."""
    return None
|
||||
95
test-sx-web/sx/components.sx
Normal file
95
test-sx-web/sx/components.sx
Normal file
@@ -0,0 +1,95 @@
|
||||
;; Test service composition defcomps — replaces Python string concatenation
;; in test/sxc/pages/__init__.py.

;; Service filter nav links: "all" plus one link per service name.
(defcomp ~test-service-nav (&key services active-service)
  (<>
    (~nav-link :href "/" :label "all"
      :is-selected (if (not active-service) "true" nil)
      :select-colours "aria-selected:bg-sky-200 aria-selected:text-sky-900")
    (map (lambda (svc)
           (~nav-link :href (str "/?service=" svc) :label svc
             :is-selected (if (= active-service svc) "true" nil)
             :select-colours "aria-selected:bg-sky-200 aria-selected:text-sky-900"))
         services)))

;; Test header menu row
(defcomp ~test-header-row (&key services active-service)
  (~menu-row-sx :id "test-row" :level 1 :colour "sky"
    :link-href "/" :link-label "Tests" :icon "fa fa-flask"
    :nav (~test-service-nav :services services :active-service active-service)
    :child-id "test-header-child"))

;; Layout: full page header stack (standalone — no root-header-auto)
(defcomp ~test-layout-full (&key services active-service)
  (~test-header-row :services services :active-service active-service))

;; Map test dicts to test-row components
(defcomp ~test-rows (&key tests)
  (<> (map (lambda (t)
             (~test-row
               :nodeid (get t "nodeid")
               :outcome (get t "outcome")
               :duration (str (get t "duration"))
               :longrepr (or (get t "longrepr") "")))
           tests)))

;; Grouped test rows with service headers
(defcomp ~test-grouped-rows (&key sections)
  (<> (map (lambda (sec)
             (<> (~test-service-header
                   :service (get sec "service")
                   :total (str (get sec "total"))
                   :passed (str (get sec "passed"))
                   :failed (str (get sec "failed")))
                 (~test-rows :tests (get sec "tests"))))
           sections)))

;; Results partial: conditional rendering based on running/result state.
;; summary-data is presumably the dict built by renders._build_summary_data
;; (keys match) — verify when changing either side.
(defcomp ~test-results-partial (&key status summary-data tests sections has-failures)
  (let* ((state (get summary-data "state")))
    (<>
      (~test-summary
        :status (get summary-data "status")
        :passed (get summary-data "passed")
        :failed (get summary-data "failed")
        :errors (get summary-data "errors")
        :skipped (get summary-data "skipped")
        :total (get summary-data "total")
        :duration (get summary-data "duration")
        :last-run (get summary-data "last_run")
        :running (get summary-data "running")
        :csrf (get summary-data "csrf")
        :active-filter (get summary-data "active_filter"))
      (cond
        ((= state "running") (~test-running-indicator))
        ((= state "no-results") (~test-no-results))
        ((= state "empty-filtered") (~test-no-results))
        (true (~test-results-table
                :rows (~test-grouped-rows :sections sections)
                :has-failures has-failures))))))

;; Wrap results in a div with optional HTMX polling (only while running)
(defcomp ~test-results-wrap (&key running inner)
  (div :id "test-results" :class "space-y-6 p-4"
    :sx-get (when running "/results")
    :sx-trigger (when running "every 2s")
    :sx-swap (when running "outerHTML")
    inner))

;; Test detail section wrapper
(defcomp ~test-detail-section (&key test)
  (section :id "main-panel"
    :class "flex-1 md:h-full md:min-h-0 overflow-y-auto overscroll-contain js-grid-viewport"
    (~test-detail
      :nodeid (get test "nodeid")
      :outcome (get test "outcome")
      :duration (str (get test "duration"))
      :longrepr (or (get test "longrepr") ""))))

;; Detail page header stack (standalone — no root-header-auto)
(defcomp ~test-detail-layout-full (&key services test-nodeid test-label)
  (<> (~test-header-row :services services)
      (~menu-row-sx :id "test-detail-row" :level 2 :colour "sky"
        :link-href (str "/test/" test-nodeid)
        :link-label test-label)))
|
||||
148
test-sx-web/sx/dashboard.sx
Normal file
148
test-sx-web/sx/dashboard.sx
Normal file
@@ -0,0 +1,148 @@
|
||||
;; Test dashboard components

;; Pill badge coloured by overall run status.
(defcomp ~test-status-badge (&key status)
  (span :class (str "inline-flex items-center rounded-full border px-3 py-1 text-sm font-medium "
                 (if (= status "running") "border-amber-300 bg-amber-50 text-amber-700 animate-pulse"
                   (if (= status "passed") "border-emerald-300 bg-emerald-50 text-emerald-700"
                     (if (= status "failed") "border-rose-300 bg-rose-50 text-rose-700"
                       "border-stone-300 bg-stone-50 text-stone-700"))))
    status))

;; POST /run form; the button is disabled while a run is active.
(defcomp ~test-run-button (&key running csrf)
  (form :method "POST" :action "/run" :class "inline"
    (input :type "hidden" :name "csrf_token" :value csrf)
    (button :type "submit"
      :class (str "rounded bg-stone-800 px-4 py-2 text-sm font-medium text-white hover:bg-stone-700 "
               "disabled:opacity-50 disabled:cursor-not-allowed transition-colors")
      :disabled (if running "true" nil)
      (if running "Running..." "Run Tests"))))

;; Clickable stat card that applies a filter via HTMX partial swap.
(defcomp ~test-filter-card (&key href label count colour-border colour-bg colour-text active)
  (a :href href
    :sx-get href
    :sx-target "#main-panel"
    :sx-select "#main-panel"
    :sx-swap "outerHTML"
    :sx-push-url "true"
    :class (str "block rounded border p-3 text-center transition-colors no-underline hover:opacity-80 "
             colour-border " " colour-bg " "
             (if active "ring-2 ring-offset-1 ring-stone-500 " ""))
    (div :class (str "text-3xl font-bold " colour-text) count)
    (div :class (str "text-sm " colour-text) label)))

;; Header block: title + status badge + run button + per-outcome stat cards.
(defcomp ~test-summary (&key status passed failed errors skipped total duration last-run running csrf active-filter)
  (div :class "space-y-4"
    (div :class "flex items-center justify-between flex-wrap gap-3"
      (div :class "flex items-center gap-3"
        (h2 :class "text-2xl font-semibold text-stone-800" "Test Results")
        (when status (~test-status-badge :status status)))
      (~test-run-button :running running :csrf csrf))
    (when status
      (div :class "grid grid-cols-2 sm:grid-cols-3 md:grid-cols-6 gap-3"
        (~test-filter-card :href "/" :label "Total" :count total
          :colour-border "border-stone-200" :colour-bg "bg-white"
          :colour-text "text-stone-800"
          :active (if (= active-filter nil) "true" nil))
        (~test-filter-card :href "/?filter=passed" :label "Passed" :count passed
          :colour-border "border-emerald-200" :colour-bg "bg-emerald-50"
          :colour-text "text-emerald-700"
          :active (if (= active-filter "passed") "true" nil))
        (~test-filter-card :href "/?filter=failed" :label "Failed" :count failed
          :colour-border "border-rose-200" :colour-bg "bg-rose-50"
          :colour-text "text-rose-700"
          :active (if (= active-filter "failed") "true" nil))
        (~test-filter-card :href "/?filter=errors" :label "Errors" :count errors
          :colour-border "border-orange-200" :colour-bg "bg-orange-50"
          :colour-text "text-orange-700"
          :active (if (= active-filter "errors") "true" nil))
        (~test-filter-card :href "/?filter=skipped" :label "Skipped" :count skipped
          :colour-border "border-sky-200" :colour-bg "bg-sky-50"
          :colour-text "text-sky-700"
          :active (if (= active-filter "skipped") "true" nil))
        (~test-filter-card :href "/" :label "Duration" :count (str duration "s")
          :colour-border "border-stone-200" :colour-bg "bg-white"
          :colour-text "text-stone-800" :active nil))
      (div :class "text-sm text-stone-400" (str "Last run: " last-run)))))

;; Section header row spanning the whole results table.
(defcomp ~test-service-header (&key service total passed failed)
  (tr :class "border-b-2 border-stone-300 bg-stone-100"
    (td :class "px-3 py-2 text-sm font-bold text-stone-700" :colspan "4"
      (span service)
      (span :class "ml-2 text-xs font-normal text-stone-500"
        (str total " tests, " passed " passed, " failed " failed")))))

;; One table row per test; row tint and badge colours follow the outcome.
(defcomp ~test-row (&key nodeid outcome duration longrepr)
  (tr :class (str "border-b border-stone-100 "
               (if (= outcome "passed") "bg-white"
                 (if (= outcome "failed") "bg-rose-50"
                   (if (= outcome "skipped") "bg-sky-50"
                     "bg-orange-50"))))
    (td :class "px-3 py-2 text-sm font-mono text-stone-700 max-w-0 truncate" :title nodeid
      (a :href (str "/test/" nodeid)
        :sx-get (str "/test/" nodeid)
        :sx-target "#main-panel"
        :sx-select "#main-panel"
        :sx-swap "outerHTML"
        :sx-push-url "true"
        :class "hover:underline hover:text-sky-600"
        nodeid))
    (td :class "px-3 py-2 text-center"
      (span :class (str "inline-flex items-center rounded-full border px-2 py-0.5 text-xs font-medium "
                     (if (= outcome "passed") "border-emerald-300 bg-emerald-50 text-emerald-700"
                       (if (= outcome "failed") "border-rose-300 bg-rose-50 text-rose-700"
                         (if (= outcome "skipped") "border-sky-300 bg-sky-50 text-sky-700"
                           "border-orange-300 bg-orange-50 text-orange-700"))))
        outcome))
    (td :class "px-3 py-2 text-right text-sm text-stone-500 tabular-nums" (str duration "s"))
    (td :class "px-3 py-2 text-sm text-rose-600 font-mono max-w-xs truncate" :title longrepr
      (when longrepr longrepr))))

;; Results table shell; rows are injected via :rows.
(defcomp ~test-results-table (&key rows has-failures)
  (div :class "overflow-x-auto rounded border border-stone-200 bg-white"
    (table :class "w-full text-left"
      (thead
        (tr :class "border-b border-stone-200 bg-stone-50"
          (th :class "px-3 py-2 text-sm font-medium text-stone-600" "Test")
          (th :class "px-3 py-2 text-xs font-medium text-stone-600 text-center w-24" "Status")
          (th :class "px-3 py-2 text-xs font-medium text-stone-600 text-right w-20" "Time")
          (th :class "px-3 py-2 text-xs font-medium text-stone-600 w-48" "Error")))
      (tbody (when rows rows)))))

;; Spinner shown while a run is in flight.
(defcomp ~test-running-indicator ()
  (div :class "flex items-center justify-center py-12 text-stone-500"
    (div :class "flex items-center gap-3"
      (div :class "animate-spin h-6 w-6 border-2 border-stone-300 border-t-stone-600 rounded-full")
      (span :class "text-sm" "Running tests..."))))

;; Placeholder before the first run (also used for empty filtered views).
(defcomp ~test-no-results ()
  (div :class "flex items-center justify-center py-12 text-stone-400"
    (div :class "text-center"
      (div :class "text-4xl mb-2" "?")
      (div :class "text-sm" "No test results yet. Click Run Tests to start."))))

;; Single-test detail panel: back link, outcome badge, metadata, error output.
(defcomp ~test-detail (&key nodeid outcome duration longrepr)
  (div :class "space-y-6 p-4"
    (div :class "flex items-center gap-3"
      (a :href "/"
        :sx-get "/"
        :sx-target "#main-panel"
        :sx-select "#main-panel"
        :sx-swap "outerHTML"
        :sx-push-url "true"
        :class "text-sky-600 hover:text-sky-800 text-sm"
        "← Back to results")
      (span :class (str "inline-flex items-center rounded-full border px-2 py-0.5 text-xs font-medium "
                     (if (= outcome "passed") "border-emerald-300 bg-emerald-50 text-emerald-700"
                       (if (= outcome "failed") "border-rose-300 bg-rose-50 text-rose-700"
                         (if (= outcome "skipped") "border-sky-300 bg-sky-50 text-sky-700"
                           "border-orange-300 bg-orange-50 text-orange-700"))))
        outcome))
    (div :class "rounded border border-stone-200 bg-white p-4 space-y-3"
      (h2 :class "text-lg font-mono font-semibold text-stone-800 break-all" nodeid)
      (div :class "flex gap-4 text-sm text-stone-500"
        (span (str "Duration: " duration "s")))
      (when longrepr
        (div :class "mt-4"
          (h3 :class "text-sm font-semibold text-rose-700 mb-2" "Error Output")
          (pre :class "bg-stone-50 border border-stone-200 rounded p-3 text-xs text-stone-700 overflow-x-auto whitespace-pre-wrap"
            longrepr))))))
|
||||
0
test-sx-web/sxc/__init__.py
Normal file
0
test-sx-web/sxc/__init__.py
Normal file
13
test-sx-web/sxc/pages/__init__.py
Normal file
13
test-sx-web/sxc/pages/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Test service defpage setup."""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
def setup_test_pages() -> None:
    """Load test page definitions (entry point for app startup)."""
    _load_test_page_files()
|
||||
|
||||
|
||||
def _load_test_page_files() -> None:
    """Register every defpage file in this directory under the "test" service."""
    import os
    from shared.sx.pages import load_page_dir
    load_page_dir(os.path.dirname(__file__), "test")
|
||||
145
test-sx-web/sxc/pages/renders.py
Normal file
145
test-sx-web/sxc/pages/renders.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""Test service render functions — called from bp routes."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from shared.sx.jinja_bridge import load_service_components
|
||||
from shared.sx.helpers import sx_call, render_to_sx_with_env, full_page_sx
|
||||
|
||||
# Load test-specific .sx components at import time. Three dirname hops walk
# pages/ -> sxc/ -> the app root that contains the sx/ component directory.
load_service_components(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
|
||||
|
||||
|
||||
def _format_time(ts: float | None) -> str:
|
||||
"""Format a unix timestamp for display."""
|
||||
if not ts:
|
||||
return "never"
|
||||
return datetime.fromtimestamp(ts).strftime("%-d %b %Y, %H:%M:%S")
|
||||
|
||||
|
||||
# Maps the ?filter= query value to the pytest outcome string it selects.
# Note the plural "errors" key maps to the singular "error" outcome.
_FILTER_MAP = {
    "passed": "passed",
    "failed": "failed",
    "errors": "error",
    "skipped": "skipped",
}
|
||||
|
||||
|
||||
def _filter_tests(tests: list[dict], active_filter: str | None,
                  active_service: str | None) -> list[dict]:
    """Filter tests by outcome and/or service; unknown filters are ignored."""
    from runner import _service_from_nodeid

    selected = tests
    wanted = _FILTER_MAP.get(active_filter) if active_filter else None
    if wanted is not None:
        selected = [t for t in selected if t["outcome"] == wanted]
    if active_service:
        selected = [
            t for t in selected
            if _service_from_nodeid(t["nodeid"]) == active_service
        ]
    return selected
|
||||
|
||||
|
||||
def _service_list() -> list[str]:
    """Ordered service names, copied so callers may mutate freely."""
    from runner import _SERVICE_ORDER
    return [*_SERVICE_ORDER]
|
||||
|
||||
|
||||
def _build_summary_data(result: dict | None, running: bool, csrf: str,
|
||||
active_filter: str | None) -> dict:
|
||||
"""Prepare summary data dict for the ~test-results-partial defcomp."""
|
||||
if running and not result:
|
||||
return dict(state="running", status="running", passed="0", failed="0",
|
||||
errors="0", skipped="0", total="0", duration="...",
|
||||
last_run="in progress", running=True, csrf=csrf,
|
||||
active_filter=active_filter)
|
||||
if not result:
|
||||
return dict(state="no-results", status=None, passed="0", failed="0",
|
||||
errors="0", skipped="0", total="0", duration="0",
|
||||
last_run="never", running=running, csrf=csrf,
|
||||
active_filter=active_filter)
|
||||
status = "running" if running else result["status"]
|
||||
return dict(
|
||||
state="running" if running else "has-results",
|
||||
status=status,
|
||||
passed=str(result["passed"]),
|
||||
failed=str(result["failed"]),
|
||||
errors=str(result["errors"]),
|
||||
skipped=str(result.get("skipped", 0)),
|
||||
total=str(result["total"]),
|
||||
duration=str(result["duration"]),
|
||||
last_run=_format_time(result["finished_at"]) if not running else "in progress",
|
||||
running=running, csrf=csrf,
|
||||
active_filter=active_filter,
|
||||
)
|
||||
|
||||
|
||||
async def test_detail_sx(test: dict) -> str:
    """Render one test's detail view in s-expression wire format."""
    payload = sx_call("test-detail-section", test=test)
    return payload
|
||||
|
||||
|
||||
async def render_dashboard_page_sx(ctx: dict, result: dict | None,
                                   running: bool, csrf: str,
                                   active_filter: str | None = None,
                                   active_service: str | None = None) -> str:
    """Full page: test dashboard (sx wire format).

    Builds the summary header plus per-service result sections; when the
    active filters match no tests, the summary switches to the
    "empty-filtered" state instead of showing sections.
    """
    from runner import group_tests_by_service

    summary = _build_summary_data(result, running, csrf, active_filter)
    grouped: list = []
    failures_flag = "false"
    if result and not running:
        visible = _filter_tests(result.get("tests", []), active_filter, active_service)
        if not visible:
            summary["state"] = "empty-filtered"
        else:
            grouped = group_tests_by_service(visible)
            any_red = result["failed"] > 0 or result["errors"] > 0
            failures_flag = "true" if any_red else "false"

    partial = sx_call("test-results-partial", summary_data=summary,
                      sections=grouped, has_failures=failures_flag)
    wrapped = sx_call("test-results-wrap", running=running, inner=partial)
    header = await render_to_sx_with_env(
        "test-layout-full", {},
        services=_service_list(),
        active_service=active_service,
    )
    return await full_page_sx(ctx, header_rows=header, content=wrapped)
|
||||
|
||||
|
||||
async def render_results_partial_sx(result: dict | None, running: bool,
                                    csrf: str,
                                    active_filter: str | None = None,
                                    active_service: str | None = None) -> str:
    """HTMX partial: results section only (sx wire format).

    Same assembly as the dashboard page minus the layout chrome, so
    HTMX swaps can refresh results in place.
    """
    from runner import group_tests_by_service

    summary = _build_summary_data(result, running, csrf, active_filter)
    grouped: list = []
    failures_flag = "false"
    if result and not running:
        visible = _filter_tests(result.get("tests", []), active_filter, active_service)
        if not visible:
            summary["state"] = "empty-filtered"
        else:
            grouped = group_tests_by_service(visible)
            any_red = result["failed"] > 0 or result["errors"] > 0
            failures_flag = "true" if any_red else "false"

    partial = sx_call("test-results-partial", summary_data=summary,
                      sections=grouped, has_failures=failures_flag)
    return sx_call("test-results-wrap", running=running, inner=partial)
|
||||
|
||||
|
||||
async def render_test_detail_page_sx(ctx: dict, test: dict) -> str:
    """Full page: detail view for a single test (sx wire format)."""
    nodeid = test["nodeid"]
    header = await render_to_sx_with_env(
        "test-detail-layout-full", {},
        services=_service_list(),
        test_nodeid=nodeid,
        # Label is the final "::" segment, i.e. the bare test name.
        test_label=nodeid.rsplit("::", 1)[-1],
    )
    body = sx_call(
        "test-detail",
        nodeid=nodeid,
        outcome=test["outcome"],
        duration=str(test["duration"]),
        longrepr=test.get("longrepr", ""),
    )
    return await full_page_sx(ctx, header_rows=header, content=body)
|
||||
184
test-sx-web/test-signals.js
Normal file
184
test-sx-web/test-signals.js
Normal file
@@ -0,0 +1,184 @@
|
||||
// Minimal browser-environment shims so sx-browser.js (a browser bundle)
// can be eval'd under Node for signal/effect smoke tests.
const fs = require('fs');
// NOTE(review): `src` is read but never used below — the script evals the
// patched copy from /tmp instead. Presumably leftover; confirm and remove.
const src = fs.readFileSync('shared/static/scripts/sx-browser.js', 'utf8');

// window: just enough surface for event wiring and history/location reads.
global.window = { addEventListener: function(){}, history: { pushState:function(){}, replaceState:function(){} }, location: { pathname:'/', search:'' } };
// document: inert stubs — every factory returns a do-nothing object and
// every query returns null/empty so boot-time scans find nothing.
global.document = {
  readyState: 'complete',
  createElement: function() { return { setAttribute:function(){}, appendChild:function(){}, style:{}, addEventListener:function(){} }; },
  createDocumentFragment: function() { return { appendChild:function(){}, childNodes:[] }; },
  createTextNode: function(t) { return { textContent: t, nodeType: 3 }; },
  createElementNS: function() { return { setAttribute:function(){}, appendChild:function(){} }; },
  head: { querySelector:function(){return null;}, appendChild:function(){} },
  body: { querySelectorAll:function(){return [];}, querySelector:function(){return null;}, getAttribute:function(){return null;} },
  querySelectorAll: function(){return [];},
  querySelector: function(){return null;},
  addEventListener: function(){},
  cookie: ''
};
// navigator.serviceWorker.register returns a thenable so a .then().catch()
// chain in the bundle doesn't throw.
global.navigator = { serviceWorker: { register: function() { return { then: function(f) { return { catch: function(){} }; } }; } } };
global.localStorage = { getItem:function(){return null;}, setItem:function(){}, removeItem:function(){} };
// Bare-bones constructors for globals the bundle may instantiate.
global.CustomEvent = function(n,o){ this.type=n; this.detail=(o||{}).detail; };
global.MutationObserver = function(){ return { observe:function(){} }; };
global.HTMLElement = function(){};
global.EventSource = function(){};
|
||||
|
||||
// Prevent module.exports detection so it sets global.Sx.
// Hiding `module` makes the bundle's UMD-style check fall through to the
// global-assignment branch instead of exporting via CommonJS.
var _module = module;
module = undefined;
// NOTE(review): evals a pre-patched copy from /tmp — the on-disk original
// read into `src` above is never executed; confirm that is intentional.
var patchedSrc = fs.readFileSync('/tmp/sx-browser-patched.js', 'utf8');
eval(patchedSrc);
module = _module;  // restore CommonJS module object
var Sx = global.Sx;
console.log('Sx loaded:', Sx ? true : false);

// Child env of the component environment, so test bindings don't pollute it.
var env = Object.create(Sx.componentEnv);
|
||||
|
||||
// Tests 1-5: evaluate small s-expressions against the interpreter and
// print PASS/FAIL by comparing the returned value. Each test is wrapped
// in try/catch so one failure doesn't abort the rest.

// Test 1: computed with SX lambda — computed cell derives 2*a.
try {
  var r = Sx.eval(Sx.parse('(let ((a (signal 3)) (b (computed (fn () (* 2 (deref a)))))) (deref b))')[0], env);
  console.log('TEST 1 computed:', r, '(expected 6)', r === 6 ? 'PASS' : 'FAIL');
} catch(e) {
  console.log('TEST 1 computed ERROR:', e.message);
}

// Test 2: swap! with dec — in-place update through a named function.
try {
  var r2 = Sx.eval(Sx.parse('(let ((s (signal 10))) (swap! s dec) (deref s))')[0], env);
  console.log('TEST 2 swap!:', r2, '(expected 9)', r2 === 9 ? 'PASS' : 'FAIL');
} catch(e) {
  console.log('TEST 2 swap! ERROR:', e.message);
}

// Test 3: effect with SX lambda — effect runs once eagerly, logging the
// initial signal value (first log entry stays 0 after the reset!).
try {
  var r3 = Sx.eval(Sx.parse('(let ((s (signal 0)) (log (list)) (_e (effect (fn () (append! log (deref s)))))) (reset! s 1) (first log))')[0], env);
  console.log('TEST 3 effect:', r3, '(expected 0)', r3 === 0 ? 'PASS' : 'FAIL');
} catch(e) {
  console.log('TEST 3 effect ERROR:', e.message);
}

// Test 4: effect re-runs on change — last log entry reflects the new value.
try {
  var r4 = Sx.eval(Sx.parse('(let ((s (signal 0)) (log (list)) (_e (effect (fn () (append! log (deref s)))))) (reset! s 5) (last log))')[0], env);
  console.log('TEST 4 effect re-run:', r4, '(expected 5)', r4 === 5 ? 'PASS' : 'FAIL');
} catch(e) {
  console.log('TEST 4 effect re-run ERROR:', e.message);
}

// Test 5: on-click handler lambda — a stored lambda invoked twice bumps
// the counter to 2 (mimics an event-handler call path).
try {
  var r5 = Sx.eval(Sx.parse('(let ((s (signal 0)) (f (fn (e) (swap! s inc)))) (f nil) (f nil) (deref s))')[0], env);
  console.log('TEST 5 lambda call:', r5, '(expected 2)', r5 === 2 ? 'PASS' : 'FAIL');
} catch(e) {
  console.log('TEST 5 lambda call ERROR:', e.message);
}
|
||||
|
||||
// Test 6: defisland + renderToDom simulation
// Re-stubs the document with *tracking* versions so we can observe which
// event listeners renderToDom attaches, then clicks the rendered button
// twice and checks the counter span re-renders to "2".
try {
  // Override DOM stubs with tracking versions
  var listenCalls = [];
  // Override methods on the EXISTING document object (don't replace)
  document.createElement = function(tag) {
    var el = {
      tagName: tag,
      childNodes: [],
      style: {},
      setAttribute: function(k, v) { this['_attr_'+k] = v; },
      getAttribute: function(k) { return this['_attr_'+k]; },
      appendChild: function(c) { this.childNodes.push(c); return c; },
      // Record every listener registration for later inspection.
      addEventListener: function(name, fn) { listenCalls.push({el: this, name: name, fn: fn}); },
      removeEventListener: function() {},
      textContent: '',
      nodeType: 1
    };
    return el;
  };
  document.createTextNode = function(t) { return { textContent: String(t), nodeType: 3 }; };
  document.createDocumentFragment = function() {
    return {
      childNodes: [],
      appendChild: function(c) { if (c) this.childNodes.push(c); return c; },
      nodeType: 11
    };
  };

  // Fresh env so the island definition doesn't leak into earlier tests' env.
  var env2 = Object.create(Sx.componentEnv);

  // Define island
  Sx.eval(Sx.parse('(defisland ~test-click (&key initial) (let ((count (signal (or initial 0)))) (div (button :on-click (fn (e) (swap! count inc)) "Click") (span (deref count)))))')[0], env2);

  // Patch domListen to trace calls
  // The domListen is inside the IIFE closure, so we can't patch it directly.
  // Instead, patch addEventListener on elements by wrapping createElement
  // NOTE(review): both the element's own addEventListener AND this wrapper
  // push into listenCalls, so each registration is likely recorded twice —
  // confirm before relying on listenCalls.length.
  var origCE = document.createElement;
  document.createElement = function(tag) {
    var el = origCE(tag);
    var origAEL = el.addEventListener;
    el.addEventListener = function(name, fn) {
      console.log(' addEventListener called:', tag, name);
      listenCalls.push({el: el, name: name, fn: fn});
      origAEL.call(el, name, fn);
    };
    return el;
  };

  // Temporarily hook into isCallable and domListen for debugging
  // We can't patch the closure vars directly, but we can test via eval
  var testLambda = Sx.eval(Sx.parse('(fn (e) e)')[0], env2);
  console.log(' lambda type:', typeof testLambda, testLambda ? testLambda._lambda : 'no _lambda');
  console.log(' Sx.isTruthy(lambda):', Sx.isTruthy(testLambda));

  // Test what render-dom-element does with on-click
  // Simpler test: just a button with on-click, no island
  var parsed = Sx.parse('(button :on-click (fn (e) nil) "test")')[0];
  // Dump the parsed tree, tagging symbols/keywords for readability.
  console.log(' parsed expr:', JSON.stringify(parsed, function(k,v) {
    if (v && v._sym) return 'SYM:' + v.name;
    if (v && v._kw) return 'KW:' + v.name;
    return v;
  }));
  var simpleTest = Sx.renderToDom(parsed, env2, null);
  console.log(' simple button rendered:', simpleTest ? simpleTest.tagName : 'null');
  console.log(' listeners after simple:', listenCalls.length);

  // Render it
  var dom = Sx.renderToDom(Sx.parse('(~test-click :initial 0)')[0], env2, null);
  console.log('TEST 6 island rendered:', dom ? 'yes' : 'no');
  console.log(' listeners attached:', listenCalls.length);

  if (listenCalls.length > 0) {
    var clickHandler = listenCalls[0];
    console.log(' event name:', clickHandler.name);
    // Simulate click
    clickHandler.fn({type: 'click'});
    // Find the span's text node
    var container = dom; // div[data-sx-island]
    var innerDiv = container.childNodes[0]; // the body div
    console.log(' container tag:', container.tagName);
    console.log(' container children:', container.childNodes.length);
    if (innerDiv && innerDiv.childNodes) {
      console.log(' innerDiv tag:', innerDiv.tagName);
      console.log(' innerDiv children:', innerDiv.childNodes.length);
      var button = innerDiv.childNodes[0];
      var span = innerDiv.childNodes[1];
      console.log(' button tag:', button ? button.tagName : 'none');
      console.log(' span tag:', span ? span.tagName : 'none');
      if (span && span.childNodes && span.childNodes[0]) {
        console.log(' span text BEFORE click effect:', span.childNodes[0].textContent);
      }
    }
    // Click again
    clickHandler.fn({type: 'click'});
    if (innerDiv && innerDiv.childNodes) {
      // Re-read the span: the effect may have replaced its children.
      var span2 = innerDiv.childNodes[1];
      if (span2 && span2.childNodes && span2.childNodes[0]) {
        console.log(' span text AFTER 2 clicks:', span2.childNodes[0].textContent);
        console.log(' TEST 6:', span2.childNodes[0].textContent === '2' ? 'PASS' : 'FAIL (expected 2, got ' + span2.childNodes[0].textContent + ')');
      }
    }
  } else {
    console.log(' TEST 6: FAIL (no listeners attached)');
  }
} catch(e) {
  console.log('TEST 6 island ERROR:', e.message);
  // First few stack frames only, to keep the output readable.
  console.log(e.stack.split('\n').slice(0,5).join('\n'));
}
|
||||
Reference in New Issue
Block a user