Add test runner dashboard service (test.rose-ash.com)
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Public Quart microservice that runs pytest against shared/tests/ and shared/sexp/tests/, serving an HTMX-powered sexp-rendered dashboard with pass/fail/running status, auto-refresh polling, and re-run button. No database — results stored in memory. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
60
test/Dockerfile
Normal file
60
test/Dockerfile
Normal file
@@ -0,0 +1,60 @@
|
||||
# syntax=docker/dockerfile:1

# Image for the test-runner dashboard service: a Quart app served by hypercorn
# (started via entrypoint.sh). The service keeps results in memory — no DB.
FROM python:3.11-slim AS base

# PYTHONPATH=/app makes the flattened service modules and the copied sibling
# packages (shared/, blog/, market/, ...) importable as top-level packages.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONPATH=/app \
    PIP_NO_CACHE_DIR=1 \
    APP_PORT=8000 \
    APP_MODULE=app:app

WORKDIR /app

# Minimal system deps; only TLS roots are needed at runtime.
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*

# Shared requirements plus the test tooling this service needs at runtime
# (it runs pytest as a subprocess — see runner.py).
COPY shared/requirements.txt ./requirements.txt
RUN pip install -r requirements.txt && \
pip install pytest pytest-json-report

# Shared code (including tests)
COPY shared/ ./shared/

# App code
COPY test/ ./test-app-tmp/
# Move service files into /app (flatten), but keep Dockerfile.* in place
# NOTE(review): `2>/dev/null || true` makes this copy best-effort — a typo in
# any of these paths would be silently ignored and only fail at runtime.
RUN cp -r test-app-tmp/app.py test-app-tmp/path_setup.py \
test-app-tmp/bp test-app-tmp/sexp test-app-tmp/services \
test-app-tmp/runner.py test-app-tmp/__init__.py ./ 2>/dev/null || true && \
rm -rf test-app-tmp

# Sibling models for cross-domain SQLAlchemy imports
COPY blog/__init__.py ./blog/__init__.py
COPY blog/models/ ./blog/models/
COPY market/__init__.py ./market/__init__.py
COPY market/models/ ./market/models/
COPY cart/__init__.py ./cart/__init__.py
COPY cart/models/ ./cart/models/
COPY events/__init__.py ./events/__init__.py
COPY events/models/ ./events/models/
COPY federation/__init__.py ./federation/__init__.py
COPY federation/models/ ./federation/models/
COPY account/__init__.py ./account/__init__.py
COPY account/models/ ./account/models/
COPY relations/__init__.py ./relations/__init__.py
COPY relations/models/ ./relations/models/
COPY likes/__init__.py ./likes/__init__.py
COPY likes/models/ ./likes/models/
COPY orders/__init__.py ./orders/__init__.py
COPY orders/models/ ./orders/models/

COPY test/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Drop privileges: run as a dedicated non-root user.
RUN useradd -m -u 10001 appuser && chown -R appuser:appuser /app
USER appuser

# NOTE(review): EXPOSE uses APP_PORT, but entrypoint.sh binds ${PORT:-8000} —
# if either variable is overridden the two can diverge; confirm deploy config.
EXPOSE ${APP_PORT}
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||
0
test/__init__.py
Normal file
0
test/__init__.py
Normal file
44
test/app.py
Normal file
44
test/app.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
import path_setup # noqa: F401
|
||||
import sexp.sexp_components as sexp_components # noqa: F401
|
||||
|
||||
from shared.infrastructure.factory import create_base_app
|
||||
|
||||
from bp import register_dashboard
|
||||
from services import register_domain_services
|
||||
|
||||
|
||||
async def test_context() -> dict:
    """Context processor for the test app.

    Builds the shared base context and blanks every cross-service fragment —
    the test dashboard renders no menus, cart widget, auth menu, or nav tree.
    """
    from shared.infrastructure.context import base_context

    context = await base_context()
    context.update(
        menu_items=[],
        cart_mini_html="",
        auth_menu_html="",
        nav_tree_html="",
    )
    return context
|
||||
|
||||
|
||||
def create_app() -> "Quart":
    """Build and configure the test-dashboard Quart application.

    Wires the shared app factory with the minimal test context, registers the
    dashboard blueprint at the site root, and schedules one test run as soon
    as the server starts serving.
    """
    app = create_base_app(
        "test",
        context_fn=test_context,
        domain_services_fn=register_domain_services,
    )

    # Note: sexp.sexp_components is already imported at module level (its
    # import side effect loads the .sexpr components), so no re-import here.
    app.register_blueprint(register_dashboard(url_prefix="/"))

    # Run tests once on startup; fire-and-forget — the dashboard polls for
    # results, so the run must not block serving.
    @app.before_serving
    async def _run_tests_on_startup():
        import asyncio

        import runner

        asyncio.create_task(runner.run_tests())

    return app


app = create_app()
|
||||
1
test/bp/__init__.py
Normal file
1
test/bp/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .dashboard.routes import register as register_dashboard
|
||||
0
test/bp/dashboard/__init__.py
Normal file
0
test/bp/dashboard/__init__.py
Normal file
64
test/bp/dashboard/routes.py
Normal file
64
test/bp/dashboard/routes.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Test dashboard routes."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from quart import Blueprint, Response, make_response, request
|
||||
|
||||
|
||||
def register(url_prefix: str = "/") -> Blueprint:
    """Build the dashboard blueprint: index page, run trigger, results poll."""
    bp = Blueprint("dashboard", __name__, url_prefix=url_prefix)

    @bp.get("/")
    async def index():
        """Render the full dashboard page with the last run's results."""
        from shared.sexp.page import get_template_context
        from shared.browser.app.csrf import generate_csrf_token
        from sexp.sexp_components import render_dashboard_page
        import runner

        ctx = await get_template_context()
        html = await render_dashboard_page(
            ctx,
            runner.get_results(),
            runner.is_running(),
            generate_csrf_token(),
        )
        return await make_response(html, 200)

    @bp.post("/run")
    async def run():
        """Kick off a test run (if idle), then redirect back to the dashboard."""
        import runner

        if not runner.is_running():
            asyncio.create_task(runner.run_tests())

        # HTMX clients get a 200 with HX-Redirect (client-side redirect);
        # plain form POSTs get a classic HTTP redirect.
        if request.headers.get("HX-Request"):
            hx_resp = Response("", status=200)
            hx_resp.headers["HX-Redirect"] = "/"
            return hx_resp

        from quart import redirect as qredirect
        return qredirect("/")

    @bp.get("/results")
    async def results():
        """HTMX partial: the results table, used as a polling target."""
        from shared.browser.app.csrf import generate_csrf_token
        from sexp.sexp_components import render_results_partial
        import runner

        running = runner.is_running()
        html = await render_results_partial(
            runner.get_results(),
            running,
            generate_csrf_token(),
        )

        resp = Response(html, status=200, content_type="text/html")
        if running:
            # Tell the client tests are still in flight so it keeps polling.
            resp.headers["HX-Trigger-After-Swap"] = "test-still-running"
        return resp

    return bp
|
||||
24
test/entrypoint.sh
Executable file
24
test/entrypoint.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Entrypoint for the test-runner dashboard service.
# No database — skip DB wait and migrations.

# Best-effort: clear the Redis page cache on deploy so stale pages aren't served.
if [[ -n "${REDIS_URL:-}" && "${REDIS_URL}" != "no" ]]; then
    python3 -c "
import redis, os
r = redis.from_url(os.environ['REDIS_URL'])
r.flushdb()
" || echo "Redis flush failed (non-fatal), continuing..."
fi

# Optional dev hot-reload (RELOAD=true).
RELOAD_FLAG=""
if [[ "${RELOAD:-}" == "true" ]]; then
    RELOAD_FLAG="--reload"
fi

# Start the app. Bind port: prefer PORT, fall back to APP_PORT (set in the
# Dockerfile and used by its EXPOSE), then 8000 — keeps the two consistent.
PYTHONUNBUFFERED=1 exec hypercorn "${APP_MODULE:-app:app}" \
    --bind 0.0.0.0:${PORT:-${APP_PORT:-8000}} \
    --workers ${WORKERS:-1} \
    --keep-alive 75 \
    ${RELOAD_FLAG}
|
||||
9
test/path_setup.py
Normal file
9
test/path_setup.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""Make the project root and this service's directory importable.

Inserting the root first and the service dir second leaves the service dir
at the front of sys.path, so its modules shadow same-named project modules.
"""
import os
import sys

_service_dir = os.path.dirname(os.path.abspath(__file__))
_root_dir = os.path.dirname(_service_dir)

for _candidate in (_root_dir, _service_dir):
    if _candidate in sys.path:
        continue
    sys.path.insert(0, _candidate)
|
||||
135
test/runner.py
Normal file
135
test/runner.py
Normal file
@@ -0,0 +1,135 @@
|
||||
"""Pytest subprocess runner + in-memory result storage."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# In-memory state
|
||||
_last_result: dict | None = None
|
||||
_running: bool = False
|
||||
|
||||
# Paths to test directories (relative to /app in Docker)
|
||||
_TEST_DIRS = [
|
||||
"shared/tests/",
|
||||
"shared/sexp/tests/",
|
||||
]
|
||||
|
||||
_REPORT_PATH = "/tmp/test-report.json"
|
||||
|
||||
|
||||
async def run_tests() -> dict:
|
||||
"""Run pytest in subprocess, parse JSON report, store results."""
|
||||
global _last_result, _running
|
||||
|
||||
if _running:
|
||||
return {"status": "already_running"}
|
||||
|
||||
_running = True
|
||||
started_at = time.time()
|
||||
|
||||
try:
|
||||
cmd = [
|
||||
"python3", "-m", "pytest",
|
||||
*_TEST_DIRS,
|
||||
"--json-report",
|
||||
f"--json-report-file={_REPORT_PATH}",
|
||||
"-q",
|
||||
"--tb=short",
|
||||
]
|
||||
|
||||
proc = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.STDOUT,
|
||||
cwd="/app",
|
||||
)
|
||||
stdout, _ = await proc.communicate()
|
||||
finished_at = time.time()
|
||||
|
||||
# Parse JSON report
|
||||
report_path = Path(_REPORT_PATH)
|
||||
if report_path.exists():
|
||||
try:
|
||||
report = json.loads(report_path.read_text())
|
||||
except (json.JSONDecodeError, OSError):
|
||||
report = {}
|
||||
else:
|
||||
report = {}
|
||||
|
||||
summary = report.get("summary", {})
|
||||
tests_raw = report.get("tests", [])
|
||||
|
||||
tests = []
|
||||
for t in tests_raw:
|
||||
tests.append({
|
||||
"nodeid": t.get("nodeid", ""),
|
||||
"outcome": t.get("outcome", "unknown"),
|
||||
"duration": round(t.get("duration", 0), 4),
|
||||
"longrepr": (t.get("call", {}) or {}).get("longrepr", ""),
|
||||
})
|
||||
|
||||
passed = summary.get("passed", 0)
|
||||
failed = summary.get("failed", 0)
|
||||
errors = summary.get("error", 0)
|
||||
skipped = summary.get("skipped", 0)
|
||||
total = summary.get("total", len(tests))
|
||||
|
||||
if failed > 0 or errors > 0:
|
||||
status = "failed"
|
||||
else:
|
||||
status = "passed"
|
||||
|
||||
_last_result = {
|
||||
"status": status,
|
||||
"started_at": started_at,
|
||||
"finished_at": finished_at,
|
||||
"duration": round(finished_at - started_at, 2),
|
||||
"passed": passed,
|
||||
"failed": failed,
|
||||
"errors": errors,
|
||||
"skipped": skipped,
|
||||
"total": total,
|
||||
"tests": tests,
|
||||
"stdout": (stdout or b"").decode("utf-8", errors="replace")[-5000:],
|
||||
}
|
||||
|
||||
log.info(
|
||||
"Test run complete: %s (%d passed, %d failed, %d errors, %.1fs)",
|
||||
status, passed, failed, errors, _last_result["duration"],
|
||||
)
|
||||
return _last_result
|
||||
|
||||
except Exception:
|
||||
log.exception("Test run failed")
|
||||
finished_at = time.time()
|
||||
_last_result = {
|
||||
"status": "error",
|
||||
"started_at": started_at,
|
||||
"finished_at": finished_at,
|
||||
"duration": round(finished_at - started_at, 2),
|
||||
"passed": 0,
|
||||
"failed": 0,
|
||||
"errors": 1,
|
||||
"skipped": 0,
|
||||
"total": 0,
|
||||
"tests": [],
|
||||
"stdout": "",
|
||||
}
|
||||
return _last_result
|
||||
finally:
|
||||
_running = False
|
||||
|
||||
|
||||
def get_results() -> dict | None:
    """Return the most recent run's result dict, or None if no run has
    completed (or errored) yet. Results live only in this module's memory."""
    return _last_result
|
||||
|
||||
|
||||
def is_running() -> bool:
    """True while run_tests() has a pytest subprocess in flight; used by the
    routes to disable the re-run button and keep the HTMX poll going."""
    return _running
|
||||
6
test/services/__init__.py
Normal file
6
test/services/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Test app service registration."""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
def register_domain_services() -> None:
    """No-op service registration hook for the test app.

    The shared app factory expects this callable; the test dashboard defines
    no domain services of its own, so there is nothing to register.
    """
    return None
|
||||
0
test/sexp/__init__.py
Normal file
0
test/sexp/__init__.py
Normal file
88
test/sexp/dashboard.sexpr
Normal file
88
test/sexp/dashboard.sexpr
Normal file
@@ -0,0 +1,88 @@
|
||||
;; Test dashboard components
;; Sexp-rendered UI fragments for the test-runner dashboard. Each ~component
;; is invoked from test/sexp/sexp_components.py via render(); the :class
;; strings are Tailwind utility sets.

;; Pill badge colored by run status: running / passed / failed / fallback.
(defcomp ~test-status-badge (&key status)
  (span :class (str "inline-flex items-center rounded-full border px-3 py-1 text-sm font-medium "
                    (if (= status "running") "border-amber-300 bg-amber-50 text-amber-700 animate-pulse"
                        (if (= status "passed") "border-emerald-300 bg-emerald-50 text-emerald-700"
                            (if (= status "failed") "border-rose-300 bg-rose-50 text-rose-700"
                                "border-stone-300 bg-stone-50 text-stone-700"))))
        status))

;; POST form that triggers /run; the button is disabled while a run is active.
(defcomp ~test-run-button (&key running csrf)
  (form :method "POST" :action "/run" :class "inline"
        (input :type "hidden" :name "csrf_token" :value csrf)
        (button :type "submit"
                :class (str "rounded bg-stone-800 px-4 py-2 text-sm font-medium text-white hover:bg-stone-700 "
                            "disabled:opacity-50 disabled:cursor-not-allowed transition-colors")
                :disabled (if running "true" nil)
                (if running "Running..." "Run Tests"))))

;; Header row (title + badge + run button) and, once a status exists, the
;; stat cards (total/passed/failed/errors/skipped/duration) and last-run line.
(defcomp ~test-summary (&key status passed failed errors skipped total duration last-run running csrf)
  (div :class "space-y-4"
       (div :class "flex items-center justify-between flex-wrap gap-3"
            (div :class "flex items-center gap-3"
                 (h2 :class "text-lg font-semibold text-stone-800" "Test Results")
                 (when status (~test-status-badge :status status)))
            (~test-run-button :running running :csrf csrf))
       (when status
         (div :class "grid grid-cols-2 sm:grid-cols-3 md:grid-cols-6 gap-3"
              (div :class "rounded border border-stone-200 bg-white p-3 text-center"
                   (div :class "text-2xl font-bold text-stone-800" total)
                   (div :class "text-xs text-stone-500" "Total"))
              (div :class "rounded border border-emerald-200 bg-emerald-50 p-3 text-center"
                   (div :class "text-2xl font-bold text-emerald-700" passed)
                   (div :class "text-xs text-emerald-600" "Passed"))
              (div :class "rounded border border-rose-200 bg-rose-50 p-3 text-center"
                   (div :class "text-2xl font-bold text-rose-700" failed)
                   (div :class "text-xs text-rose-600" "Failed"))
              (div :class "rounded border border-orange-200 bg-orange-50 p-3 text-center"
                   (div :class "text-2xl font-bold text-orange-700" errors)
                   (div :class "text-xs text-orange-600" "Errors"))
              (div :class "rounded border border-sky-200 bg-sky-50 p-3 text-center"
                   (div :class "text-2xl font-bold text-sky-700" skipped)
                   (div :class "text-xs text-sky-600" "Skipped"))
              (div :class "rounded border border-stone-200 bg-white p-3 text-center"
                   (div :class "text-2xl font-bold text-stone-800" (str duration "s"))
                   (div :class "text-xs text-stone-500" "Duration")))
         (div :class "text-xs text-stone-400" (str "Last run: " last-run)))))

;; One table row per test: nodeid, outcome badge, duration, truncated error.
(defcomp ~test-row (&key nodeid outcome duration longrepr)
  (tr :class (str "border-b border-stone-100 "
                  (if (= outcome "passed") "bg-white"
                      (if (= outcome "failed") "bg-rose-50"
                          (if (= outcome "skipped") "bg-sky-50"
                              "bg-orange-50"))))
      (td :class "px-3 py-2 text-xs font-mono text-stone-700 max-w-0 truncate" :title nodeid nodeid)
      (td :class "px-3 py-2 text-center"
          (span :class (str "inline-flex items-center rounded-full border px-2 py-0.5 text-[11px] font-medium "
                            (if (= outcome "passed") "border-emerald-300 bg-emerald-50 text-emerald-700"
                                (if (= outcome "failed") "border-rose-300 bg-rose-50 text-rose-700"
                                    (if (= outcome "skipped") "border-sky-300 bg-sky-50 text-sky-700"
                                        "border-orange-300 bg-orange-50 text-orange-700"))))
                outcome))
      (td :class "px-3 py-2 text-right text-xs text-stone-500 tabular-nums" (str duration "s"))
      (td :class "px-3 py-2 text-xs text-rose-600 font-mono max-w-xs truncate" :title longrepr
          (when longrepr longrepr))))

;; Results table shell; rows-html is pre-rendered ~test-row markup.
;; NOTE(review): has-failures is accepted but never used in this body —
;; confirm whether callers rely on it before removing.
(defcomp ~test-results-table (&key rows-html has-failures)
  (div :class "overflow-x-auto rounded border border-stone-200 bg-white"
       (table :class "w-full text-left"
              (thead
               (tr :class "border-b border-stone-200 bg-stone-50"
                   (th :class "px-3 py-2 text-xs font-medium text-stone-600" "Test")
                   (th :class "px-3 py-2 text-xs font-medium text-stone-600 text-center w-24" "Status")
                   (th :class "px-3 py-2 text-xs font-medium text-stone-600 text-right w-20" "Time")
                   (th :class "px-3 py-2 text-xs font-medium text-stone-600 w-48" "Error")))
              (tbody rows-html))))

;; Spinner shown while a run is in flight.
(defcomp ~test-running-indicator ()
  (div :class "flex items-center justify-center py-12 text-stone-500"
       (div :class "flex items-center gap-3"
            (div :class "animate-spin h-6 w-6 border-2 border-stone-300 border-t-stone-600 rounded-full")
            (span :class "text-sm" "Running tests..."))))

;; Empty state before any run has completed.
(defcomp ~test-no-results ()
  (div :class "flex items-center justify-center py-12 text-stone-400"
       (div :class "text-center"
            (div :class "text-4xl mb-2" "?")
            (div :class "text-sm" "No test results yet. Click \"Run Tests\" to start."))))
|
||||
105
test/sexp/sexp_components.py
Normal file
105
test/sexp/sexp_components.py
Normal file
@@ -0,0 +1,105 @@
|
||||
"""Test service s-expression page components."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from shared.sexp.jinja_bridge import render, load_service_components
|
||||
from shared.sexp.helpers import root_header_html, full_page
|
||||
|
||||
# Load this service's .sexpr component definitions at import time so the
# render("test-…") calls below can resolve them. The argument is the service
# root (test/), i.e. the parent of this sexp/ package directory.
# NOTE(review): assumes load_service_components scans that directory for
# *.sexpr files (e.g. sexp/dashboard.sexpr) — confirm in shared.sexp.jinja_bridge.
load_service_components(os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
|
||||
def _format_time(ts: float | None) -> str:
|
||||
"""Format a unix timestamp for display."""
|
||||
if not ts:
|
||||
return "never"
|
||||
return datetime.fromtimestamp(ts).strftime("%-d %b %Y, %H:%M:%S")
|
||||
|
||||
|
||||
def _test_rows_html(tests: list[dict]) -> str:
|
||||
"""Render all test result rows."""
|
||||
parts = []
|
||||
for t in tests:
|
||||
parts.append(render(
|
||||
"test-row",
|
||||
nodeid=t["nodeid"],
|
||||
outcome=t["outcome"],
|
||||
duration=str(t["duration"]),
|
||||
longrepr=t.get("longrepr", ""),
|
||||
))
|
||||
return "".join(parts)
|
||||
|
||||
|
||||
def _results_partial_html(result: dict | None, running: bool, csrf: str) -> str:
    """Render the results section: a summary block followed by either the
    results table, a running spinner, or an empty-state placeholder."""
    # First run still in flight, nothing stored yet: placeholder summary.
    if running and not result:
        head = render(
            "test-summary",
            status="running", passed="0", failed="0", errors="0",
            skipped="0", total="0", duration="...",
            last_run="in progress", running=True, csrf=csrf,
        )
        return head + render("test-running-indicator")

    # No run has ever completed: zeroed summary + empty state.
    if not result:
        head = render(
            "test-summary",
            status=None, passed="0", failed="0", errors="0",
            skipped="0", total="0", duration="0",
            last_run="never", running=running, csrf=csrf,
        )
        return head + render("test-no-results")

    # A previous result exists; a concurrent re-run overrides its status.
    head = render(
        "test-summary",
        status="running" if running else result["status"],
        passed=str(result["passed"]),
        failed=str(result["failed"]),
        errors=str(result["errors"]),
        skipped=str(result.get("skipped", 0)),
        total=str(result["total"]),
        duration=str(result["duration"]),
        last_run="in progress" if running else _format_time(result["finished_at"]),
        running=running,
        csrf=csrf,
    )

    if running:
        return head + render("test-running-indicator")

    tests = result.get("tests", [])
    if not tests:
        return head + render("test-no-results")

    table = render(
        "test-results-table",
        rows_html=_test_rows_html(tests),
        has_failures=str(result["failed"] > 0 or result["errors"] > 0).lower(),
    )
    return head + table
|
||||
|
||||
|
||||
def _wrap_results_div(inner_html: str, running: bool) -> str:
|
||||
"""Wrap results in a div with HTMX polling when running."""
|
||||
attrs = 'id="test-results" class="space-y-6 p-4"'
|
||||
if running:
|
||||
attrs += ' hx-get="/results" hx-trigger="every 2s" hx-swap="outerHTML"'
|
||||
return f'<div {attrs}>{inner_html}</div>'
|
||||
|
||||
|
||||
async def render_dashboard_page(ctx: dict, result: dict | None,
                                running: bool, csrf: str) -> str:
    """Render the complete dashboard page: site header plus the wrapped
    results section (summary, table/spinner/empty state)."""
    header_html = root_header_html(ctx)
    section = _results_partial_html(result, running, csrf)
    return full_page(
        ctx,
        header_rows_html=header_html,
        content_html=_wrap_results_div(section, running),
    )
|
||||
|
||||
|
||||
async def render_results_partial(result: dict | None, running: bool,
                                 csrf: str) -> str:
    """Render only the wrapped results section (no page chrome) — the HTMX
    polling endpoint swaps this for the current #test-results div."""
    section = _results_partial_html(result, running, csrf)
    return _wrap_results_div(section, running)
|
||||
Reference in New Issue
Block a user