Compare commits
6 Commits
1a5969202e
...
4ede0368dc
| Author | SHA1 | Date | |
|---|---|---|---|
| 4ede0368dc | |||
| a8e06e87fb | |||
| 588d240ddc | |||
| aa5c251a45 | |||
| 7ccb463a8b | |||
| 341fc4cf28 |
@@ -5,7 +5,6 @@ Cooperative web platform: federated content, commerce, events, and media process
|
|||||||
## Deployment
|
## Deployment
|
||||||
|
|
||||||
- **Do NOT push** until explicitly told to. Pushes reload code to dev automatically.
|
- **Do NOT push** until explicitly told to. Pushes reload code to dev automatically.
|
||||||
- **Cache busting:** After editing `sx.js`, bump the `?v=` query string in `shared/sx/helpers.py` (search for `sx.js?v=`).
|
|
||||||
|
|
||||||
## Project Structure
|
## Project Structure
|
||||||
|
|
||||||
|
|||||||
20
blog/alembic/versions/0005_add_sx_content.py
Normal file
20
blog/alembic/versions/0005_add_sx_content.py
Normal file
"""Add the ``sx_content`` column to the ``posts`` table.

Revision ID: blog_0005
Revises: blog_0004
"""
from alembic import op
import sqlalchemy as sa

# Alembic revision identifiers.
revision = "blog_0005"
down_revision = "blog_0004"
branch_labels = None
depends_on = None

_TABLE = "posts"
_COLUMN = "sx_content"


def upgrade():
    """Add the nullable text column holding s-expression post content."""
    op.add_column(_TABLE, sa.Column(_COLUMN, sa.Text(), nullable=True))


def downgrade():
    """Drop the s-expression content column."""
    op.drop_column(_TABLE, _COLUMN)
441
blog/bp/blog/ghost/lexical_to_sx.py
Normal file
441
blog/bp/blog/ghost/lexical_to_sx.py
Normal file
@@ -0,0 +1,441 @@
|
|||||||
|
"""
|
||||||
|
Lexical JSON → s-expression converter.
|
||||||
|
|
||||||
|
Mirrors lexical_renderer.py's registry/dispatch pattern but produces sx source
|
||||||
|
instead of HTML. Used for backfilling existing posts and on-the-fly conversion
|
||||||
|
when editing pre-migration posts in the SX editor.
|
||||||
|
|
||||||
|
Public API
|
||||||
|
----------
|
||||||
|
lexical_to_sx(doc) – Lexical JSON (dict or string) → sx source string
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
import mistune
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Registry
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
_CONVERTERS: dict[str, Callable[[dict], str]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _converter(node_type: str):
|
||||||
|
"""Decorator — register a function as the converter for *node_type*."""
|
||||||
|
def decorator(fn: Callable[[dict], str]) -> Callable[[dict], str]:
|
||||||
|
_CONVERTERS[node_type] = fn
|
||||||
|
return fn
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Public entry point
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def lexical_to_sx(doc: dict | str) -> str:
|
||||||
|
"""Convert a Lexical JSON document to an sx source string."""
|
||||||
|
if isinstance(doc, str):
|
||||||
|
doc = json.loads(doc)
|
||||||
|
root = doc.get("root", doc)
|
||||||
|
children = root.get("children", [])
|
||||||
|
parts = [_convert_node(c) for c in children]
|
||||||
|
parts = [p for p in parts if p]
|
||||||
|
if not parts:
|
||||||
|
return '(<> (p ""))'
|
||||||
|
if len(parts) == 1:
|
||||||
|
return parts[0]
|
||||||
|
return "(<>\n " + "\n ".join(parts) + ")"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Core dispatch
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _convert_node(node: dict) -> str:
|
||||||
|
node_type = node.get("type", "")
|
||||||
|
converter = _CONVERTERS.get(node_type)
|
||||||
|
if converter:
|
||||||
|
return converter(node)
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_children(children: list[dict]) -> str:
|
||||||
|
"""Convert children to inline sx content (for text nodes)."""
|
||||||
|
parts = [_convert_node(c) for c in children]
|
||||||
|
return " ".join(p for p in parts if p)
|
||||||
|
|
||||||
|
|
||||||
|
def _esc(s: str) -> str:
|
||||||
|
"""Escape a string for sx double-quoted literals."""
|
||||||
|
return s.replace("\\", "\\\\").replace('"', '\\"')
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Text format bitmask
# ---------------------------------------------------------------------------

# Lexical encodes inline formatting as a bitmask on a text node's
# ``format`` field; each bit maps to one wrapping sx tag below.
_FORMAT_BOLD = 1
_FORMAT_ITALIC = 2
_FORMAT_STRIKETHROUGH = 4
_FORMAT_UNDERLINE = 8
_FORMAT_CODE = 16
_FORMAT_SUBSCRIPT = 32
_FORMAT_SUPERSCRIPT = 64

_FORMAT_WRAPPERS: list[tuple[int, str]] = [
    (_FORMAT_BOLD, "strong"),
    (_FORMAT_ITALIC, "em"),
    (_FORMAT_STRIKETHROUGH, "s"),
    (_FORMAT_UNDERLINE, "u"),
    (_FORMAT_CODE, "code"),
    (_FORMAT_SUBSCRIPT, "sub"),
    (_FORMAT_SUPERSCRIPT, "sup"),
]


def _wrap_format(text_sx: str, fmt: int) -> str:
    """Wrap *text_sx* in one sx tag per bit set in *fmt*.

    Tags are applied in `_FORMAT_WRAPPERS` order, so earlier entries end
    up innermost (e.g. bold+italic → ``(em (strong "..."))``).
    """
    wrapped = text_sx
    for mask, tag in _FORMAT_WRAPPERS:
        if fmt & mask:
            wrapped = f"({tag} {wrapped})"
    return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Tier 1 — text nodes
# ---------------------------------------------------------------------------

@_converter("text")
def _text(node: dict) -> str:
    """Plain text node → quoted literal, wrapped per its format bitmask."""
    raw = node.get("text", "")
    if not raw:
        return ""
    literal = f'"{_esc(raw)}"'
    fmt = node.get("format", 0)
    # Only honour the bitmask when it is actually an int (Lexical sometimes
    # stores string formats on other node kinds).
    return _wrap_format(literal, fmt) if isinstance(fmt, int) and fmt else literal


@_converter("linebreak")
def _linebreak(_node: dict) -> str:
    """Hard line break → escaped newline literal."""
    return '"\\n"'


@_converter("tab")
def _tab(_node: dict) -> str:
    """Tab node → escaped tab literal."""
    return '"\\t"'


@_converter("paragraph")
def _paragraph(node: dict) -> str:
    """Paragraph → (p ...); an empty paragraph keeps a "" placeholder."""
    inner = _convert_children(node.get("children", [])) or '""'
    return f"(p {inner})"


@_converter("extended-text")
def _extended_text(node: dict) -> str:
    """extended-text is inline when it carries "text", block-level otherwise."""
    return _text(node) if "text" in node else _paragraph(node)


@_converter("heading")
def _heading(node: dict) -> str:
    """Heading → (hN ...), defaulting to h2 when no tag is present."""
    tag = node.get("tag", "h2")
    inner = _convert_children(node.get("children", [])) or '""'
    return f"({tag} {inner})"


@_converter("extended-heading")
def _extended_heading(node: dict) -> str:
    """Extended heading: inline text node or regular heading."""
    return _text(node) if "text" in node else _heading(node)


@_converter("quote")
def _quote(node: dict) -> str:
    """Quote → (blockquote ...); empty quotes keep a "" placeholder."""
    inner = _convert_children(node.get("children", []))
    if not inner:
        return '(blockquote "")'
    return f"(blockquote {inner})"


@_converter("extended-quote")
def _extended_quote(node: dict) -> str:
    """Extended quote: inline text node or regular blockquote."""
    return _text(node) if "text" in node else _quote(node)


@_converter("link")
def _link(node: dict) -> str:
    """Link → (a :href "...") — the URL doubles as link text when empty."""
    href = node.get("url", "")
    inner = _convert_children(node.get("children", [])) or f'"{_esc(href)}"'
    return f'(a :href "{_esc(href)}" {inner})'


@_converter("autolink")
def _autolink(node: dict) -> str:
    """Autolink behaves exactly like a regular link."""
    return _link(node)


@_converter("at-link")
def _at_link(node: dict) -> str:
    """@-mention link behaves exactly like a regular link."""
    return _link(node)


@_converter("list")
def _list(node: dict) -> str:
    """List → (ol ...) for numbered lists, (ul ...) otherwise."""
    tag = "ol" if node.get("listType") == "number" else "ul"
    inner = _convert_children(node.get("children", []))
    if not inner:
        return f"({tag})"
    return f"({tag} {inner})"


@_converter("listitem")
def _listitem(node: dict) -> str:
    """List item → (li ...); empty items keep a "" placeholder."""
    inner = _convert_children(node.get("children", []))
    if not inner:
        return '(li "")'
    return f"(li {inner})"


@_converter("horizontalrule")
def _horizontalrule(_node: dict) -> str:
    """Horizontal rule → (hr)."""
    return "(hr)"


@_converter("code")
def _code(node: dict) -> str:
    """Inline code span → (code ...); dropped entirely when empty."""
    inner = _convert_children(node.get("children", []))
    if not inner:
        return ""
    return f"(code {inner})"


@_converter("codeblock")
def _codeblock(node: dict) -> str:
    """Code block → (pre (code ...)) with an optional language- class."""
    lang = node.get("language", "")
    code = node.get("code", "")
    lang_attr = f' :class "language-{_esc(lang)}"' if lang else ""
    return f'(pre (code{lang_attr} "{_esc(code)}"))'


@_converter("code-highlight")
def _code_highlight(node: dict) -> str:
    """Highlighted code fragment → bare quoted literal."""
    text = node.get("text", "")
    if not text:
        return ""
    return f'"{_esc(text)}"'
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Tier 2 — common cards
# ---------------------------------------------------------------------------

@_converter("image")
def _image(node: dict) -> str:
    """Image card → (~kg-image :src ... [:alt][:caption][:width][:href])."""
    alt = node.get("alt", "")
    caption = node.get("caption", "")
    # Prefer the Koenig card width, fall back to the raw pixel width.
    width = node.get("cardWidth", "") or node.get("width", "")
    href = node.get("href", "")

    attrs = [f':src "{_esc(node.get("src", ""))}"']
    if alt:
        attrs.append(f':alt "{_esc(alt)}"')
    if caption:
        attrs.append(f':caption "{_esc(caption)}"')
    if width:
        attrs.append(f':width "{_esc(width)}"')
    if href:
        attrs.append(f':href "{_esc(href)}"')
    return "(~kg-image " + " ".join(attrs) + ")"


@_converter("gallery")
def _gallery(node: dict) -> str:
    """Gallery card → (~kg-gallery :images (list (list (dict ...) ...)))."""
    images = node.get("images", [])
    if not images:
        return ""

    # Group images into rows of 3 (matching lexical_renderer.py).
    rows = []
    for start in range(0, len(images), 3):
        row_items = []
        for img in images[start:start + 3]:
            entry = [f'"src" "{_esc(img.get("src", ""))}"']
            if img.get("alt"):
                entry.append(f'"alt" "{_esc(img["alt"])}"')
            if img.get("caption"):
                entry.append(f'"caption" "{_esc(img["caption"])}"')
            row_items.append("(dict " + " ".join(entry) + ")")
        rows.append("(list " + " ".join(row_items) + ")")

    images_sx = "(list " + " ".join(rows) + ")"
    caption = node.get("caption", "")
    caption_attr = f' :caption "{_esc(caption)}"' if caption else ""
    return f"(~kg-gallery :images {images_sx}{caption_attr})"
|
||||||
|
|
||||||
|
|
||||||
|
@_converter("html")
def _html_card(node: dict) -> str:
    """Raw HTML card → (~kg-html :html "...")."""
    raw = node.get("html", "")
    return f'(~kg-html :html "{_esc(raw)}")'


@_converter("embed")
def _embed(node: dict) -> str:
    """Embed card → (~kg-embed :html "..." [:caption])."""
    embed_html = node.get("html", "")
    caption = node.get("caption", "")
    parts = [f':html "{_esc(embed_html)}"']
    if caption:
        parts.append(f':caption "{_esc(caption)}"')
    return "(~kg-embed " + " ".join(parts) + ")"


@_converter("bookmark")
def _bookmark(node: dict) -> str:
    """Bookmark card → (~kg-bookmark :url ... plus any metadata attrs).

    Each attribute prefers the value under ``metadata`` and falls back to
    a top-level field of the same name; empty values are omitted.
    """
    url = node.get("url", "")
    # Fix: node.get("metadata", {}) still returns None when the JSON value
    # is an explicit null — guard with `or {}` so .get() cannot crash.
    meta = node.get("metadata") or {}
    parts = [f':url "{_esc(url)}"']

    # Attribute name and lookup key coincide for all six metadata fields.
    for key in ("title", "description", "icon", "author", "publisher", "thumbnail"):
        value = meta.get(key, "") or node.get(key, "")
        if value:
            parts.append(f':{key} "{_esc(value)}"')

    caption = node.get("caption", "")
    if caption:
        parts.append(f':caption "{_esc(caption)}"')

    return "(~kg-bookmark " + " ".join(parts) + ")"


@_converter("callout")
def _callout(node: dict) -> str:
    """Callout card → (~kg-callout :color ... [:emoji] [:content])."""
    color = node.get("backgroundColor", "grey")
    emoji = node.get("calloutEmoji", "")
    inner = _convert_children(node.get("children", []))

    parts = [f':color "{_esc(color)}"']
    if emoji:
        parts.append(f':emoji "{_esc(emoji)}"')
    if inner:
        parts.append(f':content {inner}')
    return "(~kg-callout " + " ".join(parts) + ")"
|
||||||
|
|
||||||
|
|
||||||
|
@_converter("button")
def _button(node: dict) -> str:
    """Button card → (~kg-button :url ... :text ... :alignment ...)."""
    text = node.get("buttonText", "")
    url = node.get("buttonUrl", "")
    alignment = node.get("alignment", "center")
    return f'(~kg-button :url "{_esc(url)}" :text "{_esc(text)}" :alignment "{_esc(alignment)}")'


@_converter("toggle")
def _toggle(node: dict) -> str:
    """Toggle card → (~kg-toggle :heading "..." [:content ...])."""
    heading = node.get("heading", "")
    inner = _convert_children(node.get("children", []))
    content_attr = f" :content {inner}" if inner else ""
    return f'(~kg-toggle :heading "{_esc(heading)}"{content_attr})'


@_converter("audio")
def _audio(node: dict) -> str:
    """Audio card → (~kg-audio :src ... [:title] :duration "M:SS" [:thumbnail])."""
    src = node.get("src", "")
    title = node.get("title", "")
    # Fix: an explicit JSON null duration made int(duration) raise
    # TypeError; coerce missing/null to 0 before the minutes:seconds split.
    duration = node.get("duration") or 0
    thumbnail = node.get("thumbnailSrc", "")

    total = int(duration)
    duration_str = f"{total // 60}:{total % 60:02d}"

    parts = [f':src "{_esc(src)}"']
    if title:
        parts.append(f':title "{_esc(title)}"')
    parts.append(f':duration "{duration_str}"')
    if thumbnail:
        parts.append(f':thumbnail "{_esc(thumbnail)}"')
    return "(~kg-audio " + " ".join(parts) + ")"


@_converter("video")
def _video(node: dict) -> str:
    """Video card → (~kg-video :src ... [:caption][:width][:thumbnail][:loop])."""
    src = node.get("src", "")
    caption = node.get("caption", "")
    width = node.get("cardWidth", "")
    # Ghost stores either an extracted or a user-supplied thumbnail.
    thumbnail = node.get("thumbnailSrc", "") or node.get("customThumbnailSrc", "")
    loop = node.get("loop", False)

    parts = [f':src "{_esc(src)}"']
    if caption:
        parts.append(f':caption "{_esc(caption)}"')
    if width:
        parts.append(f':width "{_esc(width)}"')
    if thumbnail:
        parts.append(f':thumbnail "{_esc(thumbnail)}"')
    if loop:
        parts.append(":loop true")
    return "(~kg-video " + " ".join(parts) + ")"
|
||||||
|
|
||||||
|
|
||||||
|
@_converter("file")
def _file(node: dict) -> str:
    """File card → (~kg-file :src ... [:filename][:title][:filesize][:caption])."""
    src = node.get("src", "")
    filename = node.get("fileName", "")
    title = node.get("title", "") or filename
    file_size = node.get("fileSize", 0)
    caption = node.get("caption", "")

    # Human-readable size: whole KB below one MB, otherwise MB to 1 decimal.
    size_str = ""
    if file_size:
        kb = file_size / 1024
        size_str = f"{kb:.0f} KB" if kb < 1024 else f"{kb / 1024:.1f} MB"

    parts = [f':src "{_esc(src)}"']
    if filename:
        parts.append(f':filename "{_esc(filename)}"')
    if title:
        parts.append(f':title "{_esc(title)}"')
    if size_str:
        parts.append(f':filesize "{size_str}"')
    if caption:
        parts.append(f':caption "{_esc(caption)}"')
    return "(~kg-file " + " ".join(parts) + ")"


@_converter("paywall")
def _paywall(_node: dict) -> str:
    """Paywall marker → (~kg-paywall)."""
    return "(~kg-paywall)"


@_converter("markdown")
def _markdown(node: dict) -> str:
    """Markdown card: render to HTML with mistune, emit as an html card."""
    md_text = node.get("markdown", "")
    rendered = mistune.html(md_text)
    return f'(~kg-html :html "{_esc(rendered)}")'
|
||||||
@@ -63,6 +63,7 @@ def _post_to_public(p: Post) -> Dict[str, Any]:
|
|||||||
"slug": p.slug,
|
"slug": p.slug,
|
||||||
"title": p.title,
|
"title": p.title,
|
||||||
"html": p.html,
|
"html": p.html,
|
||||||
|
"sx_content": p.sx_content,
|
||||||
"is_page": p.is_page,
|
"is_page": p.is_page,
|
||||||
"excerpt": p.custom_excerpt or p.excerpt,
|
"excerpt": p.custom_excerpt or p.excerpt,
|
||||||
"custom_excerpt": p.custom_excerpt,
|
"custom_excerpt": p.custom_excerpt,
|
||||||
|
|||||||
@@ -265,6 +265,7 @@ def register(url_prefix, title):
|
|||||||
return await make_response(html, 400)
|
return await make_response(html, 400)
|
||||||
|
|
||||||
# Create directly in db_blog
|
# Create directly in db_blog
|
||||||
|
sx_content_raw = form.get("sx_content", "").strip() or None
|
||||||
post = await writer_create(
|
post = await writer_create(
|
||||||
g.s,
|
g.s,
|
||||||
title=title,
|
title=title,
|
||||||
@@ -274,6 +275,7 @@ def register(url_prefix, title):
|
|||||||
feature_image=feature_image or None,
|
feature_image=feature_image or None,
|
||||||
custom_excerpt=custom_excerpt or None,
|
custom_excerpt=custom_excerpt or None,
|
||||||
feature_image_caption=feature_image_caption or None,
|
feature_image_caption=feature_image_caption or None,
|
||||||
|
sx_content=sx_content_raw,
|
||||||
)
|
)
|
||||||
await g.s.flush()
|
await g.s.flush()
|
||||||
|
|
||||||
@@ -337,6 +339,7 @@ def register(url_prefix, title):
|
|||||||
return await make_response(html, 400)
|
return await make_response(html, 400)
|
||||||
|
|
||||||
# Create directly in db_blog
|
# Create directly in db_blog
|
||||||
|
sx_content_raw = form.get("sx_content", "").strip() or None
|
||||||
page = await writer_create_page(
|
page = await writer_create_page(
|
||||||
g.s,
|
g.s,
|
||||||
title=title,
|
title=title,
|
||||||
@@ -346,6 +349,7 @@ def register(url_prefix, title):
|
|||||||
feature_image=feature_image or None,
|
feature_image=feature_image or None,
|
||||||
custom_excerpt=custom_excerpt or None,
|
custom_excerpt=custom_excerpt or None,
|
||||||
feature_image_caption=feature_image_caption or None,
|
feature_image_caption=feature_image_caption or None,
|
||||||
|
sx_content=sx_content_raw,
|
||||||
)
|
)
|
||||||
await g.s.flush()
|
await g.s.flush()
|
||||||
|
|
||||||
|
|||||||
@@ -200,6 +200,63 @@ def register():
|
|||||||
sx_src = await render_post_data_oob(tctx)
|
sx_src = await render_post_data_oob(tctx)
|
||||||
return sx_response(sx_src)
|
return sx_response(sx_src)
|
||||||
|
|
||||||
|
@bp.get("/preview/")
@require_admin
async def preview(slug: str):
    """Admin-only debug view showing a post four ways side by side.

    Sections: prettified sx source, prettified lexical JSON, the sx
    render, and the legacy lexical render. Renders a full page for plain
    requests, an OOB fragment for htmx.
    """
    from sqlalchemy import select as sa_select

    from models.ghost_content import Post
    from shared.sx.page import get_template_context
    from sx.sx_components import render_post_preview_page, render_post_preview_oob

    post_id = g.post_data["post"]["id"]
    post = (await g.s.execute(
        sa_select(Post).where(Post.id == post_id)
    )).scalar_one_or_none()

    # Build the 4 preview views.
    views = {}

    # 1. Prettified sx source
    sx_content = getattr(post, "sx_content", None) or ""
    if sx_content:
        from shared.sx.prettify import sx_to_pretty_sx
        views["sx_pretty"] = sx_to_pretty_sx(sx_content)

    # 2. Prettified lexical JSON
    lexical_raw = getattr(post, "lexical", None) or ""
    if lexical_raw:
        from shared.sx.prettify import json_to_pretty_sx
        views["json_pretty"] = json_to_pretty_sx(lexical_raw)

    # 3. SX rendered preview (best-effort; parse errors become a notice)
    if sx_content:
        from shared.sx.parser import parse as sx_parse
        from shared.sx.html import render as sx_html_render
        from shared.sx.jinja_bridge import _COMPONENT_ENV
        try:
            views["sx_rendered"] = sx_html_render(sx_parse(sx_content), dict(_COMPONENT_ENV))
        except Exception:
            views["sx_rendered"] = "<em>Error rendering sx</em>"

    # 4. Lexical rendered preview (best-effort as well)
    if lexical_raw:
        from bp.blog.ghost.lexical_renderer import render_lexical
        try:
            views["lex_rendered"] = render_lexical(lexical_raw)
        except Exception:
            views["lex_rendered"] = "<em>Error rendering lexical</em>"

    tctx = await get_template_context()
    tctx.update(views)
    if is_htmx_request():
        return sx_response(await render_post_preview_oob(tctx))
    html = await render_post_preview_page(tctx)
    return await make_response(html)
||||||
|
|
||||||
@bp.get("/entries/calendar/<int:calendar_id>/")
|
@bp.get("/entries/calendar/<int:calendar_id>/")
|
||||||
@require_admin
|
@require_admin
|
||||||
async def calendar_view(slug: str, calendar_id: int):
|
async def calendar_view(slug: str, calendar_id: int):
|
||||||
@@ -562,6 +619,7 @@ def register():
|
|||||||
elif status and status != current_status:
|
elif status and status != current_status:
|
||||||
effective_status = status
|
effective_status = status
|
||||||
|
|
||||||
|
sx_content_raw = form.get("sx_content", "").strip() or None
|
||||||
try:
|
try:
|
||||||
post = await writer_update(
|
post = await writer_update(
|
||||||
g.s,
|
g.s,
|
||||||
@@ -573,6 +631,7 @@ def register():
|
|||||||
custom_excerpt=custom_excerpt or None,
|
custom_excerpt=custom_excerpt or None,
|
||||||
feature_image_caption=feature_image_caption or None,
|
feature_image_caption=feature_image_caption or None,
|
||||||
status=effective_status,
|
status=effective_status,
|
||||||
|
sx_content=sx_content_raw,
|
||||||
)
|
)
|
||||||
except OptimisticLockError:
|
except OptimisticLockError:
|
||||||
return redirect(
|
return redirect(
|
||||||
|
|||||||
@@ -60,6 +60,7 @@ class Post(Base):
|
|||||||
plaintext: Mapped[Optional[str]] = mapped_column(Text())
|
plaintext: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
mobiledoc: Mapped[Optional[str]] = mapped_column(Text())
|
mobiledoc: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
lexical: Mapped[Optional[str]] = mapped_column(Text())
|
lexical: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
|
sx_content: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
|
|
||||||
feature_image: Mapped[Optional[str]] = mapped_column(Text())
|
feature_image: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
feature_image_alt: Mapped[Optional[str]] = mapped_column(Text())
|
feature_image_alt: Mapped[Optional[str]] = mapped_column(Text())
|
||||||
|
|||||||
68
blog/scripts/backfill_sx_content.py
Normal file
68
blog/scripts/backfill_sx_content.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Backfill sx_content from lexical JSON for all posts that have lexical but no sx_content.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python -m blog.scripts.backfill_sx_content [--dry-run]
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from sqlalchemy import select, and_
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
|
||||||
|
async def backfill(dry_run: bool = False) -> int:
|
||||||
|
from shared.db.session import get_session
|
||||||
|
from models.ghost_content import Post
|
||||||
|
from bp.blog.ghost.lexical_to_sx import lexical_to_sx
|
||||||
|
|
||||||
|
converted = 0
|
||||||
|
errors = 0
|
||||||
|
|
||||||
|
async with get_session() as sess:
|
||||||
|
stmt = select(Post).where(
|
||||||
|
and_(
|
||||||
|
Post.lexical.isnot(None),
|
||||||
|
Post.lexical != "",
|
||||||
|
(Post.sx_content.is_(None)) | (Post.sx_content == ""),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
result = await sess.execute(stmt)
|
||||||
|
posts = result.scalars().all()
|
||||||
|
|
||||||
|
print(f"Found {len(posts)} posts to convert")
|
||||||
|
|
||||||
|
for post in posts:
|
||||||
|
try:
|
||||||
|
sx = lexical_to_sx(post.lexical)
|
||||||
|
if dry_run:
|
||||||
|
print(f" [DRY RUN] {post.slug}: {len(sx)} chars")
|
||||||
|
else:
|
||||||
|
post.sx_content = sx
|
||||||
|
print(f" Converted: {post.slug} ({len(sx)} chars)")
|
||||||
|
converted += 1
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ERROR: {post.slug}: {e}", file=sys.stderr)
|
||||||
|
errors += 1
|
||||||
|
|
||||||
|
if not dry_run:
|
||||||
|
await sess.commit()
|
||||||
|
|
||||||
|
print(f"\nDone: {converted} converted, {errors} errors")
|
||||||
|
return converted
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(description="Backfill sx_content from lexical JSON")
|
||||||
|
parser.add_argument("--dry-run", action="store_true", help="Don't write to database")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
asyncio.run(backfill(dry_run=args.dry_run))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -19,6 +19,7 @@ def _post_to_dto(post: Post) -> PostDTO:
|
|||||||
is_page=post.is_page,
|
is_page=post.is_page,
|
||||||
feature_image=post.feature_image,
|
feature_image=post.feature_image,
|
||||||
html=post.html,
|
html=post.html,
|
||||||
|
sx_content=post.sx_content,
|
||||||
excerpt=post.excerpt,
|
excerpt=post.excerpt,
|
||||||
custom_excerpt=post.custom_excerpt,
|
custom_excerpt=post.custom_excerpt,
|
||||||
published_at=post.published_at,
|
published_at=post.published_at,
|
||||||
|
|||||||
@@ -207,6 +207,7 @@ async def create_post(
|
|||||||
feature_image_caption: str | None = None,
|
feature_image_caption: str | None = None,
|
||||||
tag_names: list[str] | None = None,
|
tag_names: list[str] | None = None,
|
||||||
is_page: bool = False,
|
is_page: bool = False,
|
||||||
|
sx_content: str | None = None,
|
||||||
) -> Post:
|
) -> Post:
|
||||||
"""Create a new post or page directly in db_blog."""
|
"""Create a new post or page directly in db_blog."""
|
||||||
html, plaintext, reading_time = _render_and_extract(lexical_json)
|
html, plaintext, reading_time = _render_and_extract(lexical_json)
|
||||||
@@ -217,6 +218,7 @@ async def create_post(
|
|||||||
title=title or "Untitled",
|
title=title or "Untitled",
|
||||||
slug=slug,
|
slug=slug,
|
||||||
lexical=lexical_json if isinstance(lexical_json, str) else json.dumps(lexical_json),
|
lexical=lexical_json if isinstance(lexical_json, str) else json.dumps(lexical_json),
|
||||||
|
sx_content=sx_content,
|
||||||
html=html,
|
html=html,
|
||||||
plaintext=plaintext,
|
plaintext=plaintext,
|
||||||
reading_time=reading_time,
|
reading_time=reading_time,
|
||||||
@@ -281,6 +283,7 @@ async def create_page(
|
|||||||
custom_excerpt: str | None = None,
|
custom_excerpt: str | None = None,
|
||||||
feature_image_caption: str | None = None,
|
feature_image_caption: str | None = None,
|
||||||
tag_names: list[str] | None = None,
|
tag_names: list[str] | None = None,
|
||||||
|
sx_content: str | None = None,
|
||||||
) -> Post:
|
) -> Post:
|
||||||
"""Create a new page. Convenience wrapper around create_post."""
|
"""Create a new page. Convenience wrapper around create_post."""
|
||||||
return await create_post(
|
return await create_post(
|
||||||
@@ -294,6 +297,7 @@ async def create_page(
|
|||||||
feature_image_caption=feature_image_caption,
|
feature_image_caption=feature_image_caption,
|
||||||
tag_names=tag_names,
|
tag_names=tag_names,
|
||||||
is_page=True,
|
is_page=True,
|
||||||
|
sx_content=sx_content,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -308,6 +312,7 @@ async def update_post(
|
|||||||
custom_excerpt: str | None = ..., # type: ignore[assignment]
|
custom_excerpt: str | None = ..., # type: ignore[assignment]
|
||||||
feature_image_caption: str | None = ..., # type: ignore[assignment]
|
feature_image_caption: str | None = ..., # type: ignore[assignment]
|
||||||
status: str | None = None,
|
status: str | None = None,
|
||||||
|
sx_content: str | None = ..., # type: ignore[assignment]
|
||||||
) -> Post:
|
) -> Post:
|
||||||
"""Update post content. Optimistic lock via expected_updated_at.
|
"""Update post content. Optimistic lock via expected_updated_at.
|
||||||
|
|
||||||
@@ -342,6 +347,9 @@ async def update_post(
|
|||||||
if title is not None:
|
if title is not None:
|
||||||
post.title = title
|
post.title = title
|
||||||
|
|
||||||
|
if sx_content is not _SENTINEL:
|
||||||
|
post.sx_content = sx_content
|
||||||
|
|
||||||
if feature_image is not _SENTINEL:
|
if feature_image is not _SENTINEL:
|
||||||
post.feature_image = feature_image
|
post.feature_image = feature_image
|
||||||
if custom_excerpt is not _SENTINEL:
|
if custom_excerpt is not _SENTINEL:
|
||||||
|
|||||||
@@ -142,3 +142,30 @@
|
|||||||
(defcomp ~blog-tag-group-edit-main (&key edit-form delete-form)
|
(defcomp ~blog-tag-group-edit-main (&key edit-form delete-form)
|
||||||
(div :class "max-w-2xl mx-auto px-4 py-6 space-y-6"
|
(div :class "max-w-2xl mx-auto px-4 py-6 space-y-6"
|
||||||
edit-form delete-form))
|
edit-form delete-form))
|
||||||
|
|
||||||
|
;; Preview panel components
|
||||||
|
|
||||||
|
(defcomp ~blog-preview-panel (&key sections)
|
||||||
|
(div :class "max-w-4xl mx-auto px-4 py-6 space-y-4"
|
||||||
|
(style "
|
||||||
|
.sx-pretty, .json-pretty { font-family: monospace; font-size: 12px; line-height: 1.6; white-space: pre-wrap; }
|
||||||
|
.sx-list, .json-obj, .json-arr { display: block; }
|
||||||
|
.sx-paren { color: #64748b; }
|
||||||
|
.sx-sym { color: #0369a1; }
|
||||||
|
.sx-kw { color: #7c3aed; }
|
||||||
|
.sx-str { color: #15803d; }
|
||||||
|
.sx-num { color: #c2410c; }
|
||||||
|
.sx-bool { color: #b91c1c; font-weight: 600; }
|
||||||
|
.json-brace, .json-bracket { color: #64748b; }
|
||||||
|
.json-key { color: #7c3aed; }
|
||||||
|
.json-str { color: #15803d; }
|
||||||
|
.json-num { color: #c2410c; }
|
||||||
|
.json-lit { color: #b91c1c; font-weight: 600; }
|
||||||
|
.json-field { display: block; }
|
||||||
|
")
|
||||||
|
sections))
|
||||||
|
|
||||||
|
(defcomp ~blog-preview-section (&key title content)
|
||||||
|
(details :class "border rounded bg-white"
|
||||||
|
(summary :class "cursor-pointer px-4 py-3 font-medium text-sm bg-stone-100 hover:bg-stone-200 select-none" title)
|
||||||
|
(div :class "p-4 overflow-x-auto text-xs" content)))
|
||||||
|
|||||||
@@ -25,13 +25,15 @@
|
|||||||
excerpt
|
excerpt
|
||||||
(div :class "hidden md:block" at-bar)))
|
(div :class "hidden md:block" at-bar)))
|
||||||
|
|
||||||
(defcomp ~blog-detail-main (&key draft chrome feature-image html-content)
|
(defcomp ~blog-detail-main (&key draft chrome feature-image html-content sx-content)
|
||||||
(<> (article :class "relative"
|
(<> (article :class "relative"
|
||||||
draft
|
draft
|
||||||
chrome
|
chrome
|
||||||
(when feature-image (div :class "mb-3 flex justify-center"
|
(when feature-image (div :class "mb-3 flex justify-center"
|
||||||
(img :src feature-image :alt "" :class "rounded-lg w-full md:w-3/4 object-cover")))
|
(img :src feature-image :alt "" :class "rounded-lg w-full md:w-3/4 object-cover")))
|
||||||
(when html-content (div :class "blog-content p-2" (~rich-text :html html-content))))
|
(if sx-content
|
||||||
|
(div :class "blog-content p-2" sx-content)
|
||||||
|
(when html-content (div :class "blog-content p-2" (~rich-text :html html-content)))))
|
||||||
(div :class "pb-8")))
|
(div :class "pb-8")))
|
||||||
|
|
||||||
(defcomp ~blog-meta (&key robots page-title desc canonical og-type og-title image twitter-card twitter-title)
|
(defcomp ~blog-meta (&key robots page-title desc canonical og-type og-title image twitter-card twitter-title)
|
||||||
@@ -50,5 +52,8 @@
|
|||||||
(meta :name "twitter:description" :content desc)
|
(meta :name "twitter:description" :content desc)
|
||||||
(when image (meta :name "twitter:image" :content image))))
|
(when image (meta :name "twitter:image" :content image))))
|
||||||
|
|
||||||
(defcomp ~blog-home-main (&key html-content)
|
(defcomp ~blog-home-main (&key html-content sx-content)
|
||||||
(article :class "relative" (div :class "blog-content p-2" (~rich-text :html html-content))))
|
(article :class "relative"
|
||||||
|
(if sx-content
|
||||||
|
(div :class "blog-content p-2" sx-content)
|
||||||
|
(div :class "blog-content p-2" (~rich-text :html html-content)))))
|
||||||
|
|||||||
146
blog/sx/kg_cards.sx
Normal file
146
blog/sx/kg_cards.sx
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
;; KG card components — Ghost/Koenig-compatible card rendering
|
||||||
|
;; Produces same HTML structure as lexical_renderer.py so cards.css works unchanged.
|
||||||
|
;; Used by both display pipeline and block editor.
|
||||||
|
|
||||||
|
;; @css kg-card kg-image-card kg-width-wide kg-width-full kg-gallery-card kg-gallery-container kg-gallery-row kg-gallery-image kg-embed-card kg-bookmark-card kg-bookmark-container kg-bookmark-content kg-bookmark-title kg-bookmark-description kg-bookmark-metadata kg-bookmark-icon kg-bookmark-author kg-bookmark-publisher kg-bookmark-thumbnail kg-callout-card kg-callout-emoji kg-callout-text kg-button-card kg-btn kg-btn-accent kg-toggle-card kg-toggle-heading kg-toggle-heading-text kg-toggle-card-icon kg-toggle-content kg-audio-card kg-audio-thumbnail kg-audio-player-container kg-audio-title kg-audio-player kg-audio-play-icon kg-audio-current-time kg-audio-time kg-audio-seek-slider kg-audio-playback-rate kg-audio-unmute-icon kg-audio-volume-slider kg-video-card kg-video-container kg-file-card kg-file-card-container kg-file-card-contents kg-file-card-title kg-file-card-filesize kg-file-card-icon kg-file-card-caption kg-align-center kg-align-left kg-callout-card-grey kg-callout-card-white kg-callout-card-blue kg-callout-card-green kg-callout-card-yellow kg-callout-card-red kg-callout-card-pink kg-callout-card-purple kg-callout-card-accent placeholder
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Image card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-image (&key src alt caption width href)
|
||||||
|
(figure :class (str "kg-card kg-image-card"
|
||||||
|
(if (= width "wide") " kg-width-wide"
|
||||||
|
(if (= width "full") " kg-width-full" "")))
|
||||||
|
(if href
|
||||||
|
(a :href href (img :src src :alt (or alt "") :loading "lazy"))
|
||||||
|
(img :src src :alt (or alt "") :loading "lazy"))
|
||||||
|
(when caption (figcaption caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Gallery card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-gallery (&key images caption)
|
||||||
|
(figure :class "kg-card kg-gallery-card kg-width-wide"
|
||||||
|
(div :class "kg-gallery-container"
|
||||||
|
(map (lambda (row)
|
||||||
|
(div :class "kg-gallery-row"
|
||||||
|
(map (lambda (img-data)
|
||||||
|
(figure :class "kg-gallery-image"
|
||||||
|
(img :src (get img-data "src") :alt (or (get img-data "alt") "") :loading "lazy")
|
||||||
|
(when (get img-data "caption") (figcaption (get img-data "caption")))))
|
||||||
|
row)))
|
||||||
|
images))
|
||||||
|
(when caption (figcaption caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; HTML card (raw HTML injection)
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-html (&key html)
|
||||||
|
(~rich-text :html html))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Embed card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-embed (&key html caption)
|
||||||
|
(figure :class "kg-card kg-embed-card"
|
||||||
|
(~rich-text :html html)
|
||||||
|
(when caption (figcaption caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Bookmark card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-bookmark (&key url title description icon author publisher thumbnail caption)
|
||||||
|
(figure :class "kg-card kg-bookmark-card"
|
||||||
|
(a :class "kg-bookmark-container" :href url
|
||||||
|
(div :class "kg-bookmark-content"
|
||||||
|
(div :class "kg-bookmark-title" (or title ""))
|
||||||
|
(div :class "kg-bookmark-description" (or description ""))
|
||||||
|
(when (or icon author publisher)
|
||||||
|
(span :class "kg-bookmark-metadata"
|
||||||
|
(when icon (img :class "kg-bookmark-icon" :src icon :alt ""))
|
||||||
|
(when author (span :class "kg-bookmark-author" author))
|
||||||
|
(when publisher (span :class "kg-bookmark-publisher" publisher)))))
|
||||||
|
(when thumbnail
|
||||||
|
(div :class "kg-bookmark-thumbnail"
|
||||||
|
(img :src thumbnail :alt ""))))
|
||||||
|
(when caption (figcaption caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Callout card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-callout (&key color emoji content)
|
||||||
|
(div :class (str "kg-card kg-callout-card kg-callout-card-" (or color "grey"))
|
||||||
|
(when emoji (div :class "kg-callout-emoji" emoji))
|
||||||
|
(div :class "kg-callout-text" (or content ""))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Button card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-button (&key url text alignment)
|
||||||
|
(div :class (str "kg-card kg-button-card kg-align-" (or alignment "center"))
|
||||||
|
(a :href url :class "kg-btn kg-btn-accent" (or text ""))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Toggle card (accordion)
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-toggle (&key heading content)
|
||||||
|
(div :class "kg-card kg-toggle-card" :data-kg-toggle-state "close"
|
||||||
|
(div :class "kg-toggle-heading"
|
||||||
|
(h4 :class "kg-toggle-heading-text" (or heading ""))
|
||||||
|
(button :class "kg-toggle-card-icon"
|
||||||
|
(~rich-text :html "<svg viewBox=\"0 0 14 14\"><path d=\"M7 0a.5.5 0 0 1 .5.5v6h6a.5.5 0 1 1 0 1h-6v6a.5.5 0 1 1-1 0v-6h-6a.5.5 0 0 1 0-1h6v-6A.5.5 0 0 1 7 0Z\" fill=\"currentColor\"/></svg>")))
|
||||||
|
(div :class "kg-toggle-content" (or content ""))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Audio card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-audio (&key src title duration thumbnail)
|
||||||
|
(div :class "kg-card kg-audio-card"
|
||||||
|
(if thumbnail
|
||||||
|
(img :src thumbnail :alt "audio-thumbnail" :class "kg-audio-thumbnail")
|
||||||
|
(div :class "kg-audio-thumbnail placeholder"
|
||||||
|
(~rich-text :html "<svg viewBox=\"0 0 24 24\"><path d=\"M2 12C2 6.48 6.48 2 12 2s10 4.48 10 10-4.48 10-10 10S2 17.52 2 12zm7.5 5.25L16 12 9.5 6.75v10.5z\" fill=\"currentColor\"/></svg>")))
|
||||||
|
(div :class "kg-audio-player-container"
|
||||||
|
(div :class "kg-audio-title" (or title ""))
|
||||||
|
(div :class "kg-audio-player"
|
||||||
|
(button :class "kg-audio-play-icon"
|
||||||
|
(~rich-text :html "<svg viewBox=\"0 0 24 24\"><path d=\"M8 5v14l11-7z\" fill=\"currentColor\"/></svg>"))
|
||||||
|
(div :class "kg-audio-current-time" "0:00")
|
||||||
|
(div :class "kg-audio-time" (str "/ " (or duration "0:00")))
|
||||||
|
(input :type "range" :class "kg-audio-seek-slider" :max "100" :value "0")
|
||||||
|
(button :class "kg-audio-playback-rate" "1×")
|
||||||
|
(button :class "kg-audio-unmute-icon"
|
||||||
|
(~rich-text :html "<svg viewBox=\"0 0 24 24\"><path d=\"M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02zM14 3.23v2.06c2.89.86 5 3.54 5 6.71s-2.11 5.85-5 6.71v2.06c4.01-.91 7-4.49 7-8.77s-2.99-7.86-7-8.77z\" fill=\"currentColor\"/></svg>"))
|
||||||
|
(input :type "range" :class "kg-audio-volume-slider" :max "100" :value "100")))
|
||||||
|
(audio :src src :preload "metadata")))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Video card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-video (&key src caption width thumbnail loop)
|
||||||
|
(figure :class (str "kg-card kg-video-card"
|
||||||
|
(if (= width "wide") " kg-width-wide"
|
||||||
|
(if (= width "full") " kg-width-full" "")))
|
||||||
|
(div :class "kg-video-container"
|
||||||
|
(video :src src :controls true :preload "metadata"
|
||||||
|
:poster (or thumbnail nil) :loop (or loop nil)))
|
||||||
|
(when caption (figcaption caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; File card
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-file (&key src filename title filesize caption)
|
||||||
|
(div :class "kg-card kg-file-card"
|
||||||
|
(a :class "kg-file-card-container" :href src :download (or filename "")
|
||||||
|
(div :class "kg-file-card-contents"
|
||||||
|
(div :class "kg-file-card-title" (or title filename ""))
|
||||||
|
(when filesize (div :class "kg-file-card-filesize" filesize)))
|
||||||
|
(div :class "kg-file-card-icon"
|
||||||
|
(~rich-text :html "<svg viewBox=\"0 0 24 24\"><path d=\"M19 9h-4V3H9v6H5l7 7 7-7zM5 18v2h14v-2H5z\" fill=\"currentColor\"/></svg>")))
|
||||||
|
(when caption (div :class "kg-file-card-caption" caption))))
|
||||||
|
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
;; Paywall marker
|
||||||
|
;; ---------------------------------------------------------------------------
|
||||||
|
(defcomp ~kg-paywall ()
|
||||||
|
(~rich-text :html "<!--members-only-->"))
|
||||||
@@ -716,11 +716,13 @@ def _post_main_panel_sx(ctx: dict) -> str:
|
|||||||
|
|
||||||
fi = post.get("feature_image")
|
fi = post.get("feature_image")
|
||||||
html_content = post.get("html", "")
|
html_content = post.get("html", "")
|
||||||
|
sx_content = post.get("sx_content", "")
|
||||||
|
|
||||||
return sx_call("blog-detail-main",
|
return sx_call("blog-detail-main",
|
||||||
draft=SxExpr(draft_sx) if draft_sx else None,
|
draft=SxExpr(draft_sx) if draft_sx else None,
|
||||||
chrome=SxExpr(chrome_sx) if chrome_sx else None,
|
chrome=SxExpr(chrome_sx) if chrome_sx else None,
|
||||||
feature_image=fi, html_content=html_content,
|
feature_image=fi, html_content=html_content,
|
||||||
|
sx_content=SxExpr(sx_content) if sx_content else None,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -770,10 +772,13 @@ def _post_meta_sx(ctx: dict) -> str:
|
|||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
def _home_main_panel_sx(ctx: dict) -> str:
|
def _home_main_panel_sx(ctx: dict) -> str:
|
||||||
"""Home page content — renders the Ghost page HTML."""
|
"""Home page content — renders the Ghost page HTML or sx_content."""
|
||||||
post = ctx.get("post") or {}
|
post = ctx.get("post") or {}
|
||||||
html = post.get("html", "")
|
html = post.get("html", "")
|
||||||
return sx_call("blog-home-main", html_content=html)
|
sx_content = post.get("sx_content", "")
|
||||||
|
return sx_call("blog-home-main",
|
||||||
|
html_content=html,
|
||||||
|
sx_content=SxExpr(sx_content) if sx_content else None)
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
@@ -1372,6 +1377,68 @@ async def render_post_data_oob(ctx: dict) -> str:
|
|||||||
return oob_page_sx(oobs=admin_hdr_oob, content=content)
|
return oob_page_sx(oobs=admin_hdr_oob, content=content)
|
||||||
|
|
||||||
|
|
||||||
|
# ---- Post preview ----
|
||||||
|
|
||||||
|
def _preview_main_panel_sx(ctx: dict) -> str:
|
||||||
|
"""Build the preview panel with 4 expandable sections."""
|
||||||
|
sections: list[str] = []
|
||||||
|
|
||||||
|
# 1. Prettified SX source
|
||||||
|
sx_pretty = ctx.get("sx_pretty", "")
|
||||||
|
if sx_pretty:
|
||||||
|
sections.append(sx_call("blog-preview-section",
|
||||||
|
title="S-Expression Source",
|
||||||
|
content=SxExpr(sx_pretty),
|
||||||
|
))
|
||||||
|
|
||||||
|
# 2. Prettified Lexical JSON
|
||||||
|
json_pretty = ctx.get("json_pretty", "")
|
||||||
|
if json_pretty:
|
||||||
|
sections.append(sx_call("blog-preview-section",
|
||||||
|
title="Lexical JSON",
|
||||||
|
content=SxExpr(json_pretty),
|
||||||
|
))
|
||||||
|
|
||||||
|
# 3. SX rendered preview
|
||||||
|
sx_rendered = ctx.get("sx_rendered", "")
|
||||||
|
if sx_rendered:
|
||||||
|
rendered_sx = f'(div :class "blog-content prose max-w-none" (raw! {sx_serialize(sx_rendered)}))'
|
||||||
|
sections.append(sx_call("blog-preview-section",
|
||||||
|
title="SX Rendered",
|
||||||
|
content=SxExpr(rendered_sx),
|
||||||
|
))
|
||||||
|
|
||||||
|
# 4. Lexical rendered preview
|
||||||
|
lex_rendered = ctx.get("lex_rendered", "")
|
||||||
|
if lex_rendered:
|
||||||
|
rendered_sx = f'(div :class "blog-content prose max-w-none" (raw! {sx_serialize(lex_rendered)}))'
|
||||||
|
sections.append(sx_call("blog-preview-section",
|
||||||
|
title="Lexical Rendered",
|
||||||
|
content=SxExpr(rendered_sx),
|
||||||
|
))
|
||||||
|
|
||||||
|
if not sections:
|
||||||
|
return '(div :class "p-8 text-stone-500" "No content to preview.")'
|
||||||
|
|
||||||
|
inner = " ".join(sections)
|
||||||
|
return sx_call("blog-preview-panel", sections=SxExpr(f"(<> {inner})"))
|
||||||
|
|
||||||
|
|
||||||
|
async def render_post_preview_page(ctx: dict) -> str:
|
||||||
|
root_hdr = root_header_sx(ctx)
|
||||||
|
post_hdr = _post_header_sx(ctx)
|
||||||
|
admin_hdr = _post_admin_header_sx(ctx, selected="preview")
|
||||||
|
header_rows = "(<> " + root_hdr + " " + post_hdr + " " + admin_hdr + ")"
|
||||||
|
content = _preview_main_panel_sx(ctx)
|
||||||
|
return full_page_sx(ctx, header_rows=header_rows, content=content)
|
||||||
|
|
||||||
|
|
||||||
|
async def render_post_preview_oob(ctx: dict) -> str:
|
||||||
|
admin_hdr_oob = _post_admin_header_sx(ctx, oob=True, selected="preview")
|
||||||
|
content = _preview_main_panel_sx(ctx)
|
||||||
|
return oob_page_sx(oobs=admin_hdr_oob, content=content)
|
||||||
|
|
||||||
|
|
||||||
# ---- Post entries ----
|
# ---- Post entries ----
|
||||||
|
|
||||||
async def render_post_entries_page(ctx: dict) -> str:
|
async def render_post_entries_page(ctx: dict) -> str:
|
||||||
|
|||||||
278
blog/tests/test_lexical_to_sx.py
Normal file
278
blog/tests/test_lexical_to_sx.py
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
"""Unit tests for the Lexical JSON → sx converter."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# The lexical_to_sx module is standalone (only depends on json).
|
||||||
|
# Import it directly to avoid pulling in the full blog app.
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "bp", "blog", "ghost"))
|
||||||
|
from lexical_to_sx import lexical_to_sx
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _doc(*children):
|
||||||
|
"""Wrap children in a minimal Lexical document."""
|
||||||
|
return {"root": {"children": list(children)}}
|
||||||
|
|
||||||
|
|
||||||
|
def _text(s, fmt=0):
|
||||||
|
return {"type": "text", "text": s, "format": fmt}
|
||||||
|
|
||||||
|
|
||||||
|
def _paragraph(*children):
|
||||||
|
return {"type": "paragraph", "children": list(children)}
|
||||||
|
|
||||||
|
|
||||||
|
def _heading(tag, *children):
|
||||||
|
return {"type": "heading", "tag": tag, "children": list(children)}
|
||||||
|
|
||||||
|
|
||||||
|
def _link(url, *children):
|
||||||
|
return {"type": "link", "url": url, "children": list(children)}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Basic text
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestBasicText:
|
||||||
|
def test_empty_doc(self):
|
||||||
|
result = lexical_to_sx(_doc())
|
||||||
|
assert result == '(<> (p ""))'
|
||||||
|
|
||||||
|
def test_single_paragraph(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("Hello"))))
|
||||||
|
assert result == '(p "Hello")'
|
||||||
|
|
||||||
|
def test_two_paragraphs(self):
|
||||||
|
result = lexical_to_sx(_doc(
|
||||||
|
_paragraph(_text("Hello")),
|
||||||
|
_paragraph(_text("World")),
|
||||||
|
))
|
||||||
|
assert "(p " in result
|
||||||
|
assert '"Hello"' in result
|
||||||
|
assert '"World"' in result
|
||||||
|
|
||||||
|
def test_heading(self):
|
||||||
|
result = lexical_to_sx(_doc(_heading("h2", _text("Title"))))
|
||||||
|
assert result == '(h2 "Title")'
|
||||||
|
|
||||||
|
def test_h3(self):
|
||||||
|
result = lexical_to_sx(_doc(_heading("h3", _text("Sub"))))
|
||||||
|
assert result == '(h3 "Sub")'
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Formatting
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestFormatting:
|
||||||
|
def test_bold(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("hi", 1))))
|
||||||
|
assert "(strong " in result
|
||||||
|
|
||||||
|
def test_italic(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("hi", 2))))
|
||||||
|
assert "(em " in result
|
||||||
|
|
||||||
|
def test_strikethrough(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("hi", 4))))
|
||||||
|
assert "(s " in result
|
||||||
|
|
||||||
|
def test_bold_italic(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("hi", 3))))
|
||||||
|
assert "(strong " in result
|
||||||
|
assert "(em " in result
|
||||||
|
|
||||||
|
def test_code(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("x", 16))))
|
||||||
|
assert "(code " in result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Links
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestLinks:
|
||||||
|
def test_link(self):
|
||||||
|
result = lexical_to_sx(_doc(
|
||||||
|
_paragraph(_link("https://example.com", _text("click")))
|
||||||
|
))
|
||||||
|
assert '(a :href "https://example.com"' in result
|
||||||
|
assert '"click"' in result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Lists
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestLists:
|
||||||
|
def test_unordered_list(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "list", "listType": "bullet",
|
||||||
|
"children": [
|
||||||
|
{"type": "listitem", "children": [_text("one")]},
|
||||||
|
{"type": "listitem", "children": [_text("two")]},
|
||||||
|
]
|
||||||
|
}))
|
||||||
|
assert "(ul " in result
|
||||||
|
assert "(li " in result
|
||||||
|
assert '"one"' in result
|
||||||
|
|
||||||
|
def test_ordered_list(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "list", "listType": "number",
|
||||||
|
"children": [
|
||||||
|
{"type": "listitem", "children": [_text("first")]},
|
||||||
|
]
|
||||||
|
}))
|
||||||
|
assert "(ol " in result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Block elements
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestBlocks:
|
||||||
|
def test_hr(self):
|
||||||
|
result = lexical_to_sx(_doc({"type": "horizontalrule"}))
|
||||||
|
assert result == "(hr)"
|
||||||
|
|
||||||
|
def test_quote(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "quote", "children": [_text("wisdom")]
|
||||||
|
}))
|
||||||
|
assert '(blockquote "wisdom")' == result
|
||||||
|
|
||||||
|
def test_codeblock(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "codeblock", "code": "print('hi')", "language": "python"
|
||||||
|
}))
|
||||||
|
assert '(pre (code :class "language-python"' in result
|
||||||
|
assert "print" in result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Cards
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestCards:
|
||||||
|
def test_image(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "image", "src": "photo.jpg", "alt": "test"
|
||||||
|
}))
|
||||||
|
assert '(~kg-image :src "photo.jpg" :alt "test")' == result
|
||||||
|
|
||||||
|
def test_image_wide_with_caption(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "image", "src": "p.jpg", "alt": "",
|
||||||
|
"cardWidth": "wide", "caption": "Fig 1"
|
||||||
|
}))
|
||||||
|
assert ':width "wide"' in result
|
||||||
|
assert ':caption "Fig 1"' in result
|
||||||
|
|
||||||
|
def test_bookmark(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "bookmark", "url": "https://example.com",
|
||||||
|
"metadata": {"title": "Example", "description": "A site"}
|
||||||
|
}))
|
||||||
|
assert "(~kg-bookmark " in result
|
||||||
|
assert ':url "https://example.com"' in result
|
||||||
|
assert ':title "Example"' in result
|
||||||
|
|
||||||
|
def test_callout(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "callout", "backgroundColor": "blue",
|
||||||
|
"calloutEmoji": "💡",
|
||||||
|
"children": [_text("Note")]
|
||||||
|
}))
|
||||||
|
assert "(~kg-callout " in result
|
||||||
|
assert ':color "blue"' in result
|
||||||
|
|
||||||
|
def test_button(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "button", "buttonText": "Click",
|
||||||
|
"buttonUrl": "https://example.com"
|
||||||
|
}))
|
||||||
|
assert "(~kg-button " in result
|
||||||
|
assert ':text "Click"' in result
|
||||||
|
|
||||||
|
def test_toggle(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "toggle", "heading": "FAQ",
|
||||||
|
"children": [_text("Answer")]
|
||||||
|
}))
|
||||||
|
assert "(~kg-toggle " in result
|
||||||
|
assert ':heading "FAQ"' in result
|
||||||
|
|
||||||
|
def test_html(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "html", "html": "<div>custom</div>"
|
||||||
|
}))
|
||||||
|
assert "(~kg-html " in result
|
||||||
|
|
||||||
|
def test_embed(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "embed", "html": "<iframe></iframe>",
|
||||||
|
"caption": "Video"
|
||||||
|
}))
|
||||||
|
assert "(~kg-embed " in result
|
||||||
|
assert ':caption "Video"' in result
|
||||||
|
|
||||||
|
def test_video(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "video", "src": "v.mp4", "cardWidth": "wide"
|
||||||
|
}))
|
||||||
|
assert "(~kg-video " in result
|
||||||
|
assert ':width "wide"' in result
|
||||||
|
|
||||||
|
def test_audio(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "audio", "src": "s.mp3", "title": "Song", "duration": 195
|
||||||
|
}))
|
||||||
|
assert "(~kg-audio " in result
|
||||||
|
assert ':duration "3:15"' in result
|
||||||
|
|
||||||
|
def test_file(self):
|
||||||
|
result = lexical_to_sx(_doc({
|
||||||
|
"type": "file", "src": "f.pdf", "fileName": "doc.pdf",
|
||||||
|
"fileSize": 2100000
|
||||||
|
}))
|
||||||
|
assert "(~kg-file " in result
|
||||||
|
assert ':filename "doc.pdf"' in result
|
||||||
|
assert "MB" in result
|
||||||
|
|
||||||
|
def test_paywall(self):
|
||||||
|
result = lexical_to_sx(_doc({"type": "paywall"}))
|
||||||
|
assert result == "(~kg-paywall)"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Escaping
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestEscaping:
|
||||||
|
def test_quotes_in_text(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text('He said "hello"'))))
|
||||||
|
assert '\\"hello\\"' in result
|
||||||
|
|
||||||
|
def test_backslash_in_text(self):
|
||||||
|
result = lexical_to_sx(_doc(_paragraph(_text("a\\b"))))
|
||||||
|
assert "a\\\\b" in result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# JSON string input
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestJsonString:
|
||||||
|
def test_string_input(self):
|
||||||
|
import json
|
||||||
|
doc = _doc(_paragraph(_text("test")))
|
||||||
|
result = lexical_to_sx(json.dumps(doc))
|
||||||
|
assert '(p "test")' == result
|
||||||
@@ -87,6 +87,7 @@ class PostDTO:
|
|||||||
is_page: bool = False
|
is_page: bool = False
|
||||||
feature_image: str | None = None
|
feature_image: str | None = None
|
||||||
html: str | None = None
|
html: str | None = None
|
||||||
|
sx_content: str | None = None
|
||||||
excerpt: str | None = None
|
excerpt: str | None = None
|
||||||
custom_excerpt: str | None = None
|
custom_excerpt: str | None = None
|
||||||
published_at: datetime | None = None
|
published_at: datetime | None = None
|
||||||
|
|||||||
2290
shared/static/scripts/sx-editor.js
Normal file
2290
shared/static/scripts/sx-editor.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -6,6 +6,8 @@ page elements (headers, search, etc.) from template context.
|
|||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
from pathlib import Path
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from markupsafe import escape
|
from markupsafe import escape
|
||||||
@@ -203,6 +205,7 @@ def post_admin_header_sx(ctx: dict, slug: str, *, oob: bool = False,
|
|||||||
("cart_url", f"/{slug}/admin/payments/", "payments"),
|
("cart_url", f"/{slug}/admin/payments/", "payments"),
|
||||||
("blog_url", f"/{slug}/admin/entries/", "entries"),
|
("blog_url", f"/{slug}/admin/entries/", "entries"),
|
||||||
("blog_url", f"/{slug}/admin/data/", "data"),
|
("blog_url", f"/{slug}/admin/data/", "data"),
|
||||||
|
("blog_url", f"/{slug}/admin/preview/", "preview"),
|
||||||
("blog_url", f"/{slug}/admin/edit/", "edit"),
|
("blog_url", f"/{slug}/admin/edit/", "edit"),
|
||||||
("blog_url", f"/{slug}/admin/settings/", "settings"),
|
("blog_url", f"/{slug}/admin/settings/", "settings"),
|
||||||
]
|
]
|
||||||
@@ -482,8 +485,8 @@ details.group{{overflow:hidden}}details.group>summary{{list-style:none}}details.
|
|||||||
<body class="bg-stone-50 text-stone-900">
|
<body class="bg-stone-50 text-stone-900">
|
||||||
<script type="text/sx" data-components>{component_defs}</script>
|
<script type="text/sx" data-components>{component_defs}</script>
|
||||||
<script type="text/sx" data-mount="body">{page_sx}</script>
|
<script type="text/sx" data-mount="body">{page_sx}</script>
|
||||||
<script src="{asset_url}/scripts/sx.js?v=20260301d"></script>
|
<script src="{asset_url}/scripts/sx.js?v={sx_js_hash}"></script>
|
||||||
<script src="{asset_url}/scripts/body.js"></script>
|
<script src="{asset_url}/scripts/body.js?v={body_js_hash}"></script>
|
||||||
</body>
|
</body>
|
||||||
</html>"""
|
</html>"""
|
||||||
|
|
||||||
@@ -544,9 +547,25 @@ def sx_page(ctx: dict, page_sx: str, *,
|
|||||||
page_sx=page_sx,
|
page_sx=page_sx,
|
||||||
sx_css=sx_css,
|
sx_css=sx_css,
|
||||||
sx_css_classes=sx_css_classes,
|
sx_css_classes=sx_css_classes,
|
||||||
|
sx_js_hash=_script_hash("sx.js"),
|
||||||
|
body_js_hash=_script_hash("body.js"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_SCRIPT_HASH_CACHE: dict[str, str] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _script_hash(filename: str) -> str:
|
||||||
|
"""Compute MD5 hash of a static script file, cached for process lifetime."""
|
||||||
|
if filename not in _SCRIPT_HASH_CACHE:
|
||||||
|
try:
|
||||||
|
data = (Path("static") / "scripts" / filename).read_bytes()
|
||||||
|
_SCRIPT_HASH_CACHE[filename] = hashlib.md5(data).hexdigest()[:8]
|
||||||
|
except Exception:
|
||||||
|
_SCRIPT_HASH_CACHE[filename] = "dev"
|
||||||
|
return _SCRIPT_HASH_CACHE[filename]
|
||||||
|
|
||||||
|
|
||||||
def _get_csrf_token() -> str:
|
def _get_csrf_token() -> str:
|
||||||
"""Get the CSRF token from the current request context."""
|
"""Get the CSRF token from the current request context."""
|
||||||
try:
|
try:
|
||||||
|
|||||||
149
shared/sx/prettify.py
Normal file
149
shared/sx/prettify.py
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
"""
|
||||||
|
Prettifiers that produce s-expression source for syntax-highlighted DOM.
|
||||||
|
|
||||||
|
``sx_to_pretty_sx(source)`` — parse sx, emit sx that renders as coloured DOM.
|
||||||
|
``json_to_pretty_sx(json_str)`` — parse JSON, emit sx that renders as coloured DOM.
|
||||||
|
|
||||||
|
The output is *not* HTML — it's sx source that, when evaluated and rendered by
|
||||||
|
the sx engine, produces ``<pre>`` blocks with ``<span>`` elements carrying
|
||||||
|
CSS classes for syntax highlighting.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from .parser import parse, serialize
|
||||||
|
from .types import Keyword, Symbol, NIL
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers — build sx AST (lists), then serialize once at the end
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _span(cls: str, text: str) -> list:
    """Return the sx AST node ``(span :class "cls" "text")``."""
    node: list = [Symbol("span"), Keyword("class")]
    node.extend((cls, text))
    return node
|
||||||
|
|
||||||
|
|
||||||
|
def _str_display(cls: str, value: str) -> list:
    """Build a span that shows *value* wrapped in curly quotation marks.

    Curly quotes are chosen as the display delimiters so that they can
    never collide with sx's own double-quote string syntax.
    """
    quote_cls = f"{cls}-q"
    opening = [Symbol("span"), Keyword("class"), quote_cls, "\u201c"]
    closing = [Symbol("span"), Keyword("class"), quote_cls, "\u201d"]
    return [Symbol("span"), Keyword("class"), cls, opening, value, closing]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# S-expression prettifier
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def sx_to_pretty_sx(source: str) -> str:
    """Parse *source* as sx and return sx source rendering highlighted DOM.

    If *source* does not parse, the raw text is shown verbatim inside the
    ``<pre>`` block instead of highlighted structure.
    """
    try:
        tree = parse(source)
    except Exception:
        # Unparseable input: fall back to the raw, unhighlighted text.
        body: Any = source
    else:
        body = _sx_node(tree, depth=0)
    return serialize([Symbol("pre"), Keyword("class"), "sx-pretty", body])
|
||||||
|
|
||||||
|
|
||||||
|
def _sx_node(expr: Any, depth: int) -> list:
    """Convert one parsed sx value into an sx AST node for pretty display."""
    if isinstance(expr, list):
        if expr:
            return _sx_list(expr, depth)
        # Empty list renders as bare "()" with no wrapping div.
        return [_span("sx-paren", "("), _span("sx-paren", ")")]
    if isinstance(expr, Symbol):
        return _span("sx-sym", expr.name)
    if isinstance(expr, Keyword):
        return _span("sx-kw", ":" + expr.name)
    if isinstance(expr, str):
        return _str_display("sx-str", expr)
    # bool must be tested before int/float — True/False are int subclasses.
    if isinstance(expr, bool):
        return _span("sx-bool", "true" if expr else "false")
    if isinstance(expr, (int, float)):
        return _span("sx-num", str(expr))
    if expr is None or expr is NIL:
        return _span("sx-sym", "nil")
    # Unknown value type: render its string form as a symbol.
    return _span("sx-sym", str(expr))
|
||||||
|
|
||||||
|
|
||||||
|
def _sx_list(items: list, depth: int) -> list:
    """Render a non-empty sx list as an indented div of highlighted children."""
    rendered = [_sx_node(child, depth + 1) for child in items]
    return [
        Symbol("div"), Keyword("class"), "sx-list",
        # Indent by nesting depth, 16px per level.
        Keyword("style"), f"margin-left: {depth * 16}px",
        _span("sx-paren", "("),
        *rendered,
        _span("sx-paren", ")"),
    ]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# JSON prettifier
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def json_to_pretty_sx(json_str: str) -> str:
    """Parse *json_str* as JSON and return sx source rendering highlighted DOM.

    Malformed JSON (or a non-string argument such as ``None``) falls back to
    displaying the raw input verbatim, with ``None`` shown as empty text.
    """
    try:
        parsed = json.loads(json_str)
    except (json.JSONDecodeError, TypeError):
        return serialize([Symbol("pre"), Keyword("class"), "json-pretty",
                          json_str or ""])
    return serialize([Symbol("pre"), Keyword("class"), "json-pretty",
                      _json_node(parsed, depth=0)])
|
||||||
|
|
||||||
|
|
||||||
|
def _json_node(val: Any, depth: int) -> list:
    """Convert one decoded JSON value into an sx AST node for pretty display."""
    if isinstance(val, dict):
        return _json_object(val, depth)
    if isinstance(val, list):
        return _json_array(val, depth)
    if isinstance(val, str):
        return _str_display("json-str", val)
    if val is None:
        return _span("json-lit", "null")
    # bool before int/float — True/False are int subclasses and must render
    # as JSON literals, not numbers.
    if isinstance(val, bool):
        return _span("json-lit", "true" if val else "false")
    if isinstance(val, (int, float)):
        return _span("json-num", str(val))
    # Anything else: show its string form as if it were a JSON string.
    return _span("json-str", str(val))
|
||||||
|
|
||||||
|
|
||||||
|
def _json_object(obj: dict, depth: int) -> list:
    """Render a JSON object as an indented block of key/value rows."""
    if not obj:
        # Empty object renders as bare "{}" with no wrapping div.
        return [_span("json-brace", "{"), _span("json-brace", "}")]
    rows = [
        [Symbol("div"), Keyword("class"), "json-field",
         _str_display("json-key", key), ": ", _json_node(val, depth + 1)]
        for key, val in obj.items()
    ]
    return [
        Symbol("div"), Keyword("class"), "json-obj",
        # Indent by nesting depth, 16px per level.
        Keyword("style"), f"margin-left: {depth * 16}px",
        _span("json-brace", "{"),
        *rows,
        _span("json-brace", "}"),
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def _json_array(arr: list, depth: int) -> list:
    """Render a JSON array as an indented block of highlighted items."""
    if not arr:
        # Empty array renders as bare "[]" with no wrapping div.
        return [_span("json-bracket", "["), _span("json-bracket", "]")]
    rendered = [_json_node(item, depth + 1) for item in arr]
    return [
        Symbol("div"), Keyword("class"), "json-arr",
        # Indent by nesting depth, 16px per level.
        Keyword("style"), f"margin-left: {depth * 16}px",
        _span("json-bracket", "["),
        *rendered,
        _span("json-bracket", "]"),
    ]
|
||||||
Reference in New Issue
Block a user