Replace inter-service _handlers dicts with declarative sx defquery/defaction
The inter-service data layer (fetch_data/call_action) was the least structured part of the codebase — Python _handlers dicts with ad-hoc param extraction scattered across 16 route files. This replaces them with declarative .sx query/action definitions that make the entire inter-service protocol self-describing and greppable. Infrastructure: - defquery/defaction special forms in the sx evaluator - Query/action registry with load, lookup, and schema introspection - Query executor using async_eval with I/O primitives - Blueprint factories (create_data_blueprint/create_action_blueprint) with sx-first dispatch and Python fallback - /internal/schema endpoint on every service - parse-datetime and split-ids primitives for type coercion Service extractions: - LikesService (toggle, is_liked, liked_slugs, liked_ids) - PageConfigService (ensure, get_by_container, get_by_id, get_batch, update) - RelationsService (wraps module-level functions) - AccountDataService (user_by_email, newsletters) - CartItemsService, MarketDataService (raw SQLAlchemy lookups) 50 of 54 handlers converted to sx, 4 Python fallbacks remain (ghost-sync/push-member, clear-cart-for-order, create-order). Net: -1,383 lines Python, +251 lines modified. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
31
shared/contracts/likes.py
Normal file
31
shared/contracts/likes.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Protocol for the Likes domain service."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Protocol, runtime_checkable
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
@runtime_checkable
class LikesService(Protocol):
    """Structural protocol for the Likes domain service.

    Implementations receive the caller's ``AsyncSession`` plus keyword-only
    arguments. A like target is addressed by ``target_type`` together with
    either a ``target_slug`` or a numeric ``target_id``.
    """

    async def is_liked(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
        target_slug: str | None = None, target_id: int | None = None,
    ) -> bool:
        """Return True if the user has a like on the given target."""
        ...

    async def liked_slugs(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
    ) -> list[str]:
        """Return the slugs of all targets of this type liked by the user."""
        ...

    async def liked_ids(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
    ) -> list[int]:
        """Return the numeric ids of all targets of this type liked by the user."""
        ...

    async def toggle(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
        target_slug: str | None = None, target_id: int | None = None,
    ) -> bool:
        """Toggle a like; return True if now liked, False if unliked."""
        ...
|
||||
@@ -117,6 +117,15 @@ def create_base_app(
|
||||
load_shared_components()
|
||||
load_relation_registry()
|
||||
|
||||
# Load defquery/defaction definitions from {service}/queries.sx and actions.sx
|
||||
from shared.sx.query_registry import load_service_protocols
|
||||
_app_root = Path(os.getcwd())
|
||||
load_service_protocols(name, str(_app_root))
|
||||
|
||||
# Register /internal/schema endpoint for protocol introspection
|
||||
from shared.infrastructure.schema_blueprint import create_schema_blueprint
|
||||
app.register_blueprint(create_schema_blueprint(name))
|
||||
|
||||
# Load CSS registry (tw.css → class-to-rule lookup for on-demand CSS)
|
||||
from shared.sx.css_registry import load_css_registry, registry_loaded
|
||||
_styles = BASE_DIR / "static" / "styles"
|
||||
|
||||
58
shared/infrastructure/protocol_manifest.py
Normal file
58
shared/infrastructure/protocol_manifest.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""Protocol manifest — aggregates /internal/schema from all services.
|
||||
|
||||
Can be used as a CLI tool or imported for dev-mode inspection.
|
||||
|
||||
Usage::
|
||||
|
||||
python -m shared.infrastructure.protocol_manifest
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
from shared.infrastructure.data_client import fetch_data
|
||||
|
||||
|
||||
# Service names that have inter-service protocols
|
||||
_SERVICES = [
|
||||
"blog", "market", "cart", "events", "account",
|
||||
"likes", "relations", "orders",
|
||||
]
|
||||
|
||||
|
||||
async def fetch_service_schema(service: str) -> dict[str, Any] | None:
    """Fetch the ``/internal/schema`` manifest from one service.

    Returns the decoded JSON document, or ``None`` when the service is
    unreachable, times out (3s budget), or answers with a non-200 status.
    """
    try:
        import aiohttp
        from shared.infrastructure.urls import service_url

        endpoint = service_url(service, "/internal/schema")
        deadline = aiohttp.ClientTimeout(total=3)
        async with aiohttp.ClientSession() as http:
            async with http.get(endpoint, timeout=deadline) as response:
                if response.status != 200:
                    return None
                return await response.json()
    except Exception:
        # Best effort: any transport/parse failure just means "unavailable".
        return None
|
||||
|
||||
|
||||
async def generate_manifest() -> dict[str, Any]:
    """Fetch schemas from all services and produce a unified protocol map."""
    schemas = await asyncio.gather(
        *(fetch_service_schema(name) for name in _SERVICES),
        return_exceptions=True,
    )
    services: dict[str, Any] = {}
    for name, schema in zip(_SERVICES, schemas):
        if isinstance(schema, dict):
            services[name] = schema
        elif isinstance(schema, Exception):
            services[name] = {"error": str(schema)}
        else:
            # fetch_service_schema returned None (service unreachable).
            services[name] = {"error": "unavailable"}
    return {"services": services}
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: print the aggregated protocol manifest as pretty JSON.
    m = asyncio.run(generate_manifest())
    print(json.dumps(m, indent=2))
|
||||
127
shared/infrastructure/query_blueprint.py
Normal file
127
shared/infrastructure/query_blueprint.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
Blueprint factories for sx-dispatched data and action routes.
|
||||
|
||||
Replaces per-service boilerplate in ``bp/data/routes.py`` and
|
||||
``bp/actions/routes.py`` by dispatching to defquery/defaction definitions
|
||||
from the sx query registry. Falls back to Python ``_handlers`` dicts
|
||||
for queries/actions not yet converted.
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.infrastructure.query_blueprint import (
|
||||
create_data_blueprint, create_action_blueprint,
|
||||
)
|
||||
|
||||
# In service's bp/data/routes.py:
|
||||
def register() -> Blueprint:
|
||||
bp, _handlers = create_data_blueprint("events")
|
||||
# Optional Python fallback handlers:
|
||||
# _handlers["some-query"] = _some_python_handler
|
||||
return bp
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Callable, Awaitable
|
||||
|
||||
from quart import Blueprint, g, jsonify, request
|
||||
|
||||
logger = logging.getLogger("sx.query_blueprint")
|
||||
|
||||
|
||||
def create_data_blueprint(
    service_name: str,
) -> tuple[Blueprint, dict[str, Callable[[], Awaitable[Any]]]]:
    """Create a data blueprint that dispatches to sx queries with Python fallback.

    Returns (blueprint, python_handlers_dict) so the caller can register
    Python fallback handlers for queries not yet converted to sx.

    Dispatch order for ``GET /internal/data/<query_name>``:

    1. sx defquery registry (executed via the query executor)
    2. Python fallback handlers registered in the returned dict
    3. 404 for unknown queries

    Failures in either path are logged and returned as a JSON 500 body,
    matching the error contract of ``create_action_blueprint``.
    """
    from shared.infrastructure.data_client import DATA_HEADER

    bp = Blueprint("data", __name__, url_prefix="/internal/data")

    _handlers: dict[str, Callable[[], Awaitable[Any]]] = {}

    @bp.before_request
    async def _require_data_header():
        # Two gates: the data-client marker header, then internal-auth validation.
        if not request.headers.get(DATA_HEADER):
            return jsonify({"error": "forbidden"}), 403
        from shared.infrastructure.internal_auth import validate_internal_request
        if not validate_internal_request():
            return jsonify({"error": "forbidden"}), 403

    @bp.get("/<query_name>")
    async def handle_query(query_name: str):
        # 1. Check sx query registry first
        from shared.sx.query_registry import get_query
        from shared.sx.query_executor import execute_query

        qdef = get_query(service_name, query_name)
        if qdef is not None:
            try:
                result = await execute_query(qdef, dict(request.args))
                return jsonify(result)
            except Exception as exc:
                # Mirror the action blueprint: log and return a structured
                # JSON error instead of an opaque, unlogged framework 500.
                logger.exception("SX query %s:%s failed", service_name, query_name)
                return jsonify({"error": str(exc)}), 500

        # 2. Fall back to Python handlers
        handler = _handlers.get(query_name)
        if handler is not None:
            try:
                result = await handler()
                return jsonify(result)
            except Exception as exc:
                logger.exception("Query %s failed", query_name)
                return jsonify({"error": str(exc)}), 500

        return jsonify({"error": "unknown query"}), 404

    return bp, _handlers
|
||||
|
||||
|
||||
def create_action_blueprint(
    service_name: str,
) -> tuple[Blueprint, dict[str, Callable[[], Awaitable[Any]]]]:
    """Create an action blueprint that dispatches to sx actions with Python fallback.

    Returns (blueprint, python_handlers_dict) so the caller can register
    Python fallback handlers for actions not yet converted to sx.
    """
    from shared.infrastructure.actions import ACTION_HEADER

    bp = Blueprint("actions", __name__, url_prefix="/internal/actions")

    _handlers: dict[str, Callable[[], Awaitable[Any]]] = {}

    @bp.before_request
    async def _require_action_header():
        # Two gates: the action marker header, then internal-auth validation.
        if not request.headers.get(ACTION_HEADER):
            return jsonify({"error": "forbidden"}), 403
        from shared.infrastructure.internal_auth import validate_internal_request
        if not validate_internal_request():
            return jsonify({"error": "forbidden"}), 403

    @bp.post("/<action_name>")
    async def handle_action(action_name: str):
        from shared.sx.query_executor import execute_action
        from shared.sx.query_registry import get_action

        # 1. The sx action registry takes precedence over Python fallbacks.
        adef = get_action(service_name, action_name)
        if adef is not None:
            try:
                payload = await request.get_json(force=True) or {}
                return jsonify(await execute_action(adef, payload))
            except Exception as exc:
                logger.exception("SX action %s:%s failed", service_name, action_name)
                return jsonify({"error": str(exc)}), 500

        # 2. Python fallback handlers for not-yet-converted actions.
        fallback = _handlers.get(action_name)
        if fallback is None:
            return jsonify({"error": "unknown action"}), 404
        try:
            return jsonify(await fallback())
        except Exception as exc:
            logger.exception("Action %s failed", action_name)
            return jsonify({"error": str(exc)}), 500

    return bp, _handlers
|
||||
31
shared/infrastructure/schema_blueprint.py
Normal file
31
shared/infrastructure/schema_blueprint.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Schema endpoint for inter-service protocol introspection.
|
||||
|
||||
Every service exposes ``GET /internal/schema`` which returns a JSON
|
||||
manifest of all defquery and defaction definitions with their parameter
|
||||
signatures and docstrings.
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.infrastructure.schema_blueprint import create_schema_blueprint
|
||||
app.register_blueprint(create_schema_blueprint("events"))
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import Blueprint, jsonify, request
|
||||
|
||||
|
||||
def create_schema_blueprint(service_name: str) -> Blueprint:
    """Create a blueprint exposing ``/internal/schema``."""
    blueprint = Blueprint(
        f"schema_{service_name}",
        __name__,
        url_prefix="/internal",
    )

    @blueprint.get("/schema")
    async def get_schema():
        # Imported at request time so the registry reflects whatever has
        # been loaded by the time the endpoint is hit.
        from shared.sx.query_registry import schema_for_service
        return jsonify(schema_for_service(service_name))

    return blueprint
|
||||
42
shared/services/account_impl.py
Normal file
42
shared/services/account_impl.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Service methods for account data queries.
|
||||
|
||||
Extracted from account/bp/data/routes.py to enable sx defquery conversion.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models import User
|
||||
|
||||
|
||||
class SqlAccountDataService:
    """SQL-backed account data lookups (user-by-email, newsletter listing)."""

    async def user_by_email(
        self, session: AsyncSession, *, email: str,
    ) -> dict[str, Any] | None:
        """Resolve a user id by email (case-insensitive); None when not found."""
        normalized = (email or "").strip().lower()
        if not normalized:
            return None
        row = (
            await session.execute(
                select(User.id).where(User.email.ilike(normalized))
            )
        ).first()
        return {"user_id": row[0]} if row else None

    async def newsletters(self, session: AsyncSession) -> list[dict[str, Any]]:
        """List all Ghost newsletters, ordered by name."""
        from shared.models.ghost_membership_entities import GhostNewsletter

        stmt = select(
            GhostNewsletter.id, GhostNewsletter.ghost_id,
            GhostNewsletter.name, GhostNewsletter.slug,
        ).order_by(GhostNewsletter.name)
        rows = (await session.execute(stmt)).all()
        keys = ("id", "ghost_id", "name", "slug")
        return [dict(zip(keys, row)) for row in rows]
|
||||
37
shared/services/cart_items_impl.py
Normal file
37
shared/services/cart_items_impl.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Extra cart query methods not in the CartService protocol.
|
||||
|
||||
cart-items returns raw CartItem data without going through CartSummaryDTO.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models.market import CartItem
|
||||
|
||||
|
||||
class SqlCartItemsService:
    """Raw CartItem lookups that bypass the CartSummaryDTO layer."""

    async def cart_items(
        self, session: AsyncSession, *,
        user_id: int | None = None, session_id: str | None = None,
    ) -> list[dict[str, Any]]:
        """Return non-deleted cart items for a user or an anonymous session.

        ``user_id`` takes precedence when both identifiers are supplied;
        with neither, an empty list is returned.
        """
        if user_id is not None:
            owner_filter = CartItem.user_id == user_id
        elif session_id is not None:
            owner_filter = CartItem.session_id == session_id
        else:
            return []

        stmt = select(CartItem).where(CartItem.deleted_at.is_(None), owner_filter)
        items = (await session.execute(stmt)).scalars().all()
        return [
            {
                "product_id": entry.product_id,
                "product_slug": entry.product_slug,
                "quantity": entry.quantity,
            }
            for entry in items
        ]
|
||||
103
shared/services/likes_impl.py
Normal file
103
shared/services/likes_impl.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""SQL implementation of the LikesService protocol.
|
||||
|
||||
Extracted from likes/bp/data/routes.py and likes/bp/actions/routes.py
|
||||
to enable sx defquery/defaction conversion.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select, update, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from likes.models.like import Like
|
||||
|
||||
|
||||
class SqlLikesService:
    """SQL implementation of the LikesService protocol (soft-delete aware).

    All lookups filter on ``deleted_at IS NULL``. Unliking soft-deletes the
    row; re-liking later creates a fresh row rather than undeleting, because
    the existence check here never sees soft-deleted rows.
    """

    async def is_liked(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
        target_slug: str | None = None, target_id: int | None = None,
    ) -> bool:
        """Return True if an active like exists for the given target.

        The target is identified by slug or id; slug wins when both are
        given. Falsy user_id/target_type (or neither identifier) → False.
        """
        if not user_id or not target_type:
            return False
        filters = [
            Like.user_id == user_id,
            Like.target_type == target_type,
            Like.deleted_at.is_(None),
        ]
        if target_slug is not None:
            filters.append(Like.target_slug == target_slug)
        elif target_id is not None:
            filters.append(Like.target_id == target_id)
        else:
            # Neither identifier supplied — nothing to look up.
            return False
        row = await session.scalar(select(Like.id).where(*filters))
        return row is not None

    async def liked_slugs(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
    ) -> list[str]:
        """Return all slugs of this type actively liked by the user."""
        if not user_id or not target_type:
            return []
        result = await session.execute(
            select(Like.target_slug).where(
                Like.user_id == user_id,
                Like.target_type == target_type,
                Like.target_slug.isnot(None),
                Like.deleted_at.is_(None),
            )
        )
        return list(result.scalars().all())

    async def liked_ids(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
    ) -> list[int]:
        """Return all numeric target ids of this type actively liked by the user."""
        if not user_id or not target_type:
            return []
        result = await session.execute(
            select(Like.target_id).where(
                Like.user_id == user_id,
                Like.target_type == target_type,
                Like.target_id.isnot(None),
                Like.deleted_at.is_(None),
            )
        )
        return list(result.scalars().all())

    async def toggle(
        self, session: AsyncSession, *,
        user_id: int, target_type: str,
        target_slug: str | None = None, target_id: int | None = None,
    ) -> bool:
        """Toggle a like. Returns True if now liked, False if unliked.

        Raises ValueError when neither target_slug nor target_id is given.
        This method flushes but never commits — transaction control stays
        with the caller.
        """
        filters = [
            Like.user_id == user_id,
            Like.target_type == target_type,
            Like.deleted_at.is_(None),
        ]
        if target_slug is not None:
            filters.append(Like.target_slug == target_slug)
        elif target_id is not None:
            filters.append(Like.target_id == target_id)
        else:
            raise ValueError("target_slug or target_id required")

        existing = await session.scalar(select(Like).where(*filters))

        if existing:
            # Unlike: soft-delete the active row.
            await session.execute(
                update(Like).where(Like.id == existing.id).values(deleted_at=func.now())
            )
            return False
        else:
            # Like: always insert a new row (soft-deleted rows are not revived).
            new_like = Like(
                user_id=user_id,
                target_type=target_type,
                target_slug=target_slug,
                target_id=target_id,
            )
            session.add(new_like)
            await session.flush()
            return True
|
||||
55
shared/services/market_data_impl.py
Normal file
55
shared/services/market_data_impl.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""Extra market query methods for raw-SQLAlchemy data lookups.
|
||||
|
||||
products-by-ids and marketplaces-by-ids use direct selects rather
|
||||
than the MarketService protocol methods.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
class SqlMarketDataService:
    """Direct-select market lookups (batch fetch by id lists)."""

    async def products_by_ids(
        self, session: AsyncSession, *, ids: list[int],
    ) -> list[dict[str, Any]]:
        """Serialize products for the given ids; [] for an empty id list."""
        if not ids:
            return []
        from shared.models.market import Product

        products = (
            await session.execute(select(Product).where(Product.id.in_(ids)))
        ).scalars().all()

        def _price(value) -> str | None:
            # Prices go over the wire as strings (presumably to preserve
            # Decimal precision in JSON — confirm against consumers).
            return str(value) if value is not None else None

        return [
            {
                "id": product.id,
                "title": product.title,
                "slug": product.slug,
                "image": product.image,
                "regular_price": _price(product.regular_price),
                "special_price": _price(product.special_price),
            }
            for product in products
        ]

    async def marketplaces_by_ids(
        self, session: AsyncSession, *, ids: list[int],
    ) -> list[dict[str, Any]]:
        """Serialize marketplaces for the given ids; [] for an empty id list."""
        if not ids:
            return []
        from shared.models.market_place import MarketPlace

        places = (
            await session.execute(select(MarketPlace).where(MarketPlace.id.in_(ids)))
        ).scalars().all()
        return [
            {
                "id": place.id,
                "name": place.name,
                "slug": place.slug,
                "container_type": place.container_type,
                "container_id": place.container_id,
            }
            for place in places
        ]
|
||||
137
shared/services/page_config_impl.py
Normal file
137
shared/services/page_config_impl.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""SQL implementation of PageConfig service methods.
|
||||
|
||||
Extracted from blog/bp/data/routes.py and blog/bp/actions/routes.py
|
||||
to enable sx defquery/defaction conversion.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm.attributes import flag_modified
|
||||
|
||||
from shared.models.page_config import PageConfig
|
||||
|
||||
|
||||
def _to_dict(pc: PageConfig) -> dict[str, Any]:
|
||||
return {
|
||||
"id": pc.id,
|
||||
"container_type": pc.container_type,
|
||||
"container_id": pc.container_id,
|
||||
"features": pc.features or {},
|
||||
"sumup_merchant_code": pc.sumup_merchant_code,
|
||||
"sumup_api_key": pc.sumup_api_key,
|
||||
"sumup_checkout_prefix": pc.sumup_checkout_prefix,
|
||||
}
|
||||
|
||||
|
||||
class SqlPageConfigService:
    """SQL implementation of PageConfig lookups and updates.

    A PageConfig is addressed by ``(container_type, container_id)``; rows
    are created lazily by ``ensure`` and ``update``.
    """

    async def ensure(
        self, session: AsyncSession, *,
        container_type: str = "page", container_id: int,
    ) -> dict[str, Any]:
        """Get or create a PageConfig. Returns minimal dict with id."""
        row = (await session.execute(
            select(PageConfig).where(
                PageConfig.container_type == container_type,
                PageConfig.container_id == container_id,
            )
        )).scalar_one_or_none()

        if row is None:
            # First access for this container: create with empty features.
            row = PageConfig(
                container_type=container_type,
                container_id=container_id,
                features={},
            )
            session.add(row)
            # Flush so row.id is populated before we return it.
            await session.flush()

        return {
            "id": row.id,
            "container_type": row.container_type,
            "container_id": row.container_id,
        }

    async def get_by_container(
        self, session: AsyncSession, *,
        container_type: str = "page", container_id: int,
    ) -> dict[str, Any] | None:
        """Return the full config dict for a container, or None if absent."""
        pc = (await session.execute(
            select(PageConfig).where(
                PageConfig.container_type == container_type,
                PageConfig.container_id == container_id,
            )
        )).scalar_one_or_none()
        return _to_dict(pc) if pc else None

    async def get_by_id(
        self, session: AsyncSession, *, id: int,
    ) -> dict[str, Any] | None:
        """Return the config dict by primary key, or None if absent."""
        pc = await session.get(PageConfig, id)
        return _to_dict(pc) if pc else None

    async def get_batch(
        self, session: AsyncSession, *,
        container_type: str = "page", ids: list[int],
    ) -> list[dict[str, Any]]:
        """Return config dicts for the given container ids ([] for empty input)."""
        if not ids:
            return []
        result = await session.execute(
            select(PageConfig).where(
                PageConfig.container_type == container_type,
                PageConfig.container_id.in_(ids),
            )
        )
        return [_to_dict(pc) for pc in result.scalars().all()]

    async def update(
        self, session: AsyncSession, *,
        container_type: str = "page", container_id: int,
        features: dict | None = None,
        sumup_merchant_code: str | None = None,
        sumup_checkout_prefix: str | None = None,
        sumup_api_key: str | None = None,
    ) -> dict[str, Any]:
        """Upsert a PageConfig, merging feature flags and SumUp settings.

        Feature values are coerced to booleans ("true"/"1"/"on" → True,
        "false"/"0"/"off"/None → False); unrecognized values leave the
        existing flag untouched. SumUp fields are only written when the
        argument is not None; empty strings are stored as NULL.
        """
        pc = (await session.execute(
            select(PageConfig).where(
                PageConfig.container_type == container_type,
                PageConfig.container_id == container_id,
            )
        )).scalar_one_or_none()

        if pc is None:
            pc = PageConfig(
                container_type=container_type,
                container_id=container_id,
                features=features or {},
            )
            session.add(pc)
            await session.flush()

        if features is not None:
            # Merge into a copy so coercion never mutates the stored dict in place.
            merged = dict(pc.features or {})
            for key, val in features.items():
                if isinstance(val, bool):
                    merged[key] = val
                elif val in ("true", "1", "on"):
                    merged[key] = True
                elif val in ("false", "0", "off", None):
                    merged[key] = False
                # NOTE(review): on a freshly-created row (seeded with raw
                # `features` above), values matching none of these branches
                # survive uncoerced in `merged` — confirm this is intended.
            pc.features = merged
            # flag_modified is required for SQLAlchemy to detect JSON-column mutation.
            flag_modified(pc, "features")

        if sumup_merchant_code is not None:
            pc.sumup_merchant_code = sumup_merchant_code or None
        if sumup_checkout_prefix is not None:
            pc.sumup_checkout_prefix = sumup_checkout_prefix or None
        if sumup_api_key is not None:
            pc.sumup_api_key = sumup_api_key or None

        await session.flush()

        result = _to_dict(pc)
        # Convenience flag for callers that only need to know SumUp is set up.
        result["sumup_configured"] = bool(pc.sumup_api_key)
        return result
|
||||
@@ -23,6 +23,7 @@ from shared.contracts.protocols import (
|
||||
CartService,
|
||||
FederationService,
|
||||
)
|
||||
from shared.contracts.likes import LikesService
|
||||
|
||||
|
||||
class _ServiceRegistry:
|
||||
@@ -38,6 +39,7 @@ class _ServiceRegistry:
|
||||
self._market: MarketService | None = None
|
||||
self._cart: CartService | None = None
|
||||
self._federation: FederationService | None = None
|
||||
self._likes: LikesService | None = None
|
||||
self._extra: dict[str, Any] = {}
|
||||
|
||||
# -- calendar -------------------------------------------------------------
|
||||
@@ -73,6 +75,17 @@ class _ServiceRegistry:
|
||||
def cart(self, impl: CartService) -> None:
|
||||
self._cart = impl
|
||||
|
||||
# -- likes ----------------------------------------------------------------
|
||||
    @property
    def likes(self) -> LikesService:
        """Return the registered LikesService implementation.

        Raises:
            RuntimeError: if no implementation has been registered yet.
        """
        if self._likes is None:
            raise RuntimeError("LikesService not registered")
        return self._likes

    @likes.setter
    def likes(self, impl: LikesService) -> None:
        """Register the LikesService implementation for this registry."""
        self._likes = impl
|
||||
|
||||
# -- federation -----------------------------------------------------------
|
||||
@property
|
||||
def federation(self) -> FederationService:
|
||||
|
||||
164
shared/services/relations_impl.py
Normal file
164
shared/services/relations_impl.py
Normal file
@@ -0,0 +1,164 @@
|
||||
"""Service wrapper for relations module functions.
|
||||
|
||||
Wraps the module-level functions in shared.services.relationships into
|
||||
a class so they can be called via the ``(service "relations" ...)`` primitive.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
def _serialize_rel(r) -> dict[str, Any]:
|
||||
return {
|
||||
"id": r.id,
|
||||
"parent_type": r.parent_type,
|
||||
"parent_id": r.parent_id,
|
||||
"child_type": r.child_type,
|
||||
"child_id": r.child_id,
|
||||
"sort_order": r.sort_order,
|
||||
"label": r.label,
|
||||
"relation_type": r.relation_type,
|
||||
"metadata": r.metadata_,
|
||||
}
|
||||
|
||||
|
||||
class SqlRelationsService:
    """Service wrapper around the module-level relationship functions.

    Pure delegation plus serialization; the registry-aware methods
    (``relate``/``unrelate``/``can_relate``) additionally resolve the
    relation definition and enforce one-to-one cardinality.
    """

    async def get_children(
        self, session: AsyncSession, *,
        parent_type: str, parent_id: int,
        child_type: str | None = None,
        relation_type: str | None = None,
    ) -> list[dict[str, Any]]:
        """List serialized child relations of a parent, optionally filtered."""
        from shared.services.relationships import get_children
        rels = await get_children(
            session, parent_type, parent_id, child_type,
            relation_type=relation_type,
        )
        return [_serialize_rel(r) for r in rels]

    async def get_parents(
        self, session: AsyncSession, *,
        child_type: str, child_id: int,
        parent_type: str | None = None,
        relation_type: str | None = None,
    ) -> list[dict[str, Any]]:
        """List serialized parent relations of a child, optionally filtered."""
        from shared.services.relationships import get_parents
        rels = await get_parents(
            session, child_type, child_id, parent_type,
            relation_type=relation_type,
        )
        return [_serialize_rel(r) for r in rels]

    async def attach_child(
        self, session: AsyncSession, *,
        parent_type: str, parent_id: int,
        child_type: str, child_id: int,
        label: str | None = None,
        sort_order: int | None = None,
        relation_type: str | None = None,
        metadata: dict | None = None,
    ) -> dict[str, Any]:
        """Create a parent→child relation and return it serialized."""
        from shared.services.relationships import attach_child
        rel = await attach_child(
            session,
            parent_type=parent_type, parent_id=parent_id,
            child_type=child_type, child_id=child_id,
            label=label, sort_order=sort_order,
            relation_type=relation_type, metadata=metadata,
        )
        return _serialize_rel(rel)

    async def detach_child(
        self, session: AsyncSession, *,
        parent_type: str, parent_id: int,
        child_type: str, child_id: int,
        relation_type: str | None = None,
    ) -> bool:
        """Remove a parent→child relation; returns the wrapped function's result."""
        from shared.services.relationships import detach_child
        return await detach_child(
            session,
            parent_type=parent_type, parent_id=parent_id,
            child_type=child_type, child_id=child_id,
            relation_type=relation_type,
        )

    async def relate(
        self, session: AsyncSession, *,
        relation_type: str,
        from_id: int, to_id: int,
        label: str | None = None,
        sort_order: int | None = None,
        metadata: dict | None = None,
    ) -> dict[str, Any]:
        """Registry-aware relation creation with cardinality enforcement.

        Raises ValueError for an unknown relation_type or when a
        one-to-one relation already exists for the source.
        """
        from shared.services.relationships import attach_child, get_children
        from shared.sx.relations import get_relation

        defn = get_relation(relation_type)
        if defn is None:
            raise ValueError(f"unknown relation_type: {relation_type}")

        if defn.cardinality == "one-to-one":
            # NOTE(review): get_children is called with keyword args here but
            # positionally in self.get_children above — verify the wrapped
            # function accepts both calling conventions.
            existing = await get_children(
                session,
                parent_type=defn.from_type,
                parent_id=from_id,
                child_type=defn.to_type,
                relation_type=relation_type,
            )
            if existing:
                raise ValueError("one-to-one relation already exists")

        rel = await attach_child(
            session,
            parent_type=defn.from_type, parent_id=from_id,
            child_type=defn.to_type, child_id=to_id,
            label=label, sort_order=sort_order,
            relation_type=relation_type, metadata=metadata,
        )
        return _serialize_rel(rel)

    async def unrelate(
        self, session: AsyncSession, *,
        relation_type: str, from_id: int, to_id: int,
    ) -> bool:
        """Registry-aware relation removal; raises ValueError if type unknown."""
        from shared.services.relationships import detach_child
        from shared.sx.relations import get_relation

        defn = get_relation(relation_type)
        if defn is None:
            raise ValueError(f"unknown relation_type: {relation_type}")

        return await detach_child(
            session,
            parent_type=defn.from_type, parent_id=from_id,
            child_type=defn.to_type, child_id=to_id,
            relation_type=relation_type,
        )

    async def can_relate(
        self, session: AsyncSession, *,
        relation_type: str, from_id: int,
    ) -> dict[str, Any]:
        """Check whether a new relation is permitted without creating it.

        Returns ``{"allowed": bool}`` plus a ``"reason"`` when denied
        (unknown type or one-to-one already satisfied).
        """
        from shared.services.relationships import get_children
        from shared.sx.relations import get_relation

        defn = get_relation(relation_type)
        if defn is None:
            return {"allowed": False, "reason": f"unknown relation_type: {relation_type}"}

        if defn.cardinality == "one-to-one":
            existing = await get_children(
                session,
                parent_type=defn.from_type,
                parent_id=from_id,
                child_type=defn.to_type,
                relation_type=relation_type,
            )
            if existing:
                return {"allowed": False, "reason": "one-to-one relation already exists"}

        return {"allowed": True}
|
||||
@@ -553,6 +553,75 @@ def _sf_defhandler(expr: list, env: dict) -> HandlerDef:
|
||||
return handler
|
||||
|
||||
|
||||
def _parse_key_params(params_expr: list) -> list[str]:
    """Parse ``(&key param1 param2 ...)`` into a list of param name strings."""
    names: list[str] = []
    seen_key_marker = False
    for item in params_expr:
        if isinstance(item, Symbol):
            if item.name == "&key":
                seen_key_marker = True
            elif seen_key_marker:
                names.append(item.name)
            # Symbols before the &key marker are ignored.
        elif isinstance(item, str):
            # Plain strings count as parameter names regardless of position.
            names.append(item)
    return names
|
||||
|
||||
|
||||
def _sf_defquery(expr: list, env: dict):
|
||||
"""``(defquery name (&key param...) "docstring" body)``"""
|
||||
from .types import QueryDef
|
||||
if len(expr) < 4:
|
||||
raise EvalError("defquery requires name, params, and body")
|
||||
name_sym = expr[1]
|
||||
if not isinstance(name_sym, Symbol):
|
||||
raise EvalError(f"defquery name must be symbol, got {type(name_sym).__name__}")
|
||||
params_expr = expr[2]
|
||||
if not isinstance(params_expr, list):
|
||||
raise EvalError("defquery params must be a list")
|
||||
params = _parse_key_params(params_expr)
|
||||
# Optional docstring before body
|
||||
if len(expr) >= 5 and isinstance(expr[3], str):
|
||||
doc = expr[3]
|
||||
body = expr[4]
|
||||
else:
|
||||
doc = ""
|
||||
body = expr[3]
|
||||
qdef = QueryDef(
|
||||
name=name_sym.name, params=params, doc=doc,
|
||||
body=body, closure=dict(env),
|
||||
)
|
||||
env[f"query:{name_sym.name}"] = qdef
|
||||
return qdef
|
||||
|
||||
|
||||
def _sf_defaction(expr: list, env: dict):
    """``(defaction name (&key param...) "docstring" body)``

    Builds an :class:`ActionDef`, stores it in *env* under
    ``action:<name>``, and returns it. The body stays unevaluated and the
    defining environment is captured as the closure.
    """
    from .types import ActionDef

    if len(expr) < 4:
        raise EvalError("defaction requires name, params, and body")

    sym, raw_params = expr[1], expr[2]
    if not isinstance(sym, Symbol):
        raise EvalError(f"defaction name must be symbol, got {type(sym).__name__}")
    if not isinstance(raw_params, list):
        raise EvalError("defaction params must be a list")

    # A string in the third slot is a docstring only when a body follows it.
    has_doc = len(expr) >= 5 and isinstance(expr[3], str)
    doc = expr[3] if has_doc else ""
    body = expr[4] if has_doc else expr[3]

    definition = ActionDef(
        name=sym.name,
        params=_parse_key_params(raw_params),
        doc=doc,
        body=body,
        closure=dict(env),
    )
    env[f"action:{sym.name}"] = definition
    return definition
|
||||
|
||||
|
||||
def _sf_set_bang(expr: list, env: dict) -> Any:
|
||||
"""``(set! name value)`` — mutate existing binding."""
|
||||
if len(expr) != 3:
|
||||
@@ -737,6 +806,8 @@ _SPECIAL_FORMS: dict[str, Any] = {
|
||||
"quasiquote": _sf_quasiquote,
|
||||
"defhandler": _sf_defhandler,
|
||||
"defpage": _sf_defpage,
|
||||
"defquery": _sf_defquery,
|
||||
"defaction": _sf_defaction,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -516,6 +516,23 @@ def prim_parse_int(val: Any, default: Any = 0) -> int | Any:
|
||||
return default
|
||||
|
||||
|
||||
@register_primitive("parse-datetime")
def prim_parse_datetime(val: Any) -> Any:
    """``(parse-datetime "2024-01-15T10:00:00")`` → datetime object.

    Falsy input (empty string, 0, NIL) maps to NIL instead of raising.
    Malformed non-empty strings propagate ``ValueError`` from
    ``datetime.fromisoformat``.
    """
    from datetime import datetime

    if val is NIL or not val:
        return NIL
    return datetime.fromisoformat(str(val))
|
||||
|
||||
|
||||
@register_primitive("split-ids")
def prim_split_ids(val: Any) -> list[int]:
    """``(split-ids "1,2,3")`` → [1, 2, 3]. Parse comma-separated int IDs.

    Falsy input (empty string, NIL) yields an empty list; blank segments
    between commas are skipped. Non-numeric segments raise ``ValueError``.
    """
    if val is NIL or not val:
        return []
    ids: list[int] = []
    for piece in str(val).split(","):
        piece = piece.strip()
        if piece:
            ids.append(int(piece))
    return ids
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Assertions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
70
shared/sx/query_executor.py
Normal file
70
shared/sx/query_executor.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""
|
||||
Execute defquery / defaction definitions.
|
||||
|
||||
Unlike fragment handlers (which produce SX markup via ``async_eval_to_sx``),
|
||||
query/action defs produce **data** (dicts, lists, scalars) that get
|
||||
JSON-serialized by the calling blueprint. Uses ``async_eval()`` with
|
||||
the I/O primitive pipeline so ``(service ...)`` calls are awaited inline.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from .types import QueryDef, ActionDef, NIL
|
||||
|
||||
|
||||
async def execute_query(query_def: QueryDef, params: dict[str, str]) -> Any:
    """Execute a defquery and return a JSON-serializable result.

    Parameters are bound from request query string args, trying the
    declared (kebab-case) name first and its snake_case form second;
    missing params bind to NIL.

    Integer-looking string values are coerced to ``int`` so sx bodies can
    compare IDs numerically. FIX: the previous check used
    ``val.lstrip("-").isdigit()``, which accepted strings like ``"--5"``
    and then crashed in ``int("--5")`` with ValueError at request time.
    Now at most one leading minus sign is allowed.
    """
    from .jinja_bridge import get_component_env, _get_request_context
    from .async_eval import async_eval

    env = dict(get_component_env())
    env.update(query_def.closure)

    # Bind params from request args (try kebab-case and snake_case)
    for param in query_def.params:
        snake = param.replace("-", "_")
        val = params.get(param, params.get(snake, NIL))
        if isinstance(val, str):
            # Accept an optional single leading "-" followed by digits only.
            digits = val[1:] if val.startswith("-") else val
            if digits.isdigit():
                val = int(val)
        env[param] = val

    ctx = _get_request_context()
    result = await async_eval(query_def.body, env, ctx)
    return _normalize(result)
|
||||
|
||||
|
||||
async def execute_action(action_def: ActionDef, payload: dict[str, Any]) -> Any:
    """Execute a defaction and return a JSON-serializable result.

    Parameters are bound from the JSON request body, trying the declared
    (kebab-case) name first and its snake_case form second; missing
    params bind to NIL. No type coercion is applied — JSON already
    carries typed values.
    """
    from .jinja_bridge import get_component_env, _get_request_context
    from .async_eval import async_eval

    env = dict(get_component_env())
    env.update(action_def.closure)

    # Bind params from JSON payload (try kebab-case and snake_case)
    for name in action_def.params:
        fallback = payload.get(name.replace("-", "_"), NIL)
        env[name] = payload.get(name, fallback)

    result = await async_eval(action_def.body, env, _get_request_context())
    return _normalize(result)
|
||||
|
||||
|
||||
def _normalize(value: Any) -> Any:
|
||||
"""Ensure result is JSON-serializable (strip NIL, convert sets, etc)."""
|
||||
if value is NIL or value is None:
|
||||
return None
|
||||
if isinstance(value, set):
|
||||
return list(value)
|
||||
return value
|
||||
180
shared/sx/query_registry.py
Normal file
180
shared/sx/query_registry.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""
|
||||
Registry for defquery / defaction definitions.
|
||||
|
||||
Mirrors the pattern in ``handlers.py`` but for inter-service data queries
|
||||
and action endpoints. Each service loads its ``.sx`` files at startup,
|
||||
and the registry makes them available for dispatch by the query blueprint.
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.sx.query_registry import load_query_file, get_query
|
||||
|
||||
load_query_file("events/queries.sx", "events")
|
||||
qdef = get_query("events", "pending-entries")
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from .types import QueryDef, ActionDef
|
||||
|
||||
logger = logging.getLogger("sx.query_registry")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Registry — service → name → QueryDef / ActionDef
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_QUERY_REGISTRY: dict[str, dict[str, QueryDef]] = {}
|
||||
_ACTION_REGISTRY: dict[str, dict[str, ActionDef]] = {}
|
||||
|
||||
|
||||
def register_query(service: str, qdef: QueryDef) -> None:
    """Register *qdef* under *service*, replacing any same-named query."""
    _QUERY_REGISTRY.setdefault(service, {})[qdef.name] = qdef
    logger.debug("Registered query %s:%s", service, qdef.name)
|
||||
|
||||
|
||||
def register_action(service: str, adef: ActionDef) -> None:
    """Register *adef* under *service*, replacing any same-named action."""
    _ACTION_REGISTRY.setdefault(service, {})[adef.name] = adef
    logger.debug("Registered action %s:%s", service, adef.name)
|
||||
|
||||
|
||||
def get_query(service: str, name: str) -> QueryDef | None:
    """Look up a registered query for *service*; ``None`` when absent."""
    service_queries = _QUERY_REGISTRY.get(service, {})
    return service_queries.get(name)
|
||||
|
||||
|
||||
def get_action(service: str, name: str) -> ActionDef | None:
    """Look up a registered action for *service*; ``None`` when absent."""
    service_actions = _ACTION_REGISTRY.get(service, {})
    return service_actions.get(name)
|
||||
|
||||
|
||||
def get_all_queries(service: str) -> dict[str, QueryDef]:
    """Shallow-copy snapshot of every query registered for *service*."""
    return {name: qdef for name, qdef in _QUERY_REGISTRY.get(service, {}).items()}
|
||||
|
||||
|
||||
def get_all_actions(service: str) -> dict[str, ActionDef]:
    """Shallow-copy snapshot of every action registered for *service*."""
    return {name: adef for name, adef in _ACTION_REGISTRY.get(service, {}).items()}
|
||||
|
||||
|
||||
def clear(service: str | None = None) -> None:
    """Drop registrations for one *service*, or everything when ``None``."""
    for registry in (_QUERY_REGISTRY, _ACTION_REGISTRY):
        if service is None:
            registry.clear()
        else:
            registry.pop(service, None)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Loading — parse .sx files and collect QueryDef / ActionDef instances
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def load_query_file(filepath: str, service_name: str) -> list[QueryDef]:
    """Parse an .sx file and register any defquery definitions.

    Evaluates every top-level form against a fresh component env, then
    registers each QueryDef left in that env. Scanning env values (rather
    than _eval return values) also catches defs produced inside nested
    forms.
    """
    from .parser import parse_all
    from .evaluator import _eval
    from .jinja_bridge import get_component_env

    with open(filepath, encoding="utf-8") as fh:
        text = fh.read()

    env = dict(get_component_env())
    for form in parse_all(text):
        _eval(form, env)

    found = [value for value in env.values() if isinstance(value, QueryDef)]
    for qdef in found:
        register_query(service_name, qdef)
    return found
|
||||
|
||||
|
||||
def load_action_file(filepath: str, service_name: str) -> list[ActionDef]:
    """Parse an .sx file and register any defaction definitions.

    Evaluates every top-level form against a fresh component env, then
    registers each ActionDef left in that env. Scanning env values (rather
    than _eval return values) also catches defs produced inside nested
    forms.
    """
    from .parser import parse_all
    from .evaluator import _eval
    from .jinja_bridge import get_component_env

    with open(filepath, encoding="utf-8") as fh:
        text = fh.read()

    env = dict(get_component_env())
    for form in parse_all(text):
        _eval(form, env)

    found = [value for value in env.values() if isinstance(value, ActionDef)]
    for adef in found:
        register_action(service_name, adef)
    return found
|
||||
|
||||
|
||||
def load_query_dir(directory: str, service_name: str) -> list[QueryDef]:
    """Load all .sx files from a directory and register queries.

    Files are processed in sorted order for deterministic registration.
    """
    import glob as glob_mod

    pattern = os.path.join(directory, "*.sx")
    results: list[QueryDef] = []
    for path in sorted(glob_mod.glob(pattern)):
        results += load_query_file(path, service_name)
    return results
|
||||
|
||||
|
||||
def load_action_dir(directory: str, service_name: str) -> list[ActionDef]:
    """Load all .sx files from a directory and register actions.

    Files are processed in sorted order for deterministic registration.
    """
    import glob as glob_mod

    pattern = os.path.join(directory, "*.sx")
    results: list[ActionDef] = []
    for path in sorted(glob_mod.glob(pattern)):
        results += load_action_file(path, service_name)
    return results
|
||||
|
||||
|
||||
def load_service_protocols(service_name: str, base_dir: str) -> None:
    """Load queries.sx and actions.sx from a service's base directory.

    Either file may be absent; missing files are silently skipped.
    """
    targets = (
        ("queries.sx", load_query_file, "Loaded queries for %s from %s"),
        ("actions.sx", load_action_file, "Loaded actions for %s from %s"),
    )
    for filename, loader, message in targets:
        path = os.path.join(base_dir, filename)
        if os.path.exists(path):
            loader(path, service_name)
            logger.info(message, service_name, path)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Schema — introspection for /internal/schema
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def schema_for_service(service: str) -> dict[str, Any]:
    """Return a JSON-serializable schema of all queries and actions.

    Entries are sorted by name so the /internal/schema payload is stable
    across restarts.
    """
    def describe(defn) -> dict[str, Any]:
        # Queries and actions share the same introspection shape.
        return {"name": defn.name, "params": list(defn.params), "doc": defn.doc}

    queries = [describe(q) for q in _QUERY_REGISTRY.get(service, {}).values()]
    actions = [describe(a) for a in _ACTION_REGISTRY.get(service, {}).values()]
    return {
        "service": service,
        "queries": sorted(queries, key=lambda entry: entry["name"]),
        "actions": sorted(actions, key=lambda entry: entry["name"]),
    }
|
||||
@@ -240,9 +240,47 @@ class PageDef:
|
||||
return f"<page:{self.name} path={self.path!r}>"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# QueryDef / ActionDef
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass
class QueryDef:
    """A declarative data query defined in an .sx file.

    Created by ``(defquery name (&key param...) "docstring" body)``.
    The body is evaluated with async I/O primitives to produce JSON data.
    """
    # Query name as exposed on the inter-service wire.
    name: str
    # Declared keyword parameter names.
    params: list[str]
    # Docstring surfaced by /internal/schema introspection.
    doc: str
    # Unevaluated s-expression body.
    body: Any
    # Environment snapshot captured at definition time.
    closure: dict[str, Any] = field(default_factory=dict)

    def __repr__(self):
        joined = ", ".join(self.params)
        return f"<query:{self.name}({joined})>"
|
||||
|
||||
|
||||
@dataclass
class ActionDef:
    """A declarative action defined in an .sx file.

    Created by ``(defaction name (&key param...) "docstring" body)``.
    The body is evaluated with async I/O primitives to produce JSON data.
    """
    # Action name as exposed on the inter-service wire.
    name: str
    # Declared keyword parameter names.
    params: list[str]
    # Docstring surfaced by /internal/schema introspection.
    doc: str
    # Unevaluated s-expression body.
    body: Any
    # Environment snapshot captured at definition time.
    closure: dict[str, Any] = field(default_factory=dict)

    def __repr__(self):
        joined = ", ".join(self.params)
        return f"<action:{self.name}({joined})>"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Type alias
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# An s-expression value after evaluation
|
||||
SExp = int | float | str | bool | Symbol | Keyword | Lambda | Macro | Component | HandlerDef | RelationDef | PageDef | list | dict | _Nil | None
|
||||
SExp = int | float | str | bool | Symbol | Keyword | Lambda | Macro | Component | HandlerDef | RelationDef | PageDef | QueryDef | ActionDef | list | dict | _Nil | None
|
||||
|
||||
Reference in New Issue
Block a user