Add modular app structure for L1 server refactoring
Phase 2 of the full modernization:

- App factory pattern with create_app()
- Settings via dataclass with env vars
- Dependency injection container
- Router stubs for auth, storage, api, recipes, cache, runs
- Service layer stubs for run, recipe, cache
- Repository layer placeholder

Routes are stubs that import from legacy server.py during migration.
Next: Migrate each router fully with templates.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
15
app/services/__init__.py
Normal file
15
app/services/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""
|
||||
L1 Server Services.
|
||||
|
||||
Business logic layer between routers and repositories.
|
||||
"""
|
||||
|
||||
from .run_service import RunService
|
||||
from .recipe_service import RecipeService
|
||||
from .cache_service import CacheService
|
||||
|
||||
__all__ = [
|
||||
"RunService",
|
||||
"RecipeService",
|
||||
"CacheService",
|
||||
]
|
||||
110
app/services/cache_service.py
Normal file
110
app/services/cache_service.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""
|
||||
Cache Service - business logic for cache and media management.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from artdag_common.utils.media import detect_media_type, get_mime_type
|
||||
|
||||
|
||||
class CacheService:
    """
    Service for managing cached content.

    Handles content retrieval, metadata, and media type detection.

    Collaborators (injected):
        cache_manager: content-addressed store exposing
            ``get_by_content_hash``, ``has_content`` and ``get_ipfs_cid``.
        database: async metadata store exposing ``get_cache_item``,
            ``list_cache_items``, ``count_cache_items``,
            ``update_cache_metadata`` and ``delete_cache_item``.
    """

    def __init__(self, cache_manager, database):
        self.cache = cache_manager
        self.db = database

    async def get_item(self, content_hash: str) -> Optional[Dict[str, Any]]:
        """Get cached item by content hash.

        Returns:
            Dict with path, media/mime type, size and metadata fields,
            or None when the content is not present on disk.
        """
        path = self.cache.get_by_content_hash(content_hash)
        if not path or not path.exists():
            return None

        # Metadata is optional: content may exist on disk before its
        # database record does, so fall back to an empty mapping.
        meta = await self.db.get_cache_item(content_hash) or {}

        return {
            "content_hash": content_hash,
            "path": str(path),
            "media_type": detect_media_type(path),
            "mime_type": get_mime_type(path),
            "size": path.stat().st_size,
            "name": meta.get("name"),
            "description": meta.get("description"),
            "tags": meta.get("tags", []),
            "ipfs_cid": meta.get("ipfs_cid"),
        }

    async def get_path(self, content_hash: str) -> Optional[Path]:
        """Get the file path for cached content, or None if absent."""
        return self.cache.get_by_content_hash(content_hash)

    async def list_items(
        self,
        actor_id: Optional[str] = None,
        media_type: Optional[str] = None,
        page: int = 1,
        limit: int = 20,
    ) -> Dict[str, Any]:
        """List cached items with filters and pagination.

        Args:
            actor_id: When given, only items owned by this actor.
            media_type: When given, only items of this media type.
            page: 1-based page number.
            limit: Page size.

        Returns:
            Dict with ``items`` (current page) and ``pagination``
            (page, limit, total, has_more).
        """
        items = await self.db.list_cache_items(
            actor_id=actor_id,
            media_type=media_type,
            offset=(page - 1) * limit,
            limit=limit,
        )

        total = await self.db.count_cache_items(actor_id=actor_id, media_type=media_type)

        return {
            "items": items,
            "pagination": {
                "page": page,
                "limit": limit,
                "total": total,
                "has_more": page * limit < total,
            }
        }

    async def update_metadata(
        self,
        content_hash: str,
        name: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[List[str]] = None,
    ) -> bool:
        """Update item metadata; returns the database's success flag."""
        return await self.db.update_cache_metadata(
            content_hash=content_hash,
            name=name,
            description=description,
            tags=tags,
        )

    async def delete_item(self, content_hash: str) -> bool:
        """Delete a cached item (idempotent; always returns True).

        Removes the on-disk file when present, then the database record.
        """
        path = self.cache.get_by_content_hash(content_hash)
        if path:
            # missing_ok guards the race between lookup and unlink
            # (the previous exists()/unlink() pair was not atomic).
            path.unlink(missing_ok=True)

        await self.db.delete_cache_item(content_hash)
        return True

    def has_content(self, content_hash: str) -> bool:
        """Check if content exists in cache."""
        return self.cache.has_content(content_hash)

    def get_ipfs_cid(self, content_hash: str) -> Optional[str]:
        """Get IPFS CID for cached content, or None when not available."""
        return self.cache.get_ipfs_cid(content_hash)
|
||||
128
app/services/recipe_service.py
Normal file
128
app/services/recipe_service.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""
|
||||
Recipe Service - business logic for recipe management.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
import json
|
||||
import yaml
|
||||
|
||||
|
||||
class RecipeService:
    """
    Service for managing recipes.

    Handles recipe parsing, validation, and DAG building.

    Collaborators (injected):
        redis: client exposing ``get``/``set``/``delete``/``scan``.
        cache: content-addressed store exposing ``get_by_content_hash``.
    """

    def __init__(self, redis, cache):
        self.redis = redis
        self.cache = cache
        # All recipe keys in Redis share this prefix.
        self.recipe_prefix = "recipe:"

    async def get_recipe(self, recipe_id: str) -> Optional[Dict[str, Any]]:
        """Get a recipe by ID (content hash).

        Checks Redis first (JSON), then falls back to the on-disk cache
        (YAML). Returns None when the recipe is unknown.
        """
        data = self.redis.get(f"{self.recipe_prefix}{recipe_id}")
        if data:
            return json.loads(data)

        path = self.cache.get_by_content_hash(recipe_id)
        if path and path.exists():
            with open(path) as f:
                return yaml.safe_load(f)

        return None

    async def list_recipes(
        self,
        actor_id: Optional[str] = None,
        page: int = 1,
        limit: int = 20,
    ) -> Dict[str, Any]:
        """List available recipes with pagination.

        Args:
            actor_id: When given, only recipes owned by this actor.
            page: 1-based page number.
            limit: Page size.

        Returns:
            Dict with ``recipes`` (current page) and ``pagination``
            (page, limit, total, has_more).
        """
        # TODO: replace the full SCAN with a Redis index once recipe
        # volume grows; this walks every recipe key on each call.
        recipes = []
        cursor = 0

        while True:
            cursor, keys = self.redis.scan(
                cursor=cursor,
                match=f"{self.recipe_prefix}*",
                count=100
            )
            for key in keys:
                data = self.redis.get(key)
                if data:
                    recipe = json.loads(data)
                    # Filter by actor if specified
                    if actor_id is None or recipe.get("actor_id") == actor_id:
                        recipes.append(recipe)
            if cursor == 0:
                break

        # Stable, user-friendly ordering.
        recipes.sort(key=lambda r: r.get("name", ""))

        total = len(recipes)
        start = (page - 1) * limit
        end = start + limit

        return {
            "recipes": recipes[start:end],
            "pagination": {
                "page": page,
                "limit": limit,
                "total": total,
                "has_more": end < total,
            }
        }

    async def save_recipe(self, recipe_id: str, recipe_data: Dict[str, Any]) -> None:
        """Save a recipe to Redis (serialized as JSON)."""
        self.redis.set(f"{self.recipe_prefix}{recipe_id}", json.dumps(recipe_data))

    async def delete_recipe(self, recipe_id: str) -> bool:
        """Delete a recipe; True when a key was actually removed."""
        return self.redis.delete(f"{self.recipe_prefix}{recipe_id}") > 0

    def parse_yaml(self, yaml_content: str) -> Dict[str, Any]:
        """Parse recipe YAML content (safe_load: no arbitrary objects)."""
        return yaml.safe_load(yaml_content)

    def build_dag(self, recipe: Dict[str, Any]) -> Dict[str, Any]:
        """
        Build DAG visualization data from recipe.

        Returns nodes and edges for Cytoscape.js.
        """
        nodes: List[Dict[str, Any]] = []
        edges: List[Dict[str, Any]] = []

        dag = recipe.get("dag", {})
        dag_nodes = dag.get("nodes", {})
        output_node = dag.get("output")

        for node_id, node_def in dag_nodes.items():
            nodes.append({
                "data": {
                    "id": node_id,
                    "label": node_id,
                    "nodeType": node_def.get("type", "EFFECT"),
                    "isOutput": node_id == output_node,
                }
            })

            # Inputs may be plain source names or {"node": ...}/{"input": ...}
            # references; each becomes an edge into this node.
            for input_ref in node_def.get("inputs", []):
                if isinstance(input_ref, dict):
                    source = input_ref.get("node") or input_ref.get("input")
                else:
                    source = input_ref

                if source:
                    edges.append({
                        "data": {
                            "source": source,
                            "target": node_id,
                        }
                    })

        return {"nodes": nodes, "edges": edges}
|
||||
113
app/services/run_service.py
Normal file
113
app/services/run_service.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""
|
||||
Run Service - business logic for run management.
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
import json
|
||||
|
||||
|
||||
class RunService:
    """
    Service for managing recipe runs.

    Handles run lifecycle, plan loading, and result aggregation.

    Collaborators (injected):
        redis: client exposing ``get``/``set``/``delete``/``scan``.
        cache: content-addressed store (reserved for result lookups).
    """

    def __init__(self, redis, cache):
        self.redis = redis
        self.cache = cache
        # All run keys in Redis share this prefix.
        self.run_prefix = "run:"

    async def get_run(self, run_id: str) -> Optional[Dict[str, Any]]:
        """Get a run by ID, or None when unknown."""
        data = self.redis.get(f"{self.run_prefix}{run_id}")
        if not data:
            return None
        return json.loads(data)

    async def list_runs(self, actor_id: str, page: int = 1, limit: int = 20) -> Dict[str, Any]:
        """List runs for a user with pagination.

        Args:
            actor_id: Owner to filter by; legacy records may store the
                owner under "username", so both fields are matched.
            page: 1-based page number.
            limit: Page size.

        Returns:
            Dict with ``runs`` (current page, newest first) and
            ``pagination`` (page, limit, total, has_more).
        """
        # Get all runs and filter by actor
        # TODO: Use Redis index for efficient filtering
        all_runs = []
        cursor = 0

        while True:
            cursor, keys = self.redis.scan(
                cursor=cursor,
                match=f"{self.run_prefix}*",
                count=100
            )
            for key in keys:
                data = self.redis.get(key)
                if data:
                    run = json.loads(data)
                    if run.get("actor_id") == actor_id or run.get("username") == actor_id:
                        all_runs.append(run)
            if cursor == 0:
                break

        # Sort by created_at descending (ISO-8601 strings sort
        # chronologically).
        all_runs.sort(key=lambda r: r.get("created_at", ""), reverse=True)

        total = len(all_runs)
        start = (page - 1) * limit
        end = start + limit

        return {
            "runs": all_runs[start:end],
            "pagination": {
                "page": page,
                "limit": limit,
                "total": total,
                "has_more": end < total,
            }
        }

    async def create_run(
        self,
        run_id: str,
        recipe_id: str,
        inputs: Dict[str, str],
        actor_id: str,
    ) -> Dict[str, Any]:
        """Create a new run in ``pending`` state and persist it.

        Args:
            run_id: Unique identifier for the run.
            recipe_id: Recipe content hash; stored as "recipe:<id>".
            inputs: Mapping of recipe input names to content hashes.
            actor_id: Owner of the run.

        Returns:
            The stored run record.
        """
        # Timezone-aware UTC replaces the deprecated, naive
        # datetime.utcnow(); isoformat() stays lexicographically sortable.
        from datetime import datetime, timezone

        run = {
            "run_id": run_id,
            "recipe": f"recipe:{recipe_id}",
            "inputs": inputs,
            "actor_id": actor_id,
            "status": "pending",
            "created_at": datetime.now(timezone.utc).isoformat(),
        }

        self.redis.set(f"{self.run_prefix}{run_id}", json.dumps(run))
        return run

    async def update_run(self, run_id: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Merge ``updates`` into a run and persist; None if unknown."""
        run = await self.get_run(run_id)
        if not run:
            return None

        run.update(updates)
        self.redis.set(f"{self.run_prefix}{run_id}", json.dumps(run))
        return run

    async def delete_run(self, run_id: str) -> bool:
        """Delete a run; True when a key was actually removed."""
        return self.redis.delete(f"{self.run_prefix}{run_id}") > 0

    async def load_plan(self, run_id: str) -> Optional[Dict[str, Any]]:
        """Load execution plan for a run, or None when no plan exists.

        Plans are cached as JSON files under settings.plan_cache_dir
        (assumed to be a pathlib.Path — TODO confirm in config).
        """
        from ..config import settings

        plan_path = settings.plan_cache_dir / f"{run_id}.json"
        if plan_path.exists():
            with open(plan_path) as f:
                return json.load(f)
        return None
|
||||
Reference in New Issue
Block a user