Files
celery/app/services/recipe_service.py
gilesb 8ab0f05a7d Add durable pending runs and recipe list debugging
- Store pending runs in PostgreSQL for durability across restarts
- Add recovery method for orphaned runs
- Increase Celery result_expires to 7 days
- Add task_reject_on_worker_lost for automatic re-queuing
- Add logging to recipe list to debug filter issues

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-11 20:35:00 +00:00

206 lines
6.8 KiB
Python

"""
Recipe Service - business logic for recipe management.
Recipes are content-addressed YAML files stored in the cache (and IPFS).
The recipe ID is the content hash of the YAML file.
"""
import tempfile
from pathlib import Path
from typing import Optional, List, Dict, Any, Tuple
import yaml
class RecipeService:
    """
    Service for managing recipes.

    Recipes are content-addressed YAML files stored in the cache (and
    optionally pinned to IPFS); the recipe ID is the content hash of the
    YAML file. Recipes are stored in the cache, not Redis.
    """

    def __init__(self, redis, cache):
        # Redis kept for API compatibility but not used for recipe storage.
        self.redis = redis
        self.cache = cache

    async def get_recipe(self, recipe_id: str) -> Optional[Dict[str, Any]]:
        """
        Load a recipe by ID (content hash).

        Returns the parsed YAML dict with ``recipe_id`` (and ``ipfs_cid``
        when available) added for convenience, or None when the recipe is
        not in the cache.
        """
        # Content-addressed lookup in the local cache.
        path = self.cache.get_by_content_hash(recipe_id)
        if not path or not path.exists():
            return None
        with open(path) as f:
            recipe_data = yaml.safe_load(f)
        # Non-dict YAML (bare list/scalar) is returned as-is; the metadata
        # keys only make sense on a mapping.
        if isinstance(recipe_data, dict):
            recipe_data["recipe_id"] = recipe_id
            ipfs_cid = self.cache.get_ipfs_cid(recipe_id)
            if ipfs_cid:
                recipe_data["ipfs_cid"] = ipfs_cid
        return recipe_data

    async def list_recipes(self, actor_id: Optional[str] = None, offset: int = 0, limit: int = 20) -> list:
        """
        List available recipes for a user, sorted by name and paginated.

        L1 data is isolated per-user: when ``actor_id`` is given, only
        recipes whose ``uploader`` matches are returned.

        Note: this scans the cache for recipe files. For production,
        you might want a database index of recipes by owner.
        """
        import logging
        logger = logging.getLogger(__name__)
        recipes = []
        # The cache backend may not support type listing; without it there
        # is no way to enumerate recipes, so we return an empty page.
        if hasattr(self.cache, 'list_by_type'):
            items = self.cache.list_by_type('recipe')
            # Lazy %-args avoid formatting cost when INFO is filtered out.
            logger.info("Found %d recipe items in cache", len(items))
            for content_hash in items:
                recipe = await self.get_recipe(content_hash)
                if recipe:
                    uploader = recipe.get("uploader")
                    logger.info("Recipe %s: uploader=%s, actor_id=%s",
                                content_hash[:12], uploader, actor_id)
                    # Filter by actor - L1 is per-user.
                    if actor_id is None or uploader == actor_id:
                        recipes.append(recipe)
        # Sort by name, then paginate.
        recipes.sort(key=lambda r: r.get("name", ""))
        return recipes[offset:offset + limit]

    async def upload_recipe(
        self,
        yaml_content: str,
        uploader: str,
        name: Optional[str] = None,
        description: Optional[str] = None,
    ) -> Tuple[Optional[str], Optional[str]]:
        """
        Upload a recipe from YAML content.

        The recipe is stored in the cache and optionally pinned to IPFS.
        Returns (recipe_id, error_message); exactly one is None.
        """
        # Validate YAML before doing any I/O.
        try:
            recipe_data = yaml.safe_load(yaml_content)
        except yaml.YAMLError as e:
            return None, f"Invalid YAML: {e}"
        if not isinstance(recipe_data, dict):
            return None, "Recipe must be a YAML dictionary"
        # Embed ownership metadata in the stored YAML so listing and
        # deletion can authorize without a separate index.
        recipe_data["uploader"] = uploader
        if name:
            recipe_data["name"] = name
        if description:
            recipe_data["description"] = description
        # Serialize back to YAML (with added metadata).
        final_yaml = yaml.dump(recipe_data, default_flow_style=False)
        tmp_path = None
        try:
            with tempfile.NamedTemporaryFile(delete=False, suffix=".yaml", mode="w") as tmp:
                tmp.write(final_yaml)
                tmp_path = Path(tmp.name)
            # Store in cache (content-addressed, auto-pins to IPFS).
            # move=True hands ownership of the temp file to the cache.
            cached, ipfs_cid = self.cache.put(tmp_path, node_type="recipe", move=True)
            return cached.content_hash, None
        except Exception as e:
            # Fix: don't leak the delete=False temp file when caching
            # fails (it was previously left behind on disk).
            if tmp_path is not None:
                tmp_path.unlink(missing_ok=True)
            return None, f"Failed to cache recipe: {e}"

    async def delete_recipe(self, recipe_id: str, actor_id: Optional[str] = None) -> Tuple[bool, Optional[str]]:
        """
        Delete a recipe from the local cache.

        Note: IPFS copies persist. When ``actor_id`` is given, deletion is
        refused unless it matches the recipe's ``uploader``.
        Returns (success, error_message).
        """
        # Fetch first so we can check ownership and existence.
        recipe = await self.get_recipe(recipe_id)
        if not recipe:
            return False, "Recipe not found"
        if actor_id:
            recipe_owner = recipe.get("uploader")
            # Recipes with no recorded uploader are deletable by anyone.
            if recipe_owner and recipe_owner != actor_id:
                return False, "Cannot delete: you don't own this recipe"
        try:
            if hasattr(self.cache, 'delete_by_content_hash'):
                success, msg = self.cache.delete_by_content_hash(recipe_id)
                if not success:
                    return False, msg
            else:
                # Fallback: resolve the cached path and delete directly.
                path = self.cache.get_by_content_hash(recipe_id)
                if path and path.exists():
                    path.unlink()
            return True, None
        except Exception as e:
            return False, f"Failed to delete: {e}"

    def parse_yaml(self, yaml_content: str) -> Dict[str, Any]:
        """Parse recipe YAML content into a dict via ``yaml.safe_load``."""
        return yaml.safe_load(yaml_content)

    def build_dag(self, recipe: Dict[str, Any]) -> Dict[str, Any]:
        """
        Build DAG visualization data from a recipe.

        Returns ``{"nodes": [...], "edges": [...]}`` in the element
        format expected by Cytoscape.js.
        """
        nodes = []
        edges = []
        dag = recipe.get("dag", {})
        dag_nodes = dag.get("nodes", {})
        output_node = dag.get("output")
        for node_id, node_def in dag_nodes.items():
            node_type = node_def.get("type", "EFFECT")
            nodes.append({
                "data": {
                    "id": node_id,
                    "label": node_id,
                    "nodeType": node_type,
                    "isOutput": node_id == output_node,
                }
            })
            # Edges point from each declared input to the consuming node.
            # Inputs may be plain node names or {"node": ...}/{"input": ...}.
            for input_ref in node_def.get("inputs", []):
                if isinstance(input_ref, dict):
                    source = input_ref.get("node") or input_ref.get("input")
                else:
                    source = input_ref
                if source:
                    edges.append({
                        "data": {
                            "source": source,
                            "target": node_id,
                        }
                    })
        return {"nodes": nodes, "edges": edges}