Remove dead code: execute_level, render_dog_from_cat, duplicate file_hash
- Remove execute_level() from tasks/execute.py (defined but never called)
- Remove render_dog_from_cat() from legacy_tasks.py (test convenience, never used)
- Remove duplicate file_hash() from legacy_tasks.py, import from cache_manager
- Remove unused hashlib import

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -379,51 +379,3 @@ def execute_step(
|
||||
}
|
||||
|
||||
|
||||
@app.task(bind=True, name='tasks.execute_level')
def execute_level(
    self,
    steps_json: List[str],
    plan_id: str,
    cache_ids: Dict[str, str],
) -> dict:
    """
    Execute all steps at a given dependency level.

    Steps at the same level can run in parallel: each entry in
    ``steps_json`` is dispatched as its own ``execute_step`` task via a
    Celery ``group``, and this task blocks until all of them finish.

    Args:
        steps_json: List of JSON-serialized ExecutionSteps
        plan_id: ID of the parent execution plan
        cache_ids: Mapping from step_id to cache_id

    Returns:
        Dict with results for all steps:
        ``status`` ("completed"), ``results`` (the per-step result dicts),
        and ``cache_ids`` (the input mapping merged with any new
        step_id -> cache_id pairs reported by the steps; the input dict
        itself is not mutated).
    """
    # Imported lazily inside the task body rather than at module level.
    from celery import group

    # Dispatch all steps in parallel
    tasks = [
        execute_step.s(step_json, plan_id, cache_ids)
        for step_json in steps_json
    ]

    # Execute in parallel and collect results
    job = group(tasks)
    results = job.apply_async()

    # Wait for completion
    # NOTE(review): calling .get() on subtask results from inside a running
    # task is a documented Celery deadlock hazard (recent Celery versions
    # raise unless disable_sync_subtasks=False); a chord would avoid the
    # synchronous wait. Can raise on timeout after 1 hour — confirm callers
    # expect that.
    step_results = results.get(timeout=3600)  # 1 hour timeout

    # Build cache_ids from results
    # Copy first so the caller's mapping is left untouched; steps that did
    # not report both a step_id and a cache_id are skipped.
    new_cache_ids = dict(cache_ids)
    for result in step_results:
        step_id = result.get("step_id")
        cache_id = result.get("cache_id")
        if step_id and cache_id:
            new_cache_ids[step_id] = cache_id

    return {
        "status": "completed",
        "results": step_results,
        "cache_ids": new_cache_ids,
    }
|
||||
|
||||
Reference in New Issue
Block a user