Remove dead code: execute_level, render_dog_from_cat, duplicate file_hash

- Remove execute_level() from tasks/execute.py (defined but never called)
- Remove render_dog_from_cat() from legacy_tasks.py (test convenience, never used)
- Remove duplicate file_hash() from legacy_tasks.py, import from cache_manager
- Remove unused hashlib import

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
gilesb
2026-01-13 01:21:55 +00:00
parent d603485d40
commit bf188f4671
2 changed files with 1 addition and 66 deletions

View File

@@ -5,7 +5,6 @@ Distributed rendering tasks for the Art DAG system.
Supports both single-effect runs and multi-step DAG execution.
"""
import hashlib
import json
import logging
import os
@@ -17,6 +16,7 @@ from typing import Dict, List, Optional
from celery import Task
from celery_app import app
from cache_manager import file_hash
# Import artdag components
from artdag import DAG, Node, NodeType
@@ -77,16 +77,6 @@ def _dog_effect(input_path: Path, output_path: Path, config: dict) -> Path:
return dog_process([input_path], output_path, config, None)
def file_hash(path: Path) -> str:
    """Compute the SHA3-256 hex digest of the file at *path*.

    Symlinks are resolved to their target before hashing. The file is
    read in 64 KiB chunks so arbitrarily large files hash in constant
    memory.
    """
    digest = hashlib.sha3_256()
    target = path.resolve() if path.is_symlink() else path
    with open(target, "rb") as handle:
        while chunk := handle.read(65536):
            digest.update(chunk)
    return digest.hexdigest()
# Cache directory (shared between server and worker)
# Overridable via the CACHE_DIR environment variable; defaults to ~/.artdag/cache.
CACHE_DIR = Path(os.environ.get("CACHE_DIR", str(Path.home() / ".artdag" / "cache")))
@@ -281,13 +271,6 @@ def render_effect(self, input_hash: str, effect_name: str, output_name: str) ->
return provenance
@app.task
def render_dog_from_cat() -> dict:
    """Convenience task: render the known cat asset through the dog effect.

    Dispatches render_effect asynchronously and blocks until the
    provenance dict comes back.
    """
    cat_hash = "33268b6e167deaf018cc538de12dbe562612b33e89a749391cef855b320a269b"
    async_result = render_effect.delay(cat_hash, "dog", "dog-from-cat-celery")
    return async_result.get()
@app.task(base=RenderTask, bind=True)
def execute_dag(self, dag_json: str, run_id: str = None) -> dict:
"""

View File

@@ -379,51 +379,3 @@ def execute_step(
}
@app.task(bind=True, name='tasks.execute_level')
def execute_level(
    self,
    steps_json: List[str],
    plan_id: str,
    cache_ids: Dict[str, str],
) -> dict:
    """
    Execute all steps at a given dependency level.
    Steps at the same level are independent of one another, so they are
    fanned out as a single Celery group and awaited together.
    Args:
        steps_json: List of JSON-serialized ExecutionSteps
        plan_id: ID of the parent execution plan
        cache_ids: Mapping from step_id to cache_id
    Returns:
        Dict with results for all steps and the updated cache-id mapping
    """
    from celery import group
    # Fan out: one execute_step signature per serialized step, run in parallel.
    fanout = group(
        execute_step.s(serialized, plan_id, cache_ids)
        for serialized in steps_json
    )
    # Block until every step at this level has finished (1 hour cap).
    outcomes = fanout.apply_async().get(timeout=3600)
    # Fold any newly produced cache ids into a copy of the incoming map.
    merged_cache_ids = dict(cache_ids)
    merged_cache_ids.update(
        (outcome.get("step_id"), outcome.get("cache_id"))
        for outcome in outcomes
        if outcome.get("step_id") and outcome.get("cache_id")
    )
    return {
        "status": "completed",
        "results": outcomes,
        "cache_ids": merged_cache_ids,
    }