Squashed 'core/' content from commit 4957443
git-subtree-dir: core git-subtree-split: 4957443184ae0eb6323635a90a19acffb3e01d07
This commit is contained in:
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Tests for new standalone primitive engine
|
||||
613
tests/test_activities.py
Normal file
613
tests/test_activities.py
Normal file
@@ -0,0 +1,613 @@
|
||||
# tests/test_activities.py
|
||||
"""Tests for the activity tracking and cache deletion system."""
|
||||
|
||||
import tempfile
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from artdag import Cache, DAG, Node, NodeType
|
||||
from artdag.activities import Activity, ActivityStore, ActivityManager, make_is_shared_fn
|
||||
|
||||
|
||||
class MockActivityPubStore:
    """In-memory stand-in for an ActivityPub store, used to exercise is_shared logic."""

    def __init__(self):
        # Set of content hashes that have been "published".
        self._shared_hashes = set()

    def mark_shared(self, cid: str):
        """Record *cid* as shared (published)."""
        self._shared_hashes.add(cid)

    def find_by_object_hash(self, cid: str):
        """Return a single mock Create activity when *cid* is shared, else an empty list."""
        return [MockActivity("Create")] if cid in self._shared_hashes else []
|
||||
|
||||
|
||||
class MockActivity:
    """Minimal stand-in for an ActivityPub activity; carries only its type string."""

    def __init__(self, activity_type: str):
        # e.g. "Create" — the only attribute the tests inspect.
        self.activity_type = activity_type
|
||||
|
||||
|
||||
@pytest.fixture
def temp_dir():
    """Yield a throwaway directory (as a Path) that is removed after the test."""
    with tempfile.TemporaryDirectory() as tmpdir:
        yield Path(tmpdir)
|
||||
|
||||
|
||||
@pytest.fixture
def cache(temp_dir):
    """Cache instance rooted under the per-test temporary directory."""
    return Cache(temp_dir / "cache")
|
||||
|
||||
|
||||
@pytest.fixture
def activity_store(temp_dir):
    """ActivityStore persisted under the per-test temporary directory."""
    return ActivityStore(temp_dir / "activities")
|
||||
|
||||
|
||||
@pytest.fixture
def ap_store():
    """Fresh mock ActivityPub store for each test."""
    return MockActivityPubStore()
|
||||
|
||||
|
||||
@pytest.fixture
def manager(cache, activity_store, ap_store):
    """ActivityManager wired to the cache, activity store, and mock AP store."""
    return ActivityManager(
        cache=cache,
        activity_store=activity_store,
        is_shared_fn=make_is_shared_fn(ap_store),
    )
|
||||
|
||||
|
||||
def create_test_file(path: Path, content: str = "test content") -> Path:
    """Write *content* to *path*, creating parent directories, and return the path."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
    return path
|
||||
|
||||
|
||||
class TestCacheEntryContentHash:
    """Tests for the content id (cid) stored on CacheEntry."""

    def test_put_computes_cid(self, cache, temp_dir):
        """put() should compute and store a non-empty cid."""
        src = create_test_file(temp_dir / "input.txt", "hello world")

        cache.put("node1", src, "test")
        entry = cache.get_entry("node1")

        assert entry is not None
        assert entry.cid != ""
        assert len(entry.cid) == 64  # SHA-3-256 hex digest length

    def test_same_content_same_hash(self, cache, temp_dir):
        """Identical file content must hash to the same cid."""
        first = create_test_file(temp_dir / "file1.txt", "identical content")
        second = create_test_file(temp_dir / "file2.txt", "identical content")

        cache.put("node1", first, "test")
        cache.put("node2", second, "test")

        assert cache.get_entry("node1").cid == cache.get_entry("node2").cid

    def test_different_content_different_hash(self, cache, temp_dir):
        """Differing file content must hash to different cids."""
        first = create_test_file(temp_dir / "file1.txt", "content A")
        second = create_test_file(temp_dir / "file2.txt", "content B")

        cache.put("node1", first, "test")
        cache.put("node2", second, "test")

        assert cache.get_entry("node1").cid != cache.get_entry("node2").cid

    def test_find_by_cid(self, cache, temp_dir):
        """Entries are findable by their content hash."""
        src = create_test_file(temp_dir / "input.txt", "unique content")
        cache.put("node1", src, "test")

        found = cache.find_by_cid(cache.get_entry("node1").cid)

        assert found is not None
        assert found.node_id == "node1"

    def test_cid_persists(self, temp_dir):
        """The cid must survive a reload of the cache from disk."""
        first_cache = Cache(temp_dir / "cache")
        src = create_test_file(temp_dir / "input.txt", "persistent")
        first_cache.put("node1", src, "test")
        original_hash = first_cache.get_entry("node1").cid

        # A fresh instance forces a reload from disk.
        reloaded = Cache(temp_dir / "cache")
        assert reloaded.get_entry("node1").cid == original_hash
|
||||
|
||||
|
||||
class TestActivity:
    """Tests for the Activity dataclass."""

    def test_activity_from_dag(self):
        """Activity.from_dag() should classify nodes as input/intermediate/output."""
        # Chain: source -> transform -> output.
        dag = DAG()
        source = Node(NodeType.SOURCE, {"path": "/test.mp4"})
        transform = Node(NodeType.TRANSFORM, {"effect": "blur"}, inputs=[source.node_id])
        output = Node(NodeType.RESIZE, {"width": 100}, inputs=[transform.node_id])
        for node in (source, transform, output):
            dag.add_node(node)
        dag.set_output(output.node_id)

        activity = Activity.from_dag(dag)

        assert source.node_id in activity.input_ids
        assert activity.output_id == output.node_id
        assert transform.node_id in activity.intermediate_ids

    def test_activity_with_multiple_inputs(self):
        """DAGs with several source nodes yield several input ids."""
        dag = DAG()
        source1 = Node(NodeType.SOURCE, {"path": "/a.mp4"})
        source2 = Node(NodeType.SOURCE, {"path": "/b.mp4"})
        sequence = Node(NodeType.SEQUENCE, {}, inputs=[source1.node_id, source2.node_id])
        for node in (source1, source2, sequence):
            dag.add_node(node)
        dag.set_output(sequence.node_id)

        activity = Activity.from_dag(dag)

        assert len(activity.input_ids) == 2
        assert source1.node_id in activity.input_ids
        assert source2.node_id in activity.input_ids
        assert activity.output_id == sequence.node_id
        assert len(activity.intermediate_ids) == 0

    def test_activity_serialization(self):
        """Round-tripping through to_dict/from_dict preserves every field."""
        dag = DAG()
        source = Node(NodeType.SOURCE, {"path": "/test.mp4"})
        dag.add_node(source)
        dag.set_output(source.node_id)

        activity = Activity.from_dag(dag)
        restored = Activity.from_dict(activity.to_dict())

        assert restored.activity_id == activity.activity_id
        assert restored.input_ids == activity.input_ids
        assert restored.output_id == activity.output_id
        assert restored.intermediate_ids == activity.intermediate_ids

    def test_all_node_ids(self):
        """all_node_ids should cover inputs, output, and intermediates."""
        activity = Activity(
            activity_id="test",
            input_ids=["a", "b"],
            output_id="c",
            intermediate_ids=["d", "e"],
            created_at=time.time(),
        )
        assert set(activity.all_node_ids) == {"a", "b", "c", "d", "e"}
|
||||
|
||||
|
||||
class TestActivityStore:
    """Tests for ActivityStore persistence."""

    @staticmethod
    def _make(activity_id, input_ids, output_id, intermediate_ids):
        """Build an Activity stamped with the current time."""
        return Activity(
            activity_id=activity_id,
            input_ids=input_ids,
            output_id=output_id,
            intermediate_ids=intermediate_ids,
            created_at=time.time(),
        )

    def test_add_and_get(self, activity_store):
        """Added activities are retrievable by id."""
        activity_store.add(self._make("test1", ["input1"], "output1", ["inter1"]))

        retrieved = activity_store.get("test1")

        assert retrieved is not None
        assert retrieved.activity_id == "test1"

    def test_persistence(self, temp_dir):
        """Activities must survive a reload of the store from disk."""
        store1 = ActivityStore(temp_dir / "activities")
        store1.add(self._make("persist", ["i1"], "o1", []))

        # A second instance reloads from disk.
        store2 = ActivityStore(temp_dir / "activities")
        retrieved = store2.get("persist")

        assert retrieved is not None
        assert retrieved.activity_id == "persist"

    def test_find_by_input_ids(self, activity_store):
        """Lookup by input set is order-insensitive."""
        activity_store.add(self._make("a1", ["x", "y"], "o1", []))
        # Same inputs, different order — should still match.
        activity_store.add(self._make("a2", ["y", "x"], "o2", []))
        # Different inputs — should not match.
        activity_store.add(self._make("a3", ["z"], "o3", []))

        found = activity_store.find_by_input_ids(["x", "y"])

        assert len(found) == 2
        assert {a.activity_id for a in found} == {"a1", "a2"}

    def test_find_using_node(self, activity_store):
        """Activities are findable by any node they reference."""
        activity_store.add(self._make("a1", ["input1"], "output1", ["inter1"]))

        # Input, intermediate, and output ids are all searchable.
        for node_id in ("input1", "inter1", "output1"):
            assert len(activity_store.find_using_node(node_id)) == 1

        # Unknown ids match nothing.
        assert len(activity_store.find_using_node("unknown")) == 0

    def test_remove(self, activity_store):
        """remove() deletes the activity and reports success."""
        activity_store.add(self._make("to_remove", ["i"], "o", []))
        assert activity_store.get("to_remove") is not None

        assert activity_store.remove("to_remove") is True
        assert activity_store.get("to_remove") is None
|
||||
|
||||
|
||||
class TestActivityManager:
    """Tests for ActivityManager deletion rules."""

    @staticmethod
    def _activity(activity_id, input_ids, output_id, intermediate_ids):
        """Build an Activity stamped with the current time."""
        return Activity(
            activity_id=activity_id,
            input_ids=input_ids,
            output_id=output_id,
            intermediate_ids=intermediate_ids,
            created_at=time.time(),
        )

    def test_can_delete_orphaned_entry(self, manager, cache, temp_dir):
        """Entries referenced by no activity are deletable."""
        cache.put("orphan_node", create_test_file(temp_dir / "orphan.txt", "orphan"), "test")

        assert manager.can_delete_cache_entry("orphan_node") is True

    def test_cannot_delete_shared_entry(self, manager, cache, temp_dir, ap_store):
        """Entries whose content was published via ActivityPub are protected."""
        cache.put("shared_node", create_test_file(temp_dir / "shared.txt", "shared content"), "test")

        # Publishing the entry's content hash marks it shared.
        ap_store.mark_shared(cache.get_entry("shared_node").cid)

        assert manager.can_delete_cache_entry("shared_node") is False

    def test_cannot_delete_activity_input(self, manager, cache, activity_store, temp_dir):
        """Entries that are an activity's input are protected."""
        cache.put("input_node", create_test_file(temp_dir / "input.txt", "input"), "test")
        activity_store.add(self._activity("a1", ["input_node"], "output_node", []))

        assert manager.can_delete_cache_entry("input_node") is False

    def test_cannot_delete_activity_output(self, manager, cache, activity_store, temp_dir):
        """Entries that are an activity's output are protected."""
        cache.put("output_node", create_test_file(temp_dir / "output.txt", "output"), "test")
        activity_store.add(self._activity("a1", ["input_node"], "output_node", []))

        assert manager.can_delete_cache_entry("output_node") is False

    def test_can_delete_intermediate(self, manager, cache, activity_store, temp_dir):
        """Intermediates are deletable (they can be recomputed)."""
        cache.put("inter_node", create_test_file(temp_dir / "inter.txt", "intermediate"), "test")
        activity_store.add(self._activity("a1", ["input_node"], "output_node", ["inter_node"]))

        assert manager.can_delete_cache_entry("inter_node") is True

    def test_can_discard_activity_no_shared(self, manager, activity_store):
        """An activity with nothing shared may be discarded."""
        activity_store.add(self._activity("a1", ["i1"], "o1", ["m1"]))

        assert manager.can_discard_activity("a1") is True

    def test_cannot_discard_activity_with_shared_output(self, manager, cache, activity_store, temp_dir, ap_store):
        """A shared output blocks discarding the whole activity."""
        cache.put("o1", create_test_file(temp_dir / "output.txt", "output content"), "test")
        activity_store.add(self._activity("a1", ["i1"], "o1", []))

        # Share the output's content hash.
        ap_store.mark_shared(cache.get_entry("o1").cid)

        assert manager.can_discard_activity("a1") is False

    def test_cannot_discard_activity_with_shared_input(self, manager, cache, activity_store, temp_dir, ap_store):
        """A shared input also blocks discarding the activity."""
        cache.put("i1", create_test_file(temp_dir / "input.txt", "input content"), "test")
        activity_store.add(self._activity("a1", ["i1"], "o1", []))

        ap_store.mark_shared(cache.get_entry("i1").cid)

        assert manager.can_discard_activity("a1") is False

    def test_discard_activity_deletes_intermediates(self, manager, cache, activity_store, temp_dir):
        """Discarding removes the activity and its intermediate cache entries."""
        cache.put("i1", create_test_file(temp_dir / "input.txt", "input"), "test")
        cache.put("m1", create_test_file(temp_dir / "inter.txt", "intermediate"), "test")
        cache.put("o1", create_test_file(temp_dir / "output.txt", "output"), "test")
        activity_store.add(self._activity("a1", ["i1"], "o1", ["m1"]))

        result = manager.discard_activity("a1")

        assert result is True
        assert cache.has("m1") is False  # intermediate deleted
        assert activity_store.get("a1") is None  # activity removed

    def test_discard_activity_deletes_orphaned_output(self, manager, cache, activity_store, temp_dir):
        """Discarding deletes the output when nothing else references it."""
        cache.put("o1", create_test_file(temp_dir / "output.txt", "output"), "test")
        activity_store.add(self._activity("a1", [], "o1", []))

        manager.discard_activity("a1")

        assert cache.has("o1") is False  # orphaned output deleted

    def test_discard_activity_keeps_shared_output(self, manager, cache, activity_store, temp_dir, ap_store):
        """Discarding fails and preserves everything when the output is shared."""
        cache.put("o1", create_test_file(temp_dir / "output.txt", "shared output"), "test")
        activity_store.add(self._activity("a1", [], "o1", []))

        ap_store.mark_shared(cache.get_entry("o1").cid)

        result = manager.discard_activity("a1")

        assert result is False  # cannot discard
        assert cache.has("o1") is True  # output preserved
        assert activity_store.get("a1") is not None  # activity preserved

    def test_discard_keeps_input_used_elsewhere(self, manager, cache, activity_store, temp_dir):
        """An input still referenced by another activity survives discarding."""
        cache.put("shared_input", create_test_file(temp_dir / "input.txt", "shared input"), "test")
        activity_store.add(self._activity("a1", ["shared_input"], "o1", []))
        activity_store.add(self._activity("a2", ["shared_input"], "o2", []))

        manager.discard_activity("a1")

        # Still referenced by a2, so it must remain.
        assert cache.has("shared_input") is True

    def test_get_deletable_entries(self, manager, cache, activity_store, temp_dir):
        """get_deletable_entries() lists orphans and intermediates, not inputs."""
        cache.put("orphan", create_test_file(temp_dir / "orphan.txt", "orphan"), "test")  # deletable
        cache.put("inter", create_test_file(temp_dir / "inter.txt", "inter"), "test")  # deletable
        cache.put("input", create_test_file(temp_dir / "input.txt", "input"), "test")  # protected
        activity_store.add(self._activity("a1", ["input"], "output", ["inter"]))

        deletable_ids = {e.node_id for e in manager.get_deletable_entries()}

        assert "orphan" in deletable_ids
        assert "inter" in deletable_ids
        assert "input" not in deletable_ids

    def test_cleanup_intermediates(self, manager, cache, activity_store, temp_dir):
        """cleanup_intermediates() deletes every intermediate entry."""
        cache.put("inter1", create_test_file(temp_dir / "i1.txt", "inter1"), "test")
        cache.put("inter2", create_test_file(temp_dir / "i2.txt", "inter2"), "test")
        activity_store.add(self._activity("a1", ["input"], "output", ["inter1", "inter2"]))

        deleted = manager.cleanup_intermediates()

        assert deleted == 2
        assert cache.has("inter1") is False
        assert cache.has("inter2") is False
|
||||
|
||||
|
||||
class TestMakeIsSharedFn:
    """Tests for the make_is_shared_fn factory."""

    def test_returns_true_for_shared(self, ap_store):
        """The predicate reports True for content the store has shared."""
        is_shared = make_is_shared_fn(ap_store)
        ap_store.mark_shared("hash123")

        assert is_shared("hash123") is True

    def test_returns_false_for_not_shared(self, ap_store):
        """The predicate reports False for unknown content."""
        is_shared = make_is_shared_fn(ap_store)

        assert is_shared("unknown_hash") is False
|
||||
163
tests/test_cache.py
Normal file
163
tests/test_cache.py
Normal file
@@ -0,0 +1,163 @@
|
||||
# tests/test_primitive_new/test_cache.py
|
||||
"""Tests for primitive cache module."""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from artdag.cache import Cache, CacheStats
|
||||
|
||||
|
||||
@pytest.fixture
def cache_dir():
    """Yield a temporary cache directory (as a Path), removed after the test."""
    with tempfile.TemporaryDirectory() as tmpdir:
        yield Path(tmpdir)
|
||||
|
||||
|
||||
@pytest.fixture
def cache(cache_dir):
    """Cache instance rooted at the temporary directory."""
    return Cache(cache_dir)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_file(cache_dir):
    """Create a small text file inside the cache dir to feed into the cache."""
    file_path = cache_dir / "sample.txt"
    file_path.write_text("test content")
    return file_path
|
||||
|
||||
|
||||
class TestCache:
    """Tests for the Cache class."""

    def test_cache_creation(self, cache_dir):
        """Constructing a Cache creates its directory."""
        cache = Cache(cache_dir / "new_cache")
        assert cache.cache_dir.exists()

    def test_cache_put_and_get(self, cache, sample_file):
        """put() stores a file and get() hands back the cached path."""
        node_id = "abc123"
        cached_path = cache.put(node_id, sample_file, "TEST")

        assert cached_path.exists()
        assert cache.has(node_id)
        assert cache.get(node_id) == cached_path

    def test_cache_miss(self, cache):
        """get() on an unknown id returns None."""
        assert cache.get("nonexistent") is None

    def test_cache_stats_hit_miss(self, cache, sample_file):
        """Hits and misses are counted and combined into a hit rate."""
        cache.put("abc123", sample_file, "TEST")

        cache.get("nonexistent")  # miss
        assert cache.stats.misses == 1

        cache.get("abc123")  # hit
        assert cache.stats.hits == 1

        assert cache.stats.hit_rate == 0.5

    def test_cache_remove(self, cache, sample_file):
        """remove() drops an entry."""
        node_id = "abc123"
        cache.put(node_id, sample_file, "TEST")
        assert cache.has(node_id)

        cache.remove(node_id)
        assert not cache.has(node_id)

    def test_cache_clear(self, cache, sample_file):
        """clear() empties the cache entirely."""
        cache.put("node1", sample_file, "TEST")
        cache.put("node2", sample_file, "TEST")
        assert cache.stats.total_entries == 2

        cache.clear()

        assert cache.stats.total_entries == 0
        assert not cache.has("node1")
        assert not cache.has("node2")

    def test_cache_preserves_extension(self, cache, cache_dir):
        """Cached files keep their original extension."""
        mp4_file = cache_dir / "video.mp4"
        mp4_file.write_text("fake video")

        cached = cache.put("video_node", mp4_file, "SOURCE")

        assert cached.suffix == ".mp4"

    def test_cache_list_entries(self, cache, sample_file):
        """list_entries() reports every stored entry."""
        cache.put("node1", sample_file, "TYPE1")
        cache.put("node2", sample_file, "TYPE2")

        entries = cache.list_entries()

        assert len(entries) == 2
        node_ids = {e.node_id for e in entries}
        assert "node1" in node_ids
        assert "node2" in node_ids

    def test_cache_persistence(self, cache_dir, sample_file):
        """Entries persist across cache instances."""
        first = Cache(cache_dir)
        first.put("abc123", sample_file, "TEST")

        # A second instance loads the index from disk.
        second = Cache(cache_dir)
        assert second.has("abc123")

    def test_cache_prune_by_age(self, cache, sample_file):
        """prune() removes entries older than the cutoff."""
        import time

        cache.put("old_node", sample_file, "TEST")

        # Backdate the entry by one hour.
        cache._entries["old_node"].created_at = time.time() - 3600

        removed = cache.prune(max_age_seconds=1800)  # 30-minute cutoff

        assert removed == 1
        assert not cache.has("old_node")

    def test_cache_output_path(self, cache):
        """get_output_path() yields a writable path carrying id and suffix."""
        path = cache.get_output_path("abc123", ".mp4")

        assert path.suffix == ".mp4"
        assert "abc123" in str(path)
        assert path.parent.exists()
|
||||
|
||||
|
||||
class TestCacheStats:
    """Tests for the CacheStats class."""

    def test_hit_rate_calculation(self):
        """hit_rate is hits divided by total requests."""
        stats = CacheStats()
        stats.record_hit()
        stats.record_hit()
        stats.record_miss()

        assert stats.hits == 2
        assert stats.misses == 1
        # 2/3, compared loosely to dodge float representation issues.
        assert abs(stats.hit_rate - 0.666) < 0.01

    def test_initial_hit_rate(self):
        """With no requests recorded, hit_rate is 0.0 (no division by zero)."""
        assert CacheStats().hit_rate == 0.0
|
||||
271
tests/test_dag.py
Normal file
271
tests/test_dag.py
Normal file
@@ -0,0 +1,271 @@
|
||||
# tests/test_primitive_new/test_dag.py
|
||||
"""Tests for primitive DAG data structures."""
|
||||
|
||||
import pytest
|
||||
from artdag.dag import Node, NodeType, DAG, DAGBuilder
|
||||
|
||||
|
||||
class TestNode:
    """Tests for the Node class."""

    def test_node_creation(self):
        """A node stores its type and config and gets an id."""
        node = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})

        assert node.node_type == NodeType.SOURCE
        assert node.config == {"path": "/test.mp4"}
        assert node.node_id is not None

    def test_node_id_is_content_addressed(self):
        """Identical content yields an identical node_id."""
        first = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
        second = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})

        assert first.node_id == second.node_id

    def test_different_config_different_id(self):
        """Different config yields a different node_id."""
        first = Node(node_type=NodeType.SOURCE, config={"path": "/test1.mp4"})
        second = Node(node_type=NodeType.SOURCE, config={"path": "/test2.mp4"})

        assert first.node_id != second.node_id

    def test_node_with_inputs(self):
        """Inputs participate in the content-addressed id."""
        first = Node(node_type=NodeType.SEGMENT, config={"duration": 5}, inputs=["abc123"])
        second = Node(node_type=NodeType.SEGMENT, config={"duration": 5}, inputs=["abc123"])
        third = Node(node_type=NodeType.SEGMENT, config={"duration": 5}, inputs=["def456"])

        assert first.node_id == second.node_id
        assert first.node_id != third.node_id

    def test_node_serialization(self):
        """Round-tripping through to_dict/from_dict preserves every field."""
        original = Node(
            node_type=NodeType.SEGMENT,
            config={"duration": 5.0, "offset": 10.0},
            inputs=["abc123"],
            name="my_segment",
        )

        restored = Node.from_dict(original.to_dict())

        assert restored.node_type == original.node_type
        assert restored.config == original.config
        assert restored.inputs == original.inputs
        assert restored.name == original.name
        assert restored.node_id == original.node_id

    def test_custom_node_type(self):
        """Arbitrary string node types are accepted."""
        node = Node(node_type="CUSTOM_TYPE", config={"custom": True})

        assert node.node_type == "CUSTOM_TYPE"
        assert node.node_id is not None
|
||||
|
||||
|
||||
class TestDAG:
|
||||
"""Test DAG class."""
|
||||
|
||||
def test_dag_creation(self):
|
||||
"""Test basic DAG creation."""
|
||||
dag = DAG()
|
||||
assert len(dag.nodes) == 0
|
||||
assert dag.output_id is None
|
||||
|
||||
def test_add_node(self):
|
||||
"""Test adding nodes to DAG."""
|
||||
dag = DAG()
|
||||
node = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
node_id = dag.add_node(node)
|
||||
|
||||
assert node_id in dag.nodes
|
||||
assert dag.nodes[node_id] == node
|
||||
|
||||
def test_node_deduplication(self):
|
||||
"""Same node added twice returns same ID."""
|
||||
dag = DAG()
|
||||
node1 = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
node2 = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
|
||||
id1 = dag.add_node(node1)
|
||||
id2 = dag.add_node(node2)
|
||||
|
||||
assert id1 == id2
|
||||
assert len(dag.nodes) == 1
|
||||
|
||||
def test_set_output(self):
|
||||
"""Test setting output node."""
|
||||
dag = DAG()
|
||||
node = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
node_id = dag.add_node(node)
|
||||
dag.set_output(node_id)
|
||||
|
||||
assert dag.output_id == node_id
|
||||
|
||||
def test_set_output_invalid(self):
|
||||
"""Setting invalid output raises error."""
|
||||
dag = DAG()
|
||||
with pytest.raises(ValueError):
|
||||
dag.set_output("nonexistent")
|
||||
|
||||
def test_topological_order(self):
|
||||
"""Test topological ordering."""
|
||||
dag = DAG()
|
||||
|
||||
# Create simple chain: source -> segment -> output
|
||||
source = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
source_id = dag.add_node(source)
|
||||
|
||||
segment = Node(node_type=NodeType.SEGMENT, config={"duration": 5}, inputs=[source_id])
|
||||
segment_id = dag.add_node(segment)
|
||||
|
||||
dag.set_output(segment_id)
|
||||
order = dag.topological_order()
|
||||
|
||||
# Source must come before segment
|
||||
assert order.index(source_id) < order.index(segment_id)
|
||||
|
||||
def test_validate_valid_dag(self):
|
||||
"""Test validation of valid DAG."""
|
||||
dag = DAG()
|
||||
node = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
node_id = dag.add_node(node)
|
||||
dag.set_output(node_id)
|
||||
|
||||
errors = dag.validate()
|
||||
assert len(errors) == 0
|
||||
|
||||
def test_validate_no_output(self):
|
||||
"""DAG without output is invalid."""
|
||||
dag = DAG()
|
||||
node = Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"})
|
||||
dag.add_node(node)
|
||||
|
||||
errors = dag.validate()
|
||||
assert len(errors) > 0
|
||||
assert any("output" in e.lower() for e in errors)
|
||||
|
||||
def test_validate_missing_input(self):
    """DAG with missing input reference is invalid."""
    graph = DAG()
    # Node references an input id that does not exist in the graph.
    dangling = Node(node_type=NodeType.SEGMENT, config={"duration": 5}, inputs=["nonexistent"])
    nid = graph.add_node(dangling)
    graph.set_output(nid)

    problems = graph.validate()
    assert len(problems) > 0
    assert any("missing" in msg.lower() for msg in problems)
def test_dag_serialization(self):
    """Round-tripping through to_dict/from_dict preserves structure."""
    graph = DAG(metadata={"name": "test_dag"})
    src_id = graph.add_node(Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"}))
    graph.set_output(src_id)

    clone = DAG.from_dict(graph.to_dict())

    assert len(clone.nodes) == len(graph.nodes)
    assert clone.output_id == graph.output_id
    assert clone.metadata == graph.metadata
def test_dag_json(self):
    """Round-tripping through to_json/from_json preserves structure."""
    graph = DAG()
    nid = graph.add_node(Node(node_type=NodeType.SOURCE, config={"path": "/test.mp4"}))
    graph.set_output(nid)

    clone = DAG.from_json(graph.to_json())

    assert len(clone.nodes) == 1
    assert clone.output_id == nid
||||
class TestDAGBuilder:
    """Test DAGBuilder class."""

    def test_builder_source(self):
        """source() adds a SOURCE node carrying the file path."""
        b = DAGBuilder()
        sid = b.source("/test.mp4")

        assert sid in b.dag.nodes
        created = b.dag.nodes[sid]
        assert created.node_type == NodeType.SOURCE
        assert created.config["path"] == "/test.mp4"

    def test_builder_segment(self):
        """segment() records duration/offset and wires its input."""
        b = DAGBuilder()
        sid = b.source("/test.mp4")
        seg_id = b.segment(sid, duration=5.0, offset=10.0)

        created = b.dag.nodes[seg_id]
        assert created.node_type == NodeType.SEGMENT
        assert created.config["duration"] == 5.0
        assert created.config["offset"] == 10.0
        assert sid in created.inputs

    def test_builder_chain(self):
        """A source -> segment -> resize chain builds and validates."""
        b = DAGBuilder()
        src_id = b.source("/test.mp4")
        seg_id = b.segment(src_id, duration=5.0)
        tail = b.resize(seg_id, width=1920, height=1080)
        b.set_output(tail)

        graph = b.build()

        assert len(graph.nodes) == 3
        assert graph.output_id == tail
        assert len(graph.validate()) == 0

    def test_builder_sequence(self):
        """sequence() joins clips and keeps both as inputs."""
        b = DAGBuilder()
        first = b.source("/clip1.mp4")
        second = b.source("/clip2.mp4")
        joined = b.sequence([first, second], transition={"type": "crossfade", "duration": 0.5})
        b.set_output(joined)

        created = b.build().nodes[joined]
        assert created.node_type == NodeType.SEQUENCE
        assert first in created.inputs
        assert second in created.inputs

    def test_builder_mux(self):
        """mux() combines a video and an audio stream into one node."""
        b = DAGBuilder()
        vid = b.source("/video.mp4")
        aud = b.source("/audio.mp3")
        combined = b.mux(vid, aud)
        b.set_output(combined)

        created = b.build().nodes[combined]
        assert created.node_type == NodeType.MUX
        assert vid in created.inputs
        assert aud in created.inputs

    def test_builder_transform(self):
        """transform() stores the effect parameters in node config."""
        b = DAGBuilder()
        styled = b.transform(b.source("/test.mp4"), effects={"saturation": 1.5, "contrast": 1.2})
        b.set_output(styled)

        created = b.build().nodes[styled]
        assert created.node_type == NodeType.TRANSFORM
        assert created.config["effects"]["saturation"] == 1.5

    def test_builder_validation_fails(self):
        """Builder raises error for invalid DAG."""
        b = DAGBuilder()
        b.source("/test.mp4")
        # No output was ever set, so build() must refuse.
        with pytest.raises(ValueError):
            b.build()
464
tests/test_engine.py
Normal file
464
tests/test_engine.py
Normal file
@@ -0,0 +1,464 @@
|
||||
# tests/test_primitive_new/test_engine.py
|
||||
"""Tests for primitive engine execution."""
|
||||
|
||||
import pytest
|
||||
import subprocess
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from artdag.dag import DAG, DAGBuilder, Node, NodeType
|
||||
from artdag.engine import Engine
|
||||
from artdag import nodes # Register executors
|
||||
|
||||
|
||||
@pytest.fixture
def cache_dir():
    """Create temporary cache directory."""
    # The directory (and everything written into it) is removed when the
    # fixture is torn down.
    with tempfile.TemporaryDirectory() as scratch:
        yield Path(scratch)
@pytest.fixture
def engine(cache_dir):
    """Create engine instance."""
    # Each test gets a fresh Engine rooted at its own temporary cache dir,
    # so cache state never leaks between tests.
    return Engine(cache_dir)
@pytest.fixture
def test_video(cache_dir):
    """Create a test video file."""
    target = cache_dir / "test_video.mp4"
    # Synthesize 5 s of test pattern plus a 440 Hz tone so no binary
    # fixture file needs to be checked in.
    subprocess.run(
        [
            "ffmpeg", "-y",
            "-f", "lavfi", "-i", "testsrc=duration=5:size=320x240:rate=30",
            "-f", "lavfi", "-i", "sine=frequency=440:duration=5",
            "-c:v", "libx264", "-preset", "ultrafast",
            "-c:a", "aac",
            str(target),
        ],
        capture_output=True,
        check=True,
    )
    return target
@pytest.fixture
def test_audio(cache_dir):
    """Create a test audio file."""
    target = cache_dir / "test_audio.mp3"
    # 5 s sine tone at 880 Hz, rendered on the fly by ffmpeg.
    subprocess.run(
        [
            "ffmpeg", "-y",
            "-f", "lavfi", "-i", "sine=frequency=880:duration=5",
            "-c:a", "libmp3lame",
            str(target),
        ],
        capture_output=True,
        check=True,
    )
    return target
class TestEngineBasic:
    """Test basic engine functionality."""

    def test_engine_creation(self, cache_dir):
        """Constructing an engine wires up its cache."""
        assert Engine(cache_dir).cache is not None

    def test_invalid_dag(self, engine):
        """An empty DAG is rejected before any node runs."""
        outcome = engine.execute(DAG())  # no nodes, no output set

        assert not outcome.success
        assert "Invalid DAG" in outcome.error

    def test_missing_executor(self, engine):
        """A node type with no registered executor fails execution."""
        graph = DAG()
        nid = graph.add_node(Node(node_type="UNKNOWN_TYPE", config={}))
        graph.set_output(nid)

        outcome = engine.execute(graph)

        assert not outcome.success
        assert "No executor" in outcome.error
class TestSourceExecutor:
    """Test SOURCE node executor."""

    def test_source_creates_symlink(self, engine, test_video):
        """A SOURCE node materializes as a symlink into the cache."""
        b = DAGBuilder()
        b.set_output(b.source(str(test_video)))

        outcome = engine.execute(b.build())

        assert outcome.success
        assert outcome.output_path.exists()
        assert outcome.output_path.is_symlink()

    def test_source_missing_file(self, engine):
        """A SOURCE pointing at a missing path fails with a clear error."""
        b = DAGBuilder()
        b.set_output(b.source("/nonexistent/file.mp4"))

        outcome = engine.execute(b.build())

        assert not outcome.success
        assert "not found" in outcome.error.lower()
class TestSegmentExecutor:
    """Test SEGMENT node executor."""

    def test_segment_duration(self, engine, test_video):
        """A 2-second segment of the test clip probes to roughly 2 s."""
        b = DAGBuilder()
        b.set_output(b.segment(b.source(str(test_video)), duration=2.0))

        outcome = engine.execute(b.build())

        assert outcome.success

        # Ask ffprobe for the container duration of the rendered file.
        probe = subprocess.run(
            [
                "ffprobe", "-v", "error",
                "-show_entries", "format=duration",
                "-of", "csv=p=0",
                str(outcome.output_path),
            ],
            capture_output=True,
            text=True,
        )
        measured = float(probe.stdout.strip())
        assert abs(measured - 2.0) < 0.1

    def test_segment_with_offset(self, engine, test_video):
        """Test segment with offset."""
        b = DAGBuilder()
        b.set_output(b.segment(b.source(str(test_video)), offset=1.0, duration=2.0))

        outcome = engine.execute(b.build())
        assert outcome.success
class TestResizeExecutor:
    """Test RESIZE node executor."""

    def test_resize_dimensions(self, engine, test_video):
        """Resizing to 640x480 yields a video stream with those dimensions."""
        b = DAGBuilder()
        b.set_output(b.resize(b.source(str(test_video)), width=640, height=480, mode="fit"))

        outcome = engine.execute(b.build())

        assert outcome.success

        # Probe stream dimensions; only the first (video) stream line matters.
        probe = subprocess.run(
            [
                "ffprobe", "-v", "error",
                "-show_entries", "stream=width,height",
                "-of", "csv=p=0:s=x",
                str(outcome.output_path),
            ],
            capture_output=True,
            text=True,
        )
        first_stream = probe.stdout.strip().split("\n")[0]
        assert "640x480" in first_stream
class TestTransformExecutor:
    """Test TRANSFORM node executor."""

    def test_transform_saturation(self, engine, test_video):
        """A single saturation effect renders successfully."""
        b = DAGBuilder()
        b.set_output(b.transform(b.source(str(test_video)), effects={"saturation": 1.5}))

        outcome = engine.execute(b.build())

        assert outcome.success
        assert outcome.output_path.exists()

    def test_transform_multiple_effects(self, engine, test_video):
        """Several effects can be stacked on one TRANSFORM node."""
        requested = {
            "saturation": 1.2,
            "contrast": 1.1,
            "brightness": 0.05,
        }
        b = DAGBuilder()
        b.set_output(b.transform(b.source(str(test_video)), effects=requested))

        outcome = engine.execute(b.build())
        assert outcome.success
class TestSequenceExecutor:
    """Test SEQUENCE node executor.

    Both tests verify the rendered output duration; the duplicated ffprobe
    invocation is factored into ``_duration_of``.
    """

    @staticmethod
    def _duration_of(path):
        """Return the container duration of *path* in seconds via ffprobe."""
        probe = subprocess.run([
            "ffprobe", "-v", "error",
            "-show_entries", "format=duration",
            "-of", "csv=p=0",
            str(path)
        ], capture_output=True, text=True)
        return float(probe.stdout.strip())

    def test_sequence_cut(self, engine, test_video):
        """Test sequence with cut transition."""
        builder = DAGBuilder()
        s1 = builder.source(str(test_video))
        seg1 = builder.segment(s1, duration=2.0)
        seg2 = builder.segment(s1, offset=2.0, duration=2.0)
        seq = builder.sequence([seg1, seg2], transition={"type": "cut"})
        builder.set_output(seq)
        dag = builder.build()

        result = engine.execute(dag)

        assert result.success

        # A hard cut simply concatenates: 2 + 2 = 4 seconds.
        assert abs(self._duration_of(result.output_path) - 4.0) < 0.2

    def test_sequence_crossfade(self, engine, test_video):
        """Test sequence with crossfade transition."""
        builder = DAGBuilder()
        s1 = builder.source(str(test_video))
        seg1 = builder.segment(s1, duration=3.0)
        seg2 = builder.segment(s1, offset=1.0, duration=3.0)
        seq = builder.sequence([seg1, seg2], transition={"type": "crossfade", "duration": 0.5})
        builder.set_output(seq)
        dag = builder.build()

        result = engine.execute(dag)

        assert result.success

        # Crossfade overlaps the clips: 3 + 3 - 0.5 = 5.5 seconds.
        assert abs(self._duration_of(result.output_path) - 5.5) < 0.3
class TestMuxExecutor:
    """Test MUX node executor."""

    def test_mux_video_audio(self, engine, test_video, test_audio):
        """Muxing a video and an audio source yields an output file."""
        b = DAGBuilder()
        vid = b.source(str(test_video))
        aud = b.source(str(test_audio))
        b.set_output(b.mux(vid, aud))

        outcome = engine.execute(b.build())

        assert outcome.success
        assert outcome.output_path.exists()
class TestAudioMixExecutor:
    """Test AUDIO_MIX node executor.

    The sine-tone fixture generation was copy-pasted five times across the
    three tests; it is factored into ``_make_tone``.
    """

    @staticmethod
    def _make_tone(path, frequency, duration):
        """Render a sine-wave MP3 fixture at *path* using ffmpeg.

        ``frequency`` is in Hz and ``duration`` in seconds; both are
        interpolated into ffmpeg's lavfi ``sine`` source expression.
        """
        subprocess.run([
            "ffmpeg", "-y",
            "-f", "lavfi", "-i", f"sine=frequency={frequency}:duration={duration}",
            "-c:a", "libmp3lame",
            str(path)
        ], capture_output=True, check=True)

    def test_audio_mix_simple(self, engine, cache_dir):
        """Test simple audio mixing."""
        # Two tones at different frequencies so the mix is non-trivial.
        audio1_path = cache_dir / "audio1.mp3"
        audio2_path = cache_dir / "audio2.mp3"
        self._make_tone(audio1_path, 440, 3)
        self._make_tone(audio2_path, 880, 3)

        builder = DAGBuilder()
        a1 = builder.source(str(audio1_path))
        a2 = builder.source(str(audio2_path))
        mixed = builder.audio_mix([a1, a2])
        builder.set_output(mixed)
        dag = builder.build()

        result = engine.execute(dag)

        assert result.success
        assert result.output_path.exists()

    def test_audio_mix_with_gains(self, engine, cache_dir):
        """Test audio mixing with custom gains."""
        audio1_path = cache_dir / "audio1.mp3"
        audio2_path = cache_dir / "audio2.mp3"
        self._make_tone(audio1_path, 440, 3)
        self._make_tone(audio2_path, 880, 3)

        builder = DAGBuilder()
        a1 = builder.source(str(audio1_path))
        a2 = builder.source(str(audio2_path))
        # Second input attenuated to 30%.
        mixed = builder.audio_mix([a1, a2], gains=[1.0, 0.3])
        builder.set_output(mixed)
        dag = builder.build()

        result = engine.execute(dag)

        assert result.success
        assert result.output_path.exists()

    def test_audio_mix_three_inputs(self, engine, cache_dir):
        """Test mixing three audio sources."""
        audio_paths = []
        for i, freq in enumerate([440, 660, 880]):
            path = cache_dir / f"audio{i}.mp3"
            self._make_tone(path, freq, 2)
            audio_paths.append(path)

        builder = DAGBuilder()
        sources = [builder.source(str(p)) for p in audio_paths]
        mixed = builder.audio_mix(sources, gains=[1.0, 0.5, 0.3])
        builder.set_output(mixed)
        dag = builder.build()

        result = engine.execute(dag)

        assert result.success
        assert result.output_path.exists()
class TestCaching:
    """Test engine caching behavior."""

    def test_cache_reuse(self, engine, test_video):
        """A second run of the same DAG is served entirely from cache."""
        b = DAGBuilder()
        b.set_output(b.source(str(test_video)))
        graph = b.build()

        # Cold run: the single node must actually execute.
        first = engine.execute(graph)
        assert first.success
        assert first.nodes_cached == 0
        assert first.nodes_executed == 1

        # Warm run: same DAG, everything comes from cache.
        second = engine.execute(graph)
        assert second.success
        assert second.nodes_cached == 1
        assert second.nodes_executed == 0

    def test_clear_cache(self, engine, test_video):
        """clear_cache() drops every cached entry."""
        b = DAGBuilder()
        b.set_output(b.source(str(test_video)))

        engine.execute(b.build())
        assert engine.cache.stats.total_entries == 1

        engine.clear_cache()
        assert engine.cache.stats.total_entries == 0
class TestProgressCallback:
    """Test progress callback functionality."""

    def test_progress_callback(self, engine, test_video):
        """The engine reports per-node status transitions to the callback."""
        seen = []

        def record(progress):
            seen.append((progress.node_id, progress.status))

        engine.set_progress_callback(record)

        b = DAGBuilder()
        b.set_output(b.source(str(test_video)))

        outcome = engine.execute(b.build())

        assert outcome.success
        assert len(seen) > 0
        # The node lifecycle should at least pass through pending and completed.
        statuses = [status for _, status in seen]
        assert "pending" in statuses
        assert "completed" in statuses
class TestFullWorkflow:
    """Test complete workflow."""

    def test_full_pipeline(self, engine, test_video, test_audio):
        """Segment, resize, color-grade and mux a clip end to end."""
        b = DAGBuilder()

        # Sources.
        clip = b.source(str(test_video))
        track = b.source(str(test_audio))

        # Processing chain on the video branch.
        trimmed = b.segment(clip, duration=3.0)
        shrunk = b.resize(trimmed, width=640, height=480)
        graded = b.transform(shrunk, effects={"saturation": 1.3})

        # Join video with audio and render.
        b.set_output(b.mux(graded, track))
        outcome = engine.execute(b.build())

        assert outcome.success
        assert outcome.output_path.exists()
        # source, source, segment, resize, transform, mux
        assert outcome.nodes_executed == 6
110
tests/test_executor.py
Normal file
110
tests/test_executor.py
Normal file
@@ -0,0 +1,110 @@
|
||||
# tests/test_primitive_new/test_executor.py
|
||||
"""Tests for primitive executor module."""
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from artdag.dag import NodeType
|
||||
from artdag.executor import (
|
||||
Executor,
|
||||
register_executor,
|
||||
get_executor,
|
||||
list_executors,
|
||||
clear_executors,
|
||||
)
|
||||
|
||||
|
||||
class TestExecutorRegistry:
    """Test executor registration.

    The executor registry is global module state, so setup/teardown clear
    it around every test to keep them independent.
    """

    def setup_method(self):
        """Clear registry before each test."""
        clear_executors()

    def teardown_method(self):
        """Clear registry after each test."""
        clear_executors()

    def test_register_executor(self):
        """Test registering an executor."""
        @register_executor(NodeType.SOURCE)
        class TestSourceExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        # Registration should make an instance retrievable by node type.
        executor = get_executor(NodeType.SOURCE)
        assert executor is not None
        assert isinstance(executor, TestSourceExecutor)

    def test_register_custom_type(self):
        """Test registering executor for custom type."""
        # Plain strings are accepted as node types, not just NodeType members.
        @register_executor("CUSTOM_NODE")
        class CustomExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        executor = get_executor("CUSTOM_NODE")
        assert executor is not None

    def test_get_unregistered(self):
        """Test getting unregistered executor."""
        # Nothing registered for ANALYZE in this test -> lookup yields None.
        executor = get_executor(NodeType.ANALYZE)
        assert executor is None

    def test_list_executors(self):
        """Test listing registered executors."""
        @register_executor(NodeType.SOURCE)
        class SourceExec(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        @register_executor(NodeType.SEGMENT)
        class SegmentExec(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        # list_executors appears to key entries by type name string.
        executors = list_executors()
        assert "SOURCE" in executors
        assert "SEGMENT" in executors

    def test_overwrite_warning(self, caplog):
        """Test warning when overwriting executor.

        NOTE(review): ``caplog`` is requested but no warning record is ever
        asserted — only the overwrite behavior is checked. Consider asserting
        that a warning was actually logged, or dropping the fixture.
        """
        @register_executor(NodeType.SOURCE)
        class FirstExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        # Register again - should warn
        @register_executor(NodeType.SOURCE)
        class SecondExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        # Second should be registered
        executor = get_executor(NodeType.SOURCE)
        assert isinstance(executor, SecondExecutor)
class TestExecutorBase:
    """Test Executor base class."""

    def test_validate_config_default(self):
        """Test default validate_config returns empty list."""
        class NoopExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        assert NoopExecutor().validate_config({"any": "config"}) == []

    def test_estimate_output_size(self):
        """The default size estimate is the sum of the input sizes."""
        class NoopExecutor(Executor):
            def execute(self, config, inputs, output_path):
                return output_path

        estimated = NoopExecutor().estimate_output_size({}, [100, 200, 300])
        assert estimated == 600
||||
301
tests/test_ipfs_access.py
Normal file
301
tests/test_ipfs_access.py
Normal file
@@ -0,0 +1,301 @@
|
||||
"""
|
||||
Tests for IPFS access consistency.
|
||||
|
||||
All IPFS access should use IPFS_API (multiaddr format) for consistency
|
||||
with art-celery's ipfs_client.py. This ensures Docker deployments work
|
||||
correctly since IPFS_API is set to /dns/ipfs/tcp/5001.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def multiaddr_to_url(multiaddr: str) -> str:
    """
    Convert IPFS multiaddr to HTTP URL.

    This is the canonical conversion used by ipfs_client.py.
    """
    # /dns/host/tcp/port (also dns4/dns6) and /ip4/addr/tcp/port all map
    # to http://<host>:<port>.
    for pattern in (r"/dns[46]?/([^/]+)/tcp/(\d+)", r"/ip4/([^/]+)/tcp/(\d+)"):
        match = re.match(pattern, multiaddr)
        if match:
            host, port = match.groups()
            return f"http://{host}:{port}"

    # Already an HTTP(S) URL: pass through unchanged.
    if multiaddr.startswith("http"):
        return multiaddr

    # Unknown format: fall back to the local daemon default.
    return "http://127.0.0.1:5001"
class TestMultiaddrConversion:
    """Tests for multiaddr to URL conversion."""

    def test_dns_format(self) -> None:
        """Docker DNS format should convert correctly."""
        assert multiaddr_to_url("/dns/ipfs/tcp/5001") == "http://ipfs:5001"

    def test_dns4_format(self) -> None:
        """dns4 format should work."""
        converted = multiaddr_to_url("/dns4/ipfs.example.com/tcp/5001")
        assert converted == "http://ipfs.example.com:5001"

    def test_ip4_format(self) -> None:
        """IPv4 format should convert correctly."""
        assert multiaddr_to_url("/ip4/127.0.0.1/tcp/5001") == "http://127.0.0.1:5001"

    def test_already_url(self) -> None:
        """HTTP URLs should pass through."""
        assert multiaddr_to_url("http://localhost:5001") == "http://localhost:5001"

    def test_fallback(self) -> None:
        """Unknown format should fallback to localhost."""
        assert multiaddr_to_url("garbage") == "http://127.0.0.1:5001"
class TestIPFSConfigConsistency:
    """
    Tests to ensure IPFS configuration is consistent.

    The effect executor should use IPFS_API (like ipfs_client.py)
    rather than a separate IPFS_GATEWAY variable.
    """

    def test_effect_module_should_not_use_gateway_var(self) -> None:
        """
        Regression test: Effect module should use IPFS_API, not IPFS_GATEWAY.

        Bug found 2026-01-12: artdag/nodes/effect.py used IPFS_GATEWAY which
        defaulted to http://127.0.0.1:8080. This doesn't work in Docker where
        the IPFS node is a separate container. The ipfs_client.py uses IPFS_API
        which is correctly set in docker-compose.
        """
        from artdag.nodes import effect

        uses_old_gateway = hasattr(effect, 'IPFS_GATEWAY')
        uses_api = hasattr(effect, 'IPFS_API') or hasattr(effect, '_get_ipfs_base_url')

        # Only fail in the buggy configuration: gateway variable present
        # with no API-based alternative.
        if uses_old_gateway and not uses_api:
            pytest.fail(
                "Effect module uses IPFS_GATEWAY instead of IPFS_API. "
                "This breaks Docker deployments where IPFS_API=/dns/ipfs/tcp/5001 "
                "but IPFS_GATEWAY defaults to localhost."
            )

    def test_ipfs_api_default_is_localhost(self) -> None:
        """IPFS_API should default to localhost for local development."""
        converted = multiaddr_to_url("/ip4/127.0.0.1/tcp/5001")
        assert "127.0.0.1" in converted
        assert "5001" in converted

    def test_docker_ipfs_api_uses_service_name(self) -> None:
        """In Docker, IPFS_API should use the service name."""
        converted = multiaddr_to_url("/dns/ipfs/tcp/5001")
        assert converted == "http://ipfs:5001"
        assert "127.0.0.1" not in converted
class TestEffectFetchURL:
    """Tests for the URL used to fetch effects from IPFS."""

    def test_fetch_should_use_api_cat_endpoint(self) -> None:
        """
        Effect fetch should use /api/v0/cat endpoint (like ipfs_client.py).

        The IPFS API's cat endpoint works reliably in Docker.
        The gateway endpoint (port 8080) requires separate configuration.
        """
        content_id = "QmTestCid123"
        # The correct way to fetch via API.
        fetch_url = f"http://ipfs:5001/api/v0/cat?arg={content_id}"

        assert "/api/v0/cat" in fetch_url
        assert "arg=" in fetch_url

    def test_gateway_url_is_different_from_api(self) -> None:
        """
        Document the difference between gateway and API URLs.

        Gateway: http://ipfs:8080/ipfs/{cid} (requires IPFS_GATEWAY config)
        API: http://ipfs:5001/api/v0/cat?arg={cid} (uses IPFS_API config)

        Using the API is more reliable since IPFS_API is already configured
        correctly in docker-compose.yml.
        """
        content_id = "QmTestCid123"

        via_gateway = f"http://ipfs:8080/ipfs/{content_id}"  # old broken way
        via_api = f"http://ipfs:5001/api/v0/cat?arg={content_id}"  # correct way

        assert via_gateway != via_api
        assert ":8080" in via_gateway
        assert ":5001" in via_api
class TestEffectDependencies:
    """Tests for effect dependency handling.

    Effects declare their third-party requirements via PEP 723 inline
    script metadata; these tests cover parsing that block and the failure
    mode when a declared dependency is not installed.
    """

    def test_parse_pep723_dependencies(self) -> None:
        """Should parse PEP 723 dependencies from effect source."""
        # Effect source with a PEP 723 metadata block declaring two deps.
        source = '''
# /// script
# requires-python = ">=3.10"
# dependencies = ["numpy", "opencv-python"]
# ///
"""
@effect test_effect
"""

def process_frame(frame, params, state):
    return frame, state
'''
        # Import the function after the fix is applied
        from artdag.nodes.effect import _parse_pep723_dependencies

        deps = _parse_pep723_dependencies(source)

        assert deps == ["numpy", "opencv-python"]

    def test_parse_pep723_no_dependencies(self) -> None:
        """Should return empty list if no dependencies block."""
        source = '''
"""
@effect simple_effect
"""

def process_frame(frame, params, state):
    return frame, state
'''
        from artdag.nodes.effect import _parse_pep723_dependencies

        deps = _parse_pep723_dependencies(source)

        assert deps == []

    def test_ensure_dependencies_already_installed(self) -> None:
        """Should return True if dependencies are already installed."""
        from artdag.nodes.effect import _ensure_dependencies

        # os is always available
        result = _ensure_dependencies(["os"], "QmTest123")

        assert result is True

    def test_effect_with_missing_dependency_gives_clear_error(self, tmp_path: Path) -> None:
        """
        Regression test: Missing dependencies should give clear error message.

        Bug found 2026-01-12: Effect with numpy dependency failed with
        "No module named 'numpy'" but this was swallowed and reported as
        "Unknown effect: invert" - very confusing.
        """
        # Effects are cached under <CACHE_DIR>/_effects/<cid>/effect.py
        # (presumably — layout inferred from this fixture; confirm against
        # artdag.nodes.effect).
        effects_dir = tmp_path / "_effects"
        effect_cid = "QmTestEffectWithDeps"

        # Create effect that imports a non-existent module
        effect_dir = effects_dir / effect_cid
        effect_dir.mkdir(parents=True)
        (effect_dir / "effect.py").write_text('''
# /// script
# requires-python = ">=3.10"
# dependencies = ["some_nonexistent_package_xyz"]
# ///
"""
@effect test_effect
"""
import some_nonexistent_package_xyz

def process_frame(frame, params, state):
    return frame, state
''')

        # The effect file exists
        effect_path = effects_dir / effect_cid / "effect.py"
        assert effect_path.exists()

        # When loading fails due to missing import, error should mention the dependency
        with patch.dict(os.environ, {"CACHE_DIR": str(tmp_path)}):
            from artdag.nodes.effect import _load_cached_effect

            # This should return None but log a clear error about the missing module
            result = _load_cached_effect(effect_cid)

            # Currently returns None, which causes "Unknown effect" error
            # The real issue is the dependency isn't installed
            assert result is None
class TestEffectCacheAndFetch:
|
||||
"""Integration tests for effect caching and fetching."""
|
||||
|
||||
def test_effect_loads_from_cache_without_ipfs(self, tmp_path: Path) -> None:
    """A cache hit must be served from disk; IPFS is never contacted."""
    cid = "QmTestEffect123"
    cached_effect_dir = tmp_path / "_effects" / cid
    cached_effect_dir.mkdir(parents=True)

    # Minimal valid effect source placed directly in the cache layout.
    cached_effect_dir.joinpath("effect.py").write_text('''
def process_frame(frame, params, state):
    return frame, state
''')

    # With CACHE_DIR pointed at our fixture, the loader should resolve
    # the effect locally and return a callable.
    with patch.dict(os.environ, {"CACHE_DIR": str(tmp_path)}):
        from artdag.nodes.effect import _load_cached_effect

        assert _load_cached_effect(cid) is not None
|
||||
|
||||
def test_effect_fetch_uses_correct_endpoint(self, tmp_path: Path) -> None:
|
||||
"""When fetching from IPFS, should use API endpoint."""
|
||||
effects_dir = tmp_path / "_effects"
|
||||
effects_dir.mkdir(parents=True)
|
||||
effect_cid = "QmNonExistentEffect"
|
||||
|
||||
with patch.dict(os.environ, {
|
||||
"CACHE_DIR": str(tmp_path),
|
||||
"IPFS_API": "/dns/ipfs/tcp/5001"
|
||||
}):
|
||||
with patch('requests.post') as mock_post:
|
||||
# Set up mock to return effect source
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.content = b'def process_frame(f, p, s): return f, s'
|
||||
mock_post.return_value = mock_response
|
||||
|
||||
from artdag.nodes.effect import _load_cached_effect
|
||||
|
||||
# Try to load - should attempt IPFS fetch
|
||||
_load_cached_effect(effect_cid)
|
||||
|
||||
# After fix, this should use the API endpoint
|
||||
# Check if requests.post was called (API style)
|
||||
# or requests.get was called (gateway style)
|
||||
# The fix should make it use POST to /api/v0/cat
|
||||
Reference in New Issue
Block a user