from __future__ import annotations

import asyncio
import sys
import tempfile
import threading
import time
import unittest
from collections.abc import Callable
from pathlib import Path

import httpx

sys.path.insert(0, str(Path(__file__).resolve().parents[3]))

from backend.app.dependencies import get_duplicate_task_service, get_task_service
from backend.app.db.task_repository import TaskRepository
from backend.app.fs.filesystem_adapter import FilesystemAdapter
from backend.app.main import app
from backend.app.security.path_guard import PathGuard
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.task_service import TaskService
from backend.app.tasks_runner import TaskRunner
class FailOnSecondCopyFilesystemAdapter(FilesystemAdapter):
    """Filesystem adapter that raises ``OSError`` on the second ``copy_file`` call.

    Used to exercise the failure path of a multi-item duplicate task: the
    first item copies normally, the second fails mid-task.
    """

    def __init__(self) -> None:
        super().__init__()
        # Number of copy_file invocations observed so far.
        self._copy_calls = 0

    def copy_file(self, source: str, destination: str, on_progress: Callable[..., None] | None = None) -> None:
        """Copy *source* to *destination*, deliberately failing on call #2.

        Raises:
            OSError: on exactly the second invocation, to simulate an I/O error.
        """
        self._copy_calls += 1
        if self._copy_calls == 2:
            raise OSError("forced duplicate failure")
        super().copy_file(source=source, destination=destination, on_progress=on_progress)
class BlockingDuplicateFilesystemAdapter(FilesystemAdapter):
    """Filesystem adapter whose ``copy_file`` blocks until explicitly released.

    Lets a test cancel a task while the first item is mid-copy:
    ``entered`` is set as soon as a copy starts; setting ``release`` lets
    the copy proceed. The 2s wait timeout keeps a broken test from
    hanging forever.
    """

    def __init__(self) -> None:
        super().__init__()
        # Set by copy_file when a copy has started.
        self.entered = threading.Event()
        # Set by the test to let the blocked copy continue.
        self.release = threading.Event()

    def copy_file(self, source: str, destination: str, on_progress: Callable[..., None] | None = None) -> None:
        """Signal entry, wait for the release gate, then perform the real copy."""
        self.entered.set()
        self.release.wait(timeout=2.0)
        super().copy_file(source=source, destination=destination, on_progress=on_progress)
class DuplicateApiGoldenTest(unittest.TestCase):
    """Golden end-to-end tests for the ``/api/files/duplicate`` endpoint.

    Each test drives the real FastAPI ``app`` through an in-process ASGI
    transport, backed by a temporary directory exposed as two storage
    roots and a throwaway SQLite-backed task repository.
    """

    def setUp(self) -> None:
        """Create a fresh temp root, task repository, and default service wiring."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))
        # Both storage names map to the same physical directory on purpose:
        # one temp root is enough while still exposing two storage prefixes.
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FilesystemAdapter())

    def tearDown(self) -> None:
        """Remove dependency overrides and delete the temporary directory."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _set_services(self, path_guard: PathGuard, filesystem: FilesystemAdapter) -> None:
        """(Re)wire the app's service dependencies around *filesystem*.

        Tests that need a misbehaving adapter call this again with their
        own ``FilesystemAdapter`` subclass before issuing requests.
        """
        runner = TaskRunner(repository=self.repo, filesystem=filesystem)
        duplicate_service = DuplicateTaskService(path_guard=path_guard, repository=self.repo, runner=runner)
        task_service = TaskService(repository=self.repo)

        async def _override_duplicate_service() -> DuplicateTaskService:
            return duplicate_service

        async def _override_task_service() -> TaskService:
            return task_service

        app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
        app.dependency_overrides[get_task_service] = _override_task_service

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        """Issue one request against the app via an in-process ASGI client.

        Only GET and POST are needed by these tests; *payload* is sent as
        the JSON body for POST requests.
        """

        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "POST":
                    return await client.post(url, json=payload)
                return await client.get(url)

        return asyncio.run(_run())

    def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
        """Poll the task until it reaches a terminal state; fail on timeout.

        Returns the task detail body once status is one of
        completed/failed/cancelled.
        """
        # time.monotonic() is immune to wall-clock adjustments, which could
        # otherwise make this deadline fire early or never.
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            response = self._request("GET", f"/api/tasks/{task_id}")
            body = response.json()
            if body["status"] in {"completed", "failed", "cancelled"}:
                return body
            time.sleep(0.02)
        self.fail("task did not reach terminal state in time")

    def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
        """Poll the task until its status is in *statuses*; fail on timeout."""
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            response = self._request("GET", f"/api/tasks/{task_id}")
            body = response.json()
            if body["status"] in statuses:
                return body
            time.sleep(0.02)
        self.fail(f"task did not reach one of {sorted(statuses)} in time")

    def test_duplicate_single_file_success(self) -> None:
        """Duplicating one file produces '<name> copy.<ext>' beside the original."""
        (self.root / "note.txt").write_text("hello", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/note.txt"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["operation"], "duplicate")
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 1)
        self.assertEqual(detail["total_items"], 1)
        self.assertEqual(detail["source"], "storage1/note.txt")
        self.assertEqual(detail["destination"], "storage1/note copy.txt")
        self.assertEqual((self.root / "note copy.txt").read_text(encoding="utf-8"), "hello")

    def test_duplicate_single_directory_success(self) -> None:
        """Duplicating a directory copies its full tree to '<name> copy'."""
        (self.root / "Folder" / "nested").mkdir(parents=True)
        (self.root / "Folder" / "alpha.txt").write_text("A", encoding="utf-8")
        (self.root / "Folder" / "nested" / "beta.txt").write_text("B", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Folder"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        # A directory counts as a single item regardless of its contents.
        self.assertEqual(detail["done_items"], 1)
        self.assertEqual(detail["total_items"], 1)
        self.assertTrue((self.root / "Folder copy").is_dir())
        self.assertEqual((self.root / "Folder copy" / "alpha.txt").read_text(encoding="utf-8"), "A")
        self.assertEqual((self.root / "Folder copy" / "nested" / "beta.txt").read_text(encoding="utf-8"), "B")

    def test_duplicate_multi_select_success(self) -> None:
        """A multi-path request duplicates every item in one task."""
        (self.root / "a.txt").write_text("A", encoding="utf-8")
        (self.root / "docs" / "nested").mkdir(parents=True)
        (self.root / "docs" / "nested" / "b.txt").write_text("B", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/duplicate",
            {"paths": ["storage1/a.txt", "storage1/docs"]},
        )

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 2)
        self.assertEqual(detail["total_items"], 2)
        # Multi-select tasks summarize source/destination instead of listing paths.
        self.assertEqual(detail["source"], "2 items")
        self.assertEqual(detail["destination"], "same directory")
        self.assertEqual((self.root / "a copy.txt").read_text(encoding="utf-8"), "A")
        self.assertEqual((self.root / "docs copy" / "nested" / "b.txt").read_text(encoding="utf-8"), "B")

    def test_duplicate_multi_select_cancelled_after_current_item_finishes(self) -> None:
        """Cancelling mid-copy finishes the in-flight item, then stops the task."""
        blocking_fs = BlockingDuplicateFilesystemAdapter()
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=blocking_fs)
        (self.root / "a.txt").write_text("A", encoding="utf-8")
        (self.root / "b.txt").write_text("B", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/duplicate",
            {"paths": ["storage1/a.txt", "storage1/b.txt"]},
        )

        task_id = response.json()["task_id"]
        # Wait until the first copy is actually in flight before cancelling.
        self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
        running = self._wait_for_status(task_id, {"running"})
        self.assertEqual(running["current_item"], str(self.root / "a.txt"))

        cancel_response = self._request("POST", f"/api/tasks/{task_id}/cancel")
        self.assertEqual(cancel_response.status_code, 200)
        self.assertEqual(cancel_response.json()["status"], "cancelling")

        blocking_fs.release.set()
        detail = self._wait_task(task_id)
        self.assertEqual(detail["status"], "cancelled")
        # Item "a" completed before the cancel took effect; "b" never started.
        self.assertEqual(detail["done_items"], 1)
        self.assertEqual(detail["total_items"], 2)
        self.assertTrue((self.root / "a copy.txt").exists())
        self.assertFalse((self.root / "b copy.txt").exists())

    def test_duplicate_collision_resolution_for_files_and_directories(self) -> None:
        """Existing 'copy'/'copy 2' siblings push the new name to 'copy 3'."""
        (self.root / "report.txt").write_text("R", encoding="utf-8")
        (self.root / "report copy.txt").write_text("existing", encoding="utf-8")
        (self.root / "report copy 2.txt").write_text("existing", encoding="utf-8")
        (self.root / "Album").mkdir()
        (self.root / "Album copy").mkdir()
        (self.root / "Album copy 2").mkdir()

        file_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/report.txt"]})
        dir_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Album"]})

        self.assertEqual(self._wait_task(file_response.json()["task_id"])["destination"], "storage1/report copy 3.txt")
        self.assertEqual(self._wait_task(dir_response.json()["task_id"])["destination"], "storage1/Album copy 3")
        self.assertTrue((self.root / "report copy 3.txt").exists())
        self.assertTrue((self.root / "Album copy 3").is_dir())

    def test_duplicate_skips_top_level_macos_sidecar(self) -> None:
        """A '._' AppleDouble sidecar selected directly is skipped entirely."""
        (self.root / "._note.txt").write_text("sidecar", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/._note.txt"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 0)
        self.assertEqual(detail["total_items"], 0)
        self.assertFalse((self.root / "._note copy.txt").exists())

    def test_duplicate_skips_macos_sidecars_inside_directory_tree(self) -> None:
        """'._' sidecars nested inside a duplicated directory are not copied."""
        (self.root / "docs" / "nested").mkdir(parents=True)
        (self.root / "docs" / "keep.txt").write_text("keep", encoding="utf-8")
        (self.root / "docs" / "._skip.txt").write_text("skip", encoding="utf-8")
        (self.root / "docs" / "nested" / "._nested.txt").write_text("skip", encoding="utf-8")
        (self.root / "docs" / "nested" / "real.txt").write_text("real", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()["task_id"])
        self.assertTrue((self.root / "docs copy" / "keep.txt").exists())
        self.assertTrue((self.root / "docs copy" / "nested" / "real.txt").exists())
        self.assertFalse((self.root / "docs copy" / "._skip.txt").exists())
        self.assertFalse((self.root / "docs copy" / "nested" / "._nested.txt").exists())

    def test_duplicate_failure_removes_partial_directory_artifact(self) -> None:
        """A mid-copy I/O failure cleans up the partially-written directory."""
        (self.root / "docs").mkdir()
        (self.root / "docs" / "a.txt").write_text("A", encoding="utf-8")
        (self.root / "docs" / "b.txt").write_text("B", encoding="utf-8")
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FailOnSecondCopyFilesystemAdapter())

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "failed")
        self.assertEqual(detail["error_code"], "io_error")
        # The half-copied "docs copy" directory must not be left behind.
        self.assertFalse((self.root / "docs copy").exists())

    def test_duplicate_path_safety_stays_intact(self) -> None:
        """Path traversal attempts are rejected with 403 before any task starts."""
        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/../escape.txt"]})

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")
|
|
|
|
|
|
# Allow running this golden-test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()
|