feature: duplicate 01
@@ -0,0 +1,17 @@
from __future__ import annotations

from fastapi import APIRouter, Depends

from backend.app.api.schemas import DuplicateRequest, TaskCreateResponse
from backend.app.dependencies import get_duplicate_task_service
from backend.app.services.duplicate_task_service import DuplicateTaskService

router = APIRouter(prefix="/files")


@router.post("/duplicate", response_model=TaskCreateResponse, status_code=202)
async def duplicate_paths(
    request: DuplicateRequest,
    service: DuplicateTaskService = Depends(get_duplicate_task_service),
) -> TaskCreateResponse:
    return service.create_duplicate_task(paths=request.paths)
@@ -172,6 +172,10 @@ class CopyRequest(BaseModel):
    destination_base: str | None = None


class DuplicateRequest(BaseModel):
    paths: list[str]


class TaskCreateResponse(BaseModel):
    task_id: str
    status: str
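For illustration only, not part of the commit: the request and response shapes implied by these models. Field names come from the schemas above; the concrete values are invented placeholders, and the /api prefix is added where the router is mounted in main.py further down.

# POST /api/files/duplicate, request body (DuplicateRequest)
{"paths": ["storage1/report.txt", "storage1/Album"]}

# 202 response body (TaskCreateResponse)
{"task_id": "<uuid4 string>", "status": "queued"}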
@@ -7,7 +7,7 @@ from datetime import datetime, timezone
from pathlib import Path

VALID_HISTORY_STATUSES = {"queued", "completed", "failed", "requested", "ready", "preflight_failed", "cancelled"}
-VALID_HISTORY_OPERATIONS = {"mkdir", "rename", "delete", "copy", "move", "upload", "download"}
+VALID_HISTORY_OPERATIONS = {"mkdir", "rename", "delete", "copy", "move", "upload", "download", "duplicate"}


class HistoryRepository:
@@ -7,7 +7,7 @@ from datetime import datetime, timezone
from pathlib import Path

VALID_STATUSES = {"queued", "running", "completed", "failed", "requested", "preparing", "ready", "cancelled"}
-VALID_OPERATIONS = {"copy", "move", "download"}
+VALID_OPERATIONS = {"copy", "move", "download", "duplicate"}
TASK_MIGRATION_COLUMNS: dict[str, str] = {
    "operation": "TEXT NOT NULL DEFAULT 'copy'",
    "status": "TEXT NOT NULL DEFAULT 'queued'",
@@ -14,6 +14,7 @@ from backend.app.services.bookmark_service import BookmarkService
from backend.app.services.browse_service import BrowseService
from backend.app.services.copy_task_service import CopyTaskService
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.file_ops_service import FileOpsService
from backend.app.services.history_service import HistoryService
from backend.app.services.move_task_service import MoveTaskService
@@ -112,6 +113,15 @@ async def get_copy_task_service() -> CopyTaskService:
    )


async def get_duplicate_task_service() -> DuplicateTaskService:
    return DuplicateTaskService(
        path_guard=get_path_guard(),
        repository=get_task_repository(),
        runner=get_task_runner(),
        history_repository=get_history_repository(),
    )


async def get_move_task_service() -> MoveTaskService:
    return MoveTaskService(
        path_guard=get_path_guard(),
@@ -10,6 +10,7 @@ from backend.app.api.errors import AppError
from backend.app.api.routes_bookmarks import router as bookmarks_router
from backend.app.api.routes_browse import router as browse_router
from backend.app.api.routes_copy import router as copy_router
from backend.app.api.routes_duplicate import router as duplicate_router
from backend.app.api.routes_files import router as files_router
from backend.app.api.routes_history import router as history_router
from backend.app.api.routes_move import router as move_router
@@ -30,6 +31,7 @@ app.mount("/ui", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
app.include_router(browse_router, prefix="/api")
app.include_router(files_router, prefix="/api")
app.include_router(copy_router, prefix="/api")
app.include_router(duplicate_router, prefix="/api")
app.include_router(move_router, prefix="/api")
app.include_router(search_router, prefix="/api")
app.include_router(settings_router, prefix="/api")
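For illustration only, not part of the commit: a minimal sketch of exercising the new route once the router is mounted as above, assuming the backend is served locally on port 8000 (host and port are placeholders). The returned task id can then be polled through the existing GET /api/tasks/{task_id} endpoint, as the tests later in this diff do.

import httpx

# Queue a duplicate task; the handler replies 202 immediately while the copy
# work runs on a background thread (see enqueue_duplicate_batch below).
created = httpx.post(
    "http://localhost:8000/api/files/duplicate",
    json={"paths": ["storage1/report.txt"]},
).json()

# Poll the generic task endpoint until the task reaches a terminal state.
detail = httpx.get(f"http://localhost:8000/api/tasks/{created['task_id']}").json()
print(detail["status"])  # "queued", then eventually "completed" or "failed"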
@@ -0,0 +1,188 @@
from __future__ import annotations

import os
import uuid
from datetime import datetime, timezone
from pathlib import Path

from backend.app.api.errors import AppError
from backend.app.api.schemas import TaskCreateResponse
from backend.app.db.history_repository import HistoryRepository
from backend.app.db.task_repository import TaskRepository
from backend.app.security.path_guard import PathGuard, ResolvedPath
from backend.app.tasks_runner import TaskRunner


class DuplicateTaskService:
    def __init__(self, path_guard: PathGuard, repository: TaskRepository, runner: TaskRunner, history_repository: HistoryRepository | None = None):
        self._path_guard = path_guard
        self._repository = repository
        self._runner = runner
        self._history_repository = history_repository

    def create_duplicate_task(self, paths: list[str] | None) -> TaskCreateResponse:
        if not paths:
            raise AppError(
                code="invalid_request",
                message="At least 1 path is required",
                status_code=400,
            )
        try:
            items: list[dict[str, str]] = []
            reserved_destinations: set[str] = set()
            for input_path in paths:
                item = self._build_duplicate_item(input_path, reserved_destinations)
                if item is None:
                    continue
                reserved_destinations.add(item["destination_absolute"])
                items.append(item)

            source_summary = self._source_summary(paths, items)
            destination_summary = self._destination_summary(items)
            task_id = str(uuid.uuid4())
            task = self._repository.create_task(
                operation="duplicate",
                source=source_summary,
                destination=destination_summary,
                task_id=task_id,
            )
            self._record_history(
                entry_id=task_id,
                operation="duplicate",
                status="queued",
                source=source_summary,
                destination=destination_summary,
            )
            self._runner.enqueue_duplicate_batch(
                task_id=task["id"],
                items=[
                    {
                        "source": item["source_absolute"],
                        "destination": item["destination_absolute"],
                        "kind": item["kind"],
                    }
                    for item in items
                ],
            )
            return TaskCreateResponse(task_id=task["id"], status=task["status"])
        except AppError as exc:
            self._record_history(
                operation="duplicate",
                status="failed",
                source=paths[0] if len(paths) == 1 else f"{len(paths)} items",
                destination="same directory",
                error_code=exc.code,
                error_message=exc.message,
                finished_at=self._now_iso(),
            )
            raise

    def _build_duplicate_item(self, source: str, reserved_destinations: set[str]) -> dict[str, str] | None:
        resolved_source = self._path_guard.resolve_existing_path(source)
        _, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
        if self._should_skip_name(lexical_source.name):
            return None
        if lexical_source.is_symlink():
            raise AppError(
                code="type_conflict",
                message="Source must not be a symlink",
                status_code=409,
                details={"path": source},
            )

        source_is_file = resolved_source.absolute.is_file()
        source_is_directory = resolved_source.absolute.is_dir()
        if not source_is_file and not source_is_directory:
            raise AppError(
                code="type_conflict",
                message="Unsupported source path type",
                status_code=409,
                details={"path": source},
            )

        if source_is_directory:
            self._validate_directory_tree(resolved_source)

        destination_absolute = self._next_duplicate_destination(resolved_source.absolute, reserved_destinations)
        destination_relative = self._path_guard.entry_relative_path(
            resolved_source.alias,
            destination_absolute,
            display_style=resolved_source.display_style,
        )

        return {
            "source_relative": resolved_source.relative,
            "destination_relative": destination_relative,
            "source_absolute": str(resolved_source.absolute),
            "destination_absolute": str(destination_absolute),
            "kind": "directory" if source_is_directory else "file",
        }

    def _validate_directory_tree(self, resolved_source: ResolvedPath) -> None:
        for root, dirnames, filenames in os.walk(resolved_source.absolute, followlinks=False):
            dirnames[:] = [name for name in dirnames if not self._should_skip_name(name)]
            root_path = Path(root)
            for name in [*dirnames, *filenames]:
                if self._should_skip_name(name):
                    continue
                entry = root_path / name
                if entry.is_symlink():
                    raise AppError(
                        code="type_conflict",
                        message="Source directory must not contain symlinks",
                        status_code=409,
                        details={"path": resolved_source.relative},
                    )

    @classmethod
    def _next_duplicate_destination(cls, source: Path, reserved_destinations: set[str]) -> Path:
        parent = source.parent
        candidate_index = 1
        while True:
            candidate_name = cls._duplicate_name(source.name, source.is_file(), candidate_index)
            candidate = parent / candidate_name
            if not candidate.exists() and str(candidate) not in reserved_destinations:
                return candidate
            candidate_index += 1

    @classmethod
    def _duplicate_name(cls, original_name: str, is_file: bool, index: int) -> str:
        if not is_file:
            suffix = ""
            base_name = original_name
        else:
            suffixes = Path(original_name).suffixes
            suffix = "".join(suffixes)
            base_name = original_name[: -len(suffix)] if suffix else original_name
        copy_suffix = " copy" if index == 1 else f" copy {index}"
        return f"{base_name}{copy_suffix}{suffix}"

    @staticmethod
    def _should_skip_name(name: str) -> bool:
        return name.startswith("._")

    @staticmethod
    def _source_summary(paths: list[str], items: list[dict[str, str]]) -> str:
        if len(paths) == 1:
            return paths[0]
        if not items:
            return "0 items"
        return DuplicateTaskService._items_label(len(items))

    @staticmethod
    def _destination_summary(items: list[dict[str, str]]) -> str:
        if len(items) == 1:
            return items[0]["destination_relative"]
        return "same directory"

    @staticmethod
    def _items_label(count: int) -> str:
        return "1 item" if count == 1 else f"{count} items"

    def _record_history(self, **kwargs) -> None:
        if self._history_repository:
            self._history_repository.create_entry(**kwargs)

    @staticmethod
    def _now_iso() -> str:
        return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
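For illustration only, not part of the commit: a standalone sketch of the collision-avoidance naming implemented by _duplicate_name and _next_duplicate_destination above. The full suffix chain is kept together, the first duplicate gets " copy", and later attempts get " copy N".

from pathlib import Path

def duplicate_name(original_name: str, is_file: bool, index: int) -> str:
    # Same rule as DuplicateTaskService._duplicate_name: keep every suffix,
    # append " copy" on the first attempt and " copy N" afterwards.
    suffix = "".join(Path(original_name).suffixes) if is_file else ""
    base = original_name[: -len(suffix)] if suffix else original_name
    copy_suffix = " copy" if index == 1 else f" copy {index}"
    return f"{base}{copy_suffix}{suffix}"

print(duplicate_name("report.txt", True, 1))      # report copy.txt
print(duplicate_name("report.txt", True, 3))      # report copy 3.txt
print(duplicate_name("archive.tar.gz", True, 1))  # archive copy.tar.gz
print(duplicate_name("Album", False, 2))          # Album copy 2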
@@ -1,5 +1,7 @@
from __future__ import annotations

import os
import shutil
import threading
from pathlib import Path
@@ -69,6 +71,14 @@ class TaskRunner:
        )
        thread.start()

    def enqueue_duplicate_batch(self, task_id: str, items: list[dict[str, str]]) -> None:
        thread = threading.Thread(
            target=self._run_duplicate_batch,
            args=(task_id, items),
            daemon=True,
        )
        thread.start()

    def enqueue_archive_prepare(self, worker) -> None:
        thread = threading.Thread(
            target=worker,
@@ -322,6 +332,97 @@ class TaskRunner:
        )
        self._update_history_completed(task_id)

    def _run_duplicate_batch(self, task_id: str, items: list[dict[str, str]]) -> None:
        total_items = len(items)
        current_item = items[0]["source"] if items else None
        self._repository.mark_running(
            task_id=task_id,
            done_items=0,
            total_items=total_items,
            current_item=current_item,
        )

        completed_items = 0
        for index, item in enumerate(items):
            source = item["source"]
            destination = item["destination"]
            try:
                if item["kind"] == "directory":
                    self._duplicate_directory(source=Path(source), destination=Path(destination))
                else:
                    self._filesystem.copy_file(source=source, destination=destination)
                completed_items = index + 1
                next_item = items[index + 1]["source"] if index + 1 < total_items else source
                self._repository.update_progress(
                    task_id=task_id,
                    done_items=completed_items,
                    total_items=total_items,
                    current_item=next_item,
                )
            except OSError as exc:
                self._cleanup_partial_duplicate(Path(destination))
                self._repository.mark_failed(
                    task_id=task_id,
                    error_code="io_error",
                    error_message=str(exc),
                    failed_item=source,
                    done_bytes=None,
                    total_bytes=None,
                    done_items=completed_items,
                    total_items=total_items,
                )
                self._update_history_failed(task_id, str(exc))
                return

        self._repository.mark_completed(
            task_id=task_id,
            done_items=total_items,
            total_items=total_items,
        )
        self._update_history_completed(task_id)

    def _duplicate_directory(self, source: Path, destination: Path) -> None:
        destination.mkdir()
        copied_directories: list[tuple[Path, Path]] = [(source, destination)]
        try:
            for root, dirnames, filenames in os.walk(source, topdown=True, followlinks=False):
                root_path = Path(root)
                target_root = destination / root_path.relative_to(source)
                dirnames[:] = [name for name in dirnames if not name.startswith("._")]

                for name in dirnames:
                    source_dir = root_path / name
                    if source_dir.is_symlink():
                        raise OSError("Source directory must not contain symlinks")
                    target_dir = target_root / name
                    target_dir.mkdir()
                    copied_directories.append((source_dir, target_dir))

                for name in filenames:
                    if name.startswith("._"):
                        continue
                    source_file = root_path / name
                    if source_file.is_symlink():
                        raise OSError("Source directory must not contain symlinks")
                    self._filesystem.copy_file(
                        source=str(source_file),
                        destination=str(target_root / name),
                    )

            for source_dir, target_dir in reversed(copied_directories):
                shutil.copystat(source_dir, target_dir, follow_symlinks=False)
        except Exception:
            self._cleanup_partial_duplicate(destination)
            raise

    def _cleanup_partial_duplicate(self, path: Path) -> None:
        if not path.exists():
            return
        if path.is_dir():
            shutil.rmtree(path)
            return
        path.unlink()

    def _update_history_completed(self, task_id: str) -> None:
        if self._history_repository:
            self._history_repository.update_entry(entry_id=task_id, status="completed")
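For illustration only, not part of the commit: the sequence of repository calls that _run_duplicate_batch makes for a two-item batch whose second item raises OSError, traced from the code above.

# mark_running(task_id=..., done_items=0, total_items=2, current_item=<source of item 1>)
# item 1 copied -> update_progress(done_items=1, total_items=2, current_item=<source of item 2>)
# item 2 raises OSError
#   -> _cleanup_partial_duplicate(<destination of item 2>)
#   -> mark_failed(error_code="io_error", failed_item=<source of item 2>, done_items=1, total_items=2)
#   -> _update_history_failed(task_id, <error message>)
# mark_completed is never reached for this batch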
@@ -0,0 +1,202 @@
from __future__ import annotations

import asyncio
import sys
import tempfile
import time
import unittest
from pathlib import Path

import httpx

sys.path.insert(0, str(Path(__file__).resolve().parents[3]))

from backend.app.dependencies import get_duplicate_task_service, get_task_service
from backend.app.db.task_repository import TaskRepository
from backend.app.fs.filesystem_adapter import FilesystemAdapter
from backend.app.main import app
from backend.app.security.path_guard import PathGuard
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.task_service import TaskService
from backend.app.tasks_runner import TaskRunner


class FailOnSecondCopyFilesystemAdapter(FilesystemAdapter):
    def __init__(self) -> None:
        super().__init__()
        self._copy_calls = 0

    def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
        self._copy_calls += 1
        if self._copy_calls == 2:
            raise OSError("forced duplicate failure")
        super().copy_file(source=source, destination=destination, on_progress=on_progress)


class DuplicateApiGoldenTest(unittest.TestCase):
    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FilesystemAdapter())

    def tearDown(self) -> None:
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _set_services(self, path_guard: PathGuard, filesystem: FilesystemAdapter) -> None:
        runner = TaskRunner(repository=self.repo, filesystem=filesystem)
        duplicate_service = DuplicateTaskService(path_guard=path_guard, repository=self.repo, runner=runner)
        task_service = TaskService(repository=self.repo)

        async def _override_duplicate_service() -> DuplicateTaskService:
            return duplicate_service

        async def _override_task_service() -> TaskService:
            return task_service

        app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
        app.dependency_overrides[get_task_service] = _override_task_service

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "POST":
                    return await client.post(url, json=payload)
                return await client.get(url)

        return asyncio.run(_run())

    def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            response = self._request("GET", f"/api/tasks/{task_id}")
            body = response.json()
            if body["status"] in {"completed", "failed"}:
                return body
            time.sleep(0.02)
        self.fail("task did not reach terminal state in time")

    def test_duplicate_single_file_success(self) -> None:
        (self.root / "note.txt").write_text("hello", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/note.txt"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["operation"], "duplicate")
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 1)
        self.assertEqual(detail["total_items"], 1)
        self.assertEqual(detail["source"], "storage1/note.txt")
        self.assertEqual(detail["destination"], "storage1/note copy.txt")
        self.assertEqual((self.root / "note copy.txt").read_text(encoding="utf-8"), "hello")

    def test_duplicate_single_directory_success(self) -> None:
        (self.root / "Folder" / "nested").mkdir(parents=True)
        (self.root / "Folder" / "alpha.txt").write_text("A", encoding="utf-8")
        (self.root / "Folder" / "nested" / "beta.txt").write_text("B", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Folder"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 1)
        self.assertEqual(detail["total_items"], 1)
        self.assertTrue((self.root / "Folder copy").is_dir())
        self.assertEqual((self.root / "Folder copy" / "alpha.txt").read_text(encoding="utf-8"), "A")
        self.assertEqual((self.root / "Folder copy" / "nested" / "beta.txt").read_text(encoding="utf-8"), "B")

    def test_duplicate_multi_select_success(self) -> None:
        (self.root / "a.txt").write_text("A", encoding="utf-8")
        (self.root / "docs" / "nested").mkdir(parents=True)
        (self.root / "docs" / "nested" / "b.txt").write_text("B", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/duplicate",
            {"paths": ["storage1/a.txt", "storage1/docs"]},
        )

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 2)
        self.assertEqual(detail["total_items"], 2)
        self.assertEqual(detail["source"], "2 items")
        self.assertEqual(detail["destination"], "same directory")
        self.assertEqual((self.root / "a copy.txt").read_text(encoding="utf-8"), "A")
        self.assertEqual((self.root / "docs copy" / "nested" / "b.txt").read_text(encoding="utf-8"), "B")

    def test_duplicate_collision_resolution_for_files_and_directories(self) -> None:
        (self.root / "report.txt").write_text("R", encoding="utf-8")
        (self.root / "report copy.txt").write_text("existing", encoding="utf-8")
        (self.root / "report copy 2.txt").write_text("existing", encoding="utf-8")
        (self.root / "Album").mkdir()
        (self.root / "Album copy").mkdir()
        (self.root / "Album copy 2").mkdir()

        file_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/report.txt"]})
        dir_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Album"]})

        self.assertEqual(self._wait_task(file_response.json()["task_id"])["destination"], "storage1/report copy 3.txt")
        self.assertEqual(self._wait_task(dir_response.json()["task_id"])["destination"], "storage1/Album copy 3")
        self.assertTrue((self.root / "report copy 3.txt").exists())
        self.assertTrue((self.root / "Album copy 3").is_dir())

    def test_duplicate_skips_top_level_macos_sidecar(self) -> None:
        (self.root / "._note.txt").write_text("sidecar", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/._note.txt"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["done_items"], 0)
        self.assertEqual(detail["total_items"], 0)
        self.assertFalse((self.root / "._note copy.txt").exists())

    def test_duplicate_skips_macos_sidecars_inside_directory_tree(self) -> None:
        (self.root / "docs" / "nested").mkdir(parents=True)
        (self.root / "docs" / "keep.txt").write_text("keep", encoding="utf-8")
        (self.root / "docs" / "._skip.txt").write_text("skip", encoding="utf-8")
        (self.root / "docs" / "nested" / "._nested.txt").write_text("skip", encoding="utf-8")
        (self.root / "docs" / "nested" / "real.txt").write_text("real", encoding="utf-8")

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()["task_id"])
        self.assertTrue((self.root / "docs copy" / "keep.txt").exists())
        self.assertTrue((self.root / "docs copy" / "nested" / "real.txt").exists())
        self.assertFalse((self.root / "docs copy" / "._skip.txt").exists())
        self.assertFalse((self.root / "docs copy" / "nested" / "._nested.txt").exists())

    def test_duplicate_failure_removes_partial_directory_artifact(self) -> None:
        (self.root / "docs").mkdir()
        (self.root / "docs" / "a.txt").write_text("A", encoding="utf-8")
        (self.root / "docs" / "b.txt").write_text("B", encoding="utf-8")
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FailOnSecondCopyFilesystemAdapter())

        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})

        self.assertEqual(response.status_code, 202)
        detail = self._wait_task(response.json()["task_id"])
        self.assertEqual(detail["status"], "failed")
        self.assertEqual(detail["error_code"], "io_error")
        self.assertFalse((self.root / "docs copy").exists())

    def test_duplicate_path_safety_stays_intact(self) -> None:
        response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/../escape.txt"]})

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")


if __name__ == "__main__":
    unittest.main()
@@ -12,7 +12,7 @@ import httpx

sys.path.insert(0, str(Path(__file__).resolve().parents[3]))

-from backend.app.dependencies import get_archive_download_task_service, get_copy_task_service, get_file_ops_service, get_history_service, get_move_task_service, get_task_service
+from backend.app.dependencies import get_archive_download_task_service, get_copy_task_service, get_duplicate_task_service, get_file_ops_service, get_history_service, get_move_task_service, get_task_service
from backend.app.db.history_repository import HistoryRepository
from backend.app.db.task_repository import TaskRepository
from backend.app.fs.filesystem_adapter import FilesystemAdapter
@@ -20,6 +20,7 @@ from backend.app.main import app
from backend.app.security.path_guard import PathGuard
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
from backend.app.services.copy_task_service import CopyTaskService
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.file_ops_service import FileOpsService
from backend.app.services.history_service import HistoryService
from backend.app.services.move_task_service import MoveTaskService
@@ -77,6 +78,7 @@ class HistoryApiGoldenTest(unittest.TestCase):
            artifact_root=self.artifact_root,
        )
        copy_service = CopyTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        duplicate_service = DuplicateTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        move_service = MoveTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        task_service = TaskService(repository=self.task_repo)
        history_service = HistoryService(repository=self.history_repo)
@@ -90,6 +92,9 @@ class HistoryApiGoldenTest(unittest.TestCase):
        async def _override_copy_service() -> CopyTaskService:
            return copy_service

        async def _override_duplicate_service() -> DuplicateTaskService:
            return duplicate_service

        async def _override_move_service() -> MoveTaskService:
            return move_service
@@ -102,6 +107,7 @@ class HistoryApiGoldenTest(unittest.TestCase):
        app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
        app.dependency_overrides[get_archive_download_task_service] = _override_archive_service
        app.dependency_overrides[get_copy_task_service] = _override_copy_service
        app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
        app.dependency_overrides[get_move_task_service] = _override_move_service
        app.dependency_overrides[get_task_service] = _override_task_service
        app.dependency_overrides[get_history_service] = _override_history_service
@@ -209,6 +215,20 @@ class HistoryApiGoldenTest(unittest.TestCase):
        self.assertEqual(history[0]['status'], 'failed')
        self.assertEqual(history[0]['error_code'], 'io_error')

    def test_duplicate_completed_history_item(self) -> None:
        (self.root1 / 'report.txt').write_text('hello', encoding='utf-8')

        response = self._request('POST', '/api/files/duplicate', {'paths': ['storage1/report.txt']})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'duplicate')
        self.assertEqual(history[0]['status'], 'completed')
        self.assertEqual(history[0]['source'], 'storage1/report.txt')
        self.assertEqual(history[0]['destination'], 'storage1/report copy.txt')

    def test_single_file_download_writes_ready_history_item(self) -> None:
        (self.root1 / 'report.txt').write_text('hello download', encoding='utf-8')
@@ -263,6 +263,30 @@ class TasksApiGoldenTest(unittest.TestCase):
        self.assertEqual(body["status"], "ready")
        self.assertEqual(body["destination"], "docs.zip")

    def test_get_task_detail_duplicate_completed(self) -> None:
        self._insert_task(
            task_id="task-duplicate",
            operation="duplicate",
            status="completed",
            source="storage1/report.txt",
            destination="storage1/report copy.txt",
            created_at="2026-03-10T10:00:00Z",
            started_at="2026-03-10T10:00:01Z",
            finished_at="2026-03-10T10:00:03Z",
            done_items=1,
            total_items=1,
            current_item="storage1/report.txt",
        )

        response = self._get("/api/tasks/task-duplicate")

        self.assertEqual(response.status_code, 200)
        body = response.json()
        self.assertEqual(body["operation"], "duplicate")
        self.assertEqual(body["status"], "completed")
        self.assertEqual(body["done_items"], 1)
        self.assertEqual(body["total_items"], 1)

    def test_get_task_detail_requested_archive_download(self) -> None:
        self._insert_task(
            task_id="task-download-requested",
@@ -80,6 +80,18 @@ class TaskRepositoryTest(unittest.TestCase):
        self.assertEqual(task["status"], "requested")
        self.assertEqual(artifact["file_name"], "docs.zip")

    def test_create_duplicate_task_is_allowed(self) -> None:
        created = self.repo.create_task(
            operation="duplicate",
            source="storage1/report.txt",
            destination="storage1/report copy.txt",
        )

        task = self.repo.get_task(created["id"])

        self.assertEqual(task["operation"], "duplicate")
        self.assertEqual(task["status"], "queued")

    def test_mark_cancelled_transitions_requested_download_task(self) -> None:
        created = self.repo.create_task(
            operation="download",