feature: duplicate 01

This commit is contained in:
kodi
2026-03-14 17:20:36 +01:00
parent 14600dd5b6
commit 7f7665880f
27 changed files with 583 additions and 3 deletions
Binary file not shown.
+17
View File
@@ -0,0 +1,17 @@
from __future__ import annotations
from fastapi import APIRouter, Depends
from backend.app.api.schemas import DuplicateRequest, TaskCreateResponse
from backend.app.dependencies import get_duplicate_task_service
from backend.app.services.duplicate_task_service import DuplicateTaskService
router = APIRouter(prefix="/files")


@router.post("/duplicate", response_model=TaskCreateResponse, status_code=202)
async def duplicate_paths(
    request: DuplicateRequest,
    service: DuplicateTaskService = Depends(get_duplicate_task_service),
) -> TaskCreateResponse:
    """Queue an asynchronous duplicate task for the requested paths.

    Responds 202 with the new task's id and initial status; the actual
    copying is performed in the background by the task runner.
    """
    response = service.create_duplicate_task(paths=request.paths)
    return response
+4
View File
@@ -172,6 +172,10 @@ class CopyRequest(BaseModel):
destination_base: str | None = None
class DuplicateRequest(BaseModel):
    """Request body for the duplicate-files endpoint."""

    # Source paths to duplicate (each is duplicated next to itself).
    paths: list[str]
class TaskCreateResponse(BaseModel):
    """Response returned when an asynchronous task has been created."""

    # Identifier of the newly created task.
    task_id: str
    # Initial task status (e.g. "queued").
    status: str
+1 -1
View File
@@ -7,7 +7,7 @@ from datetime import datetime, timezone
from pathlib import Path
# Statuses a history entry may take over its lifecycle.
VALID_HISTORY_STATUSES = {"queued", "completed", "failed", "requested", "ready", "preflight_failed", "cancelled"}
# Operations that may be recorded in history. The rendered diff left the
# pre-change assignment (without "duplicate") as a dead line immediately
# overwritten by this one; only the final value is kept.
VALID_HISTORY_OPERATIONS = {"mkdir", "rename", "delete", "copy", "move", "upload", "download", "duplicate"}
class HistoryRepository:
+1 -1
View File
@@ -7,7 +7,7 @@ from datetime import datetime, timezone
from pathlib import Path
# Lifecycle statuses for long-running tasks.
VALID_STATUSES = {"queued", "running", "completed", "failed", "requested", "preparing", "ready", "cancelled"}
# Task operations the runner knows how to execute. The rendered diff left
# the pre-change assignment (without "duplicate") as a dead line immediately
# overwritten by this one; only the final value is kept.
VALID_OPERATIONS = {"copy", "move", "download", "duplicate"}
TASK_MIGRATION_COLUMNS: dict[str, str] = {
"operation": "TEXT NOT NULL DEFAULT 'copy'",
"status": "TEXT NOT NULL DEFAULT 'queued'",
+10
View File
@@ -14,6 +14,7 @@ from backend.app.services.bookmark_service import BookmarkService
from backend.app.services.browse_service import BrowseService
from backend.app.services.copy_task_service import CopyTaskService
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.file_ops_service import FileOpsService
from backend.app.services.history_service import HistoryService
from backend.app.services.move_task_service import MoveTaskService
@@ -112,6 +113,15 @@ async def get_copy_task_service() -> CopyTaskService:
)
async def get_duplicate_task_service() -> DuplicateTaskService:
    """Build a DuplicateTaskService wired to the shared application singletons."""
    service = DuplicateTaskService(
        path_guard=get_path_guard(),
        repository=get_task_repository(),
        runner=get_task_runner(),
        history_repository=get_history_repository(),
    )
    return service
async def get_move_task_service() -> MoveTaskService:
return MoveTaskService(
path_guard=get_path_guard(),
+2
View File
@@ -10,6 +10,7 @@ from backend.app.api.errors import AppError
from backend.app.api.routes_bookmarks import router as bookmarks_router
from backend.app.api.routes_browse import router as browse_router
from backend.app.api.routes_copy import router as copy_router
from backend.app.api.routes_duplicate import router as duplicate_router
from backend.app.api.routes_files import router as files_router
from backend.app.api.routes_history import router as history_router
from backend.app.api.routes_move import router as move_router
@@ -30,6 +31,7 @@ app.mount("/ui", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
# Register all feature routers under the shared /api prefix.
app.include_router(browse_router, prefix="/api")
app.include_router(files_router, prefix="/api")
app.include_router(copy_router, prefix="/api")
app.include_router(duplicate_router, prefix="/api")
app.include_router(move_router, prefix="/api")
app.include_router(search_router, prefix="/api")
app.include_router(settings_router, prefix="/api")
@@ -0,0 +1,188 @@
from __future__ import annotations
import os
import uuid
from datetime import datetime, timezone
from pathlib import Path
from backend.app.api.errors import AppError
from backend.app.api.schemas import TaskCreateResponse
from backend.app.db.history_repository import HistoryRepository
from backend.app.db.task_repository import TaskRepository
from backend.app.security.path_guard import PathGuard, ResolvedPath
from backend.app.tasks_runner import TaskRunner
class DuplicateTaskService:
    """Validates duplicate requests and enqueues background duplicate tasks.

    Each requested path is copied next to itself under a "name copy" /
    "name copy N" sibling name. The service records the queued task (and
    any validation failure) in the optional history repository, then hands
    the concrete copying work to the task runner.
    """

    def __init__(self, path_guard: PathGuard, repository: TaskRepository, runner: TaskRunner, history_repository: HistoryRepository | None = None):
        self._path_guard = path_guard
        self._repository = repository
        self._runner = runner
        # Optional; all history recording is skipped when not provided.
        self._history_repository = history_repository

    def create_duplicate_task(self, paths: list[str] | None) -> TaskCreateResponse:
        """Validate *paths*, create a "duplicate" task and enqueue its items.

        Raises AppError (400) when no paths are given, and re-raises any
        AppError from per-path validation after recording a failed history
        entry.
        """
        if not paths:
            raise AppError(
                code="invalid_request",
                message="At least 1 path is required",
                status_code=400,
            )
        try:
            items: list[dict[str, str]] = []
            # Destinations already claimed by earlier items in this batch,
            # so two sources cannot resolve to the same "copy" name.
            reserved_destinations: set[str] = set()
            for input_path in paths:
                item = self._build_duplicate_item(input_path, reserved_destinations)
                if item is None:  # skipped "._" entry
                    continue
                reserved_destinations.add(item["destination_absolute"])
                items.append(item)
            source_summary = self._source_summary(paths, items)
            destination_summary = self._destination_summary(items)
            task_id = str(uuid.uuid4())
            task = self._repository.create_task(
                operation="duplicate",
                source=source_summary,
                destination=destination_summary,
                task_id=task_id,
            )
            self._record_history(
                entry_id=task_id,
                operation="duplicate",
                status="queued",
                source=source_summary,
                destination=destination_summary,
            )
            self._runner.enqueue_duplicate_batch(
                task_id=task["id"],
                items=[
                    {
                        "source": item["source_absolute"],
                        "destination": item["destination_absolute"],
                        "kind": item["kind"],
                    }
                    for item in items
                ],
            )
            return TaskCreateResponse(task_id=task["id"], status=task["status"])
        except AppError as exc:
            # Record the validation failure before re-raising to the API layer.
            self._record_history(
                operation="duplicate",
                status="failed",
                source=paths[0] if len(paths) == 1 else f"{len(paths)} items",
                destination="same directory",
                error_code=exc.code,
                error_message=exc.message,
                finished_at=self._now_iso(),
            )
            raise

    def _build_duplicate_item(self, source: str, reserved_destinations: set[str]) -> dict[str, str] | None:
        """Resolve one source path into a duplicate work item.

        Returns None for skipped "._" entries; raises AppError (409) for
        symlinks and unsupported path types.
        """
        resolved_source = self._path_guard.resolve_existing_path(source)
        _, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
        if self._should_skip_name(lexical_source.name):
            return None
        if lexical_source.is_symlink():
            raise AppError(
                code="type_conflict",
                message="Source must not be a symlink",
                status_code=409,
                details={"path": source},
            )
        source_is_file = resolved_source.absolute.is_file()
        source_is_directory = resolved_source.absolute.is_dir()
        if not source_is_file and not source_is_directory:
            raise AppError(
                code="type_conflict",
                message="Unsupported source path type",
                status_code=409,
                details={"path": source},
            )
        if source_is_directory:
            # Fail fast before any copying if the tree contains symlinks.
            self._validate_directory_tree(resolved_source)
        destination_absolute = self._next_duplicate_destination(resolved_source.absolute, reserved_destinations)
        destination_relative = self._path_guard.entry_relative_path(
            resolved_source.alias,
            destination_absolute,
            display_style=resolved_source.display_style,
        )
        return {
            "source_relative": resolved_source.relative,
            "destination_relative": destination_relative,
            "source_absolute": str(resolved_source.absolute),
            "destination_absolute": str(destination_absolute),
            "kind": "directory" if source_is_directory else "file",
        }

    def _validate_directory_tree(self, resolved_source: ResolvedPath) -> None:
        """Raise AppError (409) if the directory tree contains any symlink."""
        for root, dirnames, filenames in os.walk(resolved_source.absolute, followlinks=False):
            # Prune skipped "._" directories so they are neither checked nor walked.
            dirnames[:] = [name for name in dirnames if not self._should_skip_name(name)]
            root_path = Path(root)
            for name in [*dirnames, *filenames]:
                if self._should_skip_name(name):
                    continue
                entry = root_path / name
                if entry.is_symlink():
                    raise AppError(
                        code="type_conflict",
                        message="Source directory must not contain symlinks",
                        status_code=409,
                        details={"path": resolved_source.relative},
                    )

    @classmethod
    def _next_duplicate_destination(cls, source: Path, reserved_destinations: set[str]) -> Path:
        """Return the first free "name copy [N]" sibling path for *source*.

        *reserved_destinations* holds destinations claimed by earlier items
        in the same batch, which do not exist on disk yet.
        """
        parent = source.parent
        # Hoisted out of the loop: whether the source is a file does not
        # change between candidate names (one stat call instead of N).
        source_is_file = source.is_file()
        candidate_index = 1
        while True:
            candidate_name = cls._duplicate_name(source.name, source_is_file, candidate_index)
            candidate = parent / candidate_name
            if not candidate.exists() and str(candidate) not in reserved_destinations:
                return candidate
            candidate_index += 1

    @classmethod
    def _duplicate_name(cls, original_name: str, is_file: bool, index: int) -> str:
        """Build "base copy[ N]suffix", keeping compound suffixes like .tar.gz."""
        if not is_file:
            suffix = ""
            base_name = original_name
        else:
            suffixes = Path(original_name).suffixes
            suffix = "".join(suffixes)
            base_name = original_name[: -len(suffix)] if suffix else original_name
        copy_suffix = " copy" if index == 1 else f" copy {index}"
        return f"{base_name}{copy_suffix}{suffix}"

    @staticmethod
    def _should_skip_name(name: str) -> bool:
        """True for "._"-prefixed entries, which are never duplicated."""
        return name.startswith("._")

    @staticmethod
    def _source_summary(paths: list[str], items: list[dict[str, str]]) -> str:
        """Human-readable source label for the task/history records."""
        if len(paths) == 1:
            return paths[0]
        if not items:
            return "0 items"
        return DuplicateTaskService._items_label(len(items))

    @staticmethod
    def _destination_summary(items: list[dict[str, str]]) -> str:
        """Human-readable destination label for the task/history records."""
        if len(items) == 1:
            return items[0]["destination_relative"]
        return "same directory"

    @staticmethod
    def _items_label(count: int) -> str:
        """Pluralized "N item(s)" label."""
        return "1 item" if count == 1 else f"{count} items"

    def _record_history(self, **kwargs) -> None:
        """Create a history entry when a history repository is configured."""
        if self._history_repository:
            self._history_repository.create_entry(**kwargs)

    @staticmethod
    def _now_iso() -> str:
        """Current UTC time, second precision, ISO-8601 with a trailing Z."""
        return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
+101
View File
@@ -1,5 +1,7 @@
from __future__ import annotations
import os
import shutil
import threading
from pathlib import Path
@@ -69,6 +71,14 @@ class TaskRunner:
)
thread.start()
def enqueue_duplicate_batch(self, task_id: str, items: list[dict[str, str]]) -> None:
    """Start the duplicate batch for *task_id* on a background daemon thread."""
    worker = threading.Thread(
        target=self._run_duplicate_batch,
        args=(task_id, items),
        daemon=True,
    )
    worker.start()
def enqueue_archive_prepare(self, worker) -> None:
thread = threading.Thread(
target=worker,
@@ -322,6 +332,97 @@ class TaskRunner:
)
self._update_history_completed(task_id)
def _run_duplicate_batch(self, task_id: str, items: list[dict[str, str]]) -> None:
    """Worker: execute a queued duplicate batch and track its progress.

    Each item carries "source", "destination" and "kind" ("file" or
    "directory"). On the first OSError the partially written destination
    is removed, the task is marked failed, and remaining items are
    abandoned; otherwise the task and its history entry are completed.
    """
    total_items = len(items)
    # Show the first source as "current" while the batch spins up (None
    # for an empty batch).
    current_item = items[0]["source"] if items else None
    self._repository.mark_running(
        task_id=task_id,
        done_items=0,
        total_items=total_items,
        current_item=current_item,
    )
    completed_items = 0
    for index, item in enumerate(items):
        source = item["source"]
        destination = item["destination"]
        try:
            if item["kind"] == "directory":
                self._duplicate_directory(source=Path(source), destination=Path(destination))
            else:
                self._filesystem.copy_file(source=source, destination=destination)
            completed_items = index + 1
            # Advance "current" to the next pending source; keep the last
            # finished source once the batch is exhausted.
            next_item = items[index + 1]["source"] if index + 1 < total_items else source
            self._repository.update_progress(
                task_id=task_id,
                done_items=completed_items,
                total_items=total_items,
                current_item=next_item,
            )
        except OSError as exc:
            # Best-effort removal of the half-written destination before failing.
            self._cleanup_partial_duplicate(Path(destination))
            self._repository.mark_failed(
                task_id=task_id,
                error_code="io_error",
                error_message=str(exc),
                failed_item=source,
                done_bytes=None,
                total_bytes=None,
                done_items=completed_items,
                total_items=total_items,
            )
            self._update_history_failed(task_id, str(exc))
            return
    self._repository.mark_completed(
        task_id=task_id,
        done_items=total_items,
        total_items=total_items,
    )
    self._update_history_completed(task_id)
def _duplicate_directory(self, source: Path, destination: Path) -> None:
    """Recursively copy *source* into the newly created *destination*.

    "._"-prefixed entries are skipped, any symlink aborts the copy, and
    directory metadata is restored bottom-up once all contents are in
    place. On any failure after the destination is created, the partial
    copy is removed and the error re-raised.
    """
    destination.mkdir()
    # (source dir, mirrored dir) pairs, recorded top-down for later copystat.
    stat_pairs: list[tuple[Path, Path]] = [(source, destination)]
    try:
        for current_root, child_dirs, child_files in os.walk(source, topdown=True, followlinks=False):
            current = Path(current_root)
            mirror = destination / current.relative_to(source)
            # Prune "._"-prefixed directories before descending into them.
            child_dirs[:] = [entry for entry in child_dirs if not entry.startswith("._")]
            for entry in child_dirs:
                child = current / entry
                if child.is_symlink():
                    raise OSError("Source directory must not contain symlinks")
                mirrored_child = mirror / entry
                mirrored_child.mkdir()
                stat_pairs.append((child, mirrored_child))
            for entry in child_files:
                if entry.startswith("._"):
                    continue
                child = current / entry
                if child.is_symlink():
                    raise OSError("Source directory must not contain symlinks")
                self._filesystem.copy_file(
                    source=str(child),
                    destination=str(mirror / entry),
                )
        # Bottom-up so restoring a child cannot disturb its parent's metadata.
        for original_dir, mirrored_dir in reversed(stat_pairs):
            shutil.copystat(original_dir, mirrored_dir, follow_symlinks=False)
    except Exception:
        self._cleanup_partial_duplicate(destination)
        raise
def _cleanup_partial_duplicate(self, path: Path) -> None:
    """Delete a partially written duplicate target; missing paths are a no-op."""
    if path.exists():
        if path.is_dir():
            shutil.rmtree(path)
        else:
            path.unlink()
def _update_history_completed(self, task_id: str) -> None:
    """Flag the task's history entry as completed when history is enabled."""
    repository = self._history_repository
    if not repository:
        return
    repository.update_entry(entry_id=task_id, status="completed")