upload volledige repo
This commit is contained in:
@@ -0,0 +1 @@
|
||||
"""Backend package."""
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
|
||||
"""Application package."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
|
||||
"""API routes and schemas."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,11 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class AppError(Exception):
    """Application-level error rendered as a JSON error envelope.

    Raised by services/guards and translated to an HTTP response by the
    FastAPI exception handler in ``main``.

    Attributes:
        code: Machine-readable error identifier (e.g. "path_not_found").
        message: Human-readable description returned to the client.
        status_code: HTTP status code to use for the response.
        details: Optional extra context included in the error body.
    """

    code: str
    message: str
    status_code: int
    details: dict[str, str] | None = None

    def __post_init__(self) -> None:
        # The generated dataclass __init__ never calls Exception.__init__,
        # which leaves Exception.args empty and makes str(exc) == "".
        # Populate args so logs and tracebacks show the message.
        super().__init__(self.message)
|
||||
@@ -0,0 +1,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from backend.app.api.schemas import (
|
||||
BookmarkCreateRequest,
|
||||
BookmarkDeleteResponse,
|
||||
BookmarkItem,
|
||||
BookmarkListResponse,
|
||||
)
|
||||
from backend.app.dependencies import get_bookmark_service
|
||||
from backend.app.services.bookmark_service import BookmarkService
|
||||
|
||||
# All bookmark endpoints live under /bookmarks (mounted under /api by main).
router = APIRouter(prefix="/bookmarks")


@router.post("", response_model=BookmarkItem)
async def create_bookmark(
    request: BookmarkCreateRequest,
    service: BookmarkService = Depends(get_bookmark_service),
) -> BookmarkItem:
    """Create a bookmark; path/label validation is delegated to the service."""
    return service.create_bookmark(path=request.path, label=request.label)


@router.get("", response_model=BookmarkListResponse)
async def list_bookmarks(service: BookmarkService = Depends(get_bookmark_service)) -> BookmarkListResponse:
    """Return all stored bookmarks."""
    return service.list_bookmarks()


@router.delete("/{bookmark_id}", response_model=BookmarkDeleteResponse)
async def delete_bookmark(
    bookmark_id: int,
    service: BookmarkService = Depends(get_bookmark_service),
) -> BookmarkDeleteResponse:
    """Delete the bookmark with the given numeric id."""
    return service.delete_bookmark(bookmark_id)
|
||||
@@ -0,0 +1,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
|
||||
from backend.app.api.schemas import BrowseResponse
|
||||
from backend.app.dependencies import get_browse_service
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
|
||||
# No prefix here: the single endpoint declares its own /browse path.
router = APIRouter()


@router.get("/browse", response_model=BrowseResponse)
async def browse(
    path: str = Query(...),
    show_hidden: bool = Query(False),
    service: BrowseService = Depends(get_browse_service),
) -> BrowseResponse:
    """List a directory identified by an alias-rooted path (e.g. "storage1/x").

    ``path`` is required; hidden (dot-prefixed) entries are omitted unless
    ``show_hidden`` is true. Path validation happens inside the service.
    """
    return service.browse(path=path, show_hidden=show_hidden)
|
||||
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from backend.app.api.schemas import CopyRequest, TaskCreateResponse
|
||||
from backend.app.dependencies import get_copy_task_service
|
||||
from backend.app.services.copy_task_service import CopyTaskService
|
||||
|
||||
router = APIRouter(prefix="/files")


@router.post("/copy", response_model=TaskCreateResponse, status_code=202)
async def copy_file(
    request: CopyRequest,
    service: CopyTaskService = Depends(get_copy_task_service),
) -> TaskCreateResponse:
    """Enqueue an asynchronous copy task; 202 because work happens later."""
    return service.create_copy_task(source=request.source, destination=request.destination)
|
||||
@@ -0,0 +1,33 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from backend.app.api.schemas import DeleteRequest, DeleteResponse, MkdirRequest, MkdirResponse, RenameRequest, RenameResponse
|
||||
from backend.app.dependencies import get_file_ops_service
|
||||
from backend.app.services.file_ops_service import FileOpsService
|
||||
|
||||
# Synchronous file operations (mkdir/rename/delete) under /files.
router = APIRouter(prefix="/files")


@router.post("/mkdir", response_model=MkdirResponse)
async def mkdir(
    request: MkdirRequest,
    service: FileOpsService = Depends(get_file_ops_service),
) -> MkdirResponse:
    """Create a directory named ``name`` inside ``parent_path``."""
    return service.mkdir(parent_path=request.parent_path, name=request.name)


@router.post("/rename", response_model=RenameResponse)
async def rename(
    request: RenameRequest,
    service: FileOpsService = Depends(get_file_ops_service),
) -> RenameResponse:
    """Rename the entry at ``path`` to ``new_name`` (same directory)."""
    return service.rename(path=request.path, new_name=request.new_name)


@router.post("/delete", response_model=DeleteResponse)
async def delete(
    request: DeleteRequest,
    service: FileOpsService = Depends(get_file_ops_service),
) -> DeleteResponse:
    """Delete the entry at ``path``; constraints are enforced by the service."""
    return service.delete(path=request.path)
|
||||
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from backend.app.api.schemas import MoveRequest, TaskCreateResponse
|
||||
from backend.app.dependencies import get_move_task_service
|
||||
from backend.app.services.move_task_service import MoveTaskService
|
||||
|
||||
router = APIRouter(prefix="/files")


@router.post("/move", response_model=TaskCreateResponse, status_code=202)
async def move_file(
    request: MoveRequest,
    service: MoveTaskService = Depends(get_move_task_service),
) -> TaskCreateResponse:
    """Enqueue an asynchronous move task; 202 because work happens later."""
    return service.create_move_task(source=request.source, destination=request.destination)
|
||||
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from backend.app.api.schemas import TaskDetailResponse, TaskListResponse
|
||||
from backend.app.dependencies import get_task_service
|
||||
from backend.app.services.task_service import TaskService
|
||||
|
||||
# Read-only task inspection endpoints under /tasks.
router = APIRouter(prefix="/tasks")


@router.get("", response_model=TaskListResponse)
async def list_tasks(service: TaskService = Depends(get_task_service)) -> TaskListResponse:
    """Return a summary list of all known copy/move tasks."""
    return service.list_tasks()


@router.get("/{task_id}", response_model=TaskDetailResponse)
async def get_task(task_id: str, service: TaskService = Depends(get_task_service)) -> TaskDetailResponse:
    """Return full detail (progress, errors, timestamps) for one task."""
    return service.get_task(task_id)
|
||||
@@ -0,0 +1,126 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
# --- Error envelope -------------------------------------------------------


class ErrorBody(BaseModel):
    """Payload of the JSON error envelope (mirrors AppError fields)."""

    code: str
    message: str
    details: dict[str, str] | None = None


class ErrorResponse(BaseModel):
    """Top-level error envelope: {"error": {...}}."""

    error: ErrorBody


# --- Browsing -------------------------------------------------------------


class DirectoryEntry(BaseModel):
    """A subdirectory in a browse listing; ``modified`` is an ISO-8601 string."""

    name: str
    path: str
    modified: str


class FileEntry(BaseModel):
    """A file in a browse listing; ``size`` in bytes, ``modified`` ISO-8601."""

    name: str
    path: str
    size: int
    modified: str


class BrowseResponse(BaseModel):
    """Result of GET /browse: the listed path plus its entries."""

    path: str
    directories: list[DirectoryEntry]
    files: list[FileEntry]


# --- Synchronous file operations ------------------------------------------


class MkdirRequest(BaseModel):
    parent_path: str
    name: str


class MkdirResponse(BaseModel):
    path: str


class RenameRequest(BaseModel):
    path: str
    new_name: str


class RenameResponse(BaseModel):
    path: str


class DeleteRequest(BaseModel):
    path: str


class DeleteResponse(BaseModel):
    path: str


# --- Tasks ----------------------------------------------------------------


class TaskListItem(BaseModel):
    """Summary row for the task list view (timestamps are ISO-8601 strings)."""

    id: str
    operation: str
    status: str
    source: str
    destination: str
    created_at: str
    finished_at: str | None = None


class TaskListResponse(BaseModel):
    items: list[TaskListItem]


class TaskDetailResponse(BaseModel):
    """Full task state including progress counters and failure info.

    Progress fields are None until the task runner reports them; error
    fields are set only for failed tasks.
    """

    id: str
    operation: str
    status: str
    source: str
    destination: str
    done_bytes: int | None = None
    total_bytes: int | None = None
    done_items: int | None = None
    total_items: int | None = None
    current_item: str | None = None
    failed_item: str | None = None
    error_code: str | None = None
    error_message: str | None = None
    created_at: str
    started_at: str | None = None
    finished_at: str | None = None


class CopyRequest(BaseModel):
    source: str
    destination: str


class TaskCreateResponse(BaseModel):
    """Returned with HTTP 202 when a copy/move task is accepted."""

    task_id: str
    status: str


class MoveRequest(BaseModel):
    source: str
    destination: str


# --- Bookmarks ------------------------------------------------------------


class BookmarkCreateRequest(BaseModel):
    path: str
    label: str


class BookmarkItem(BaseModel):
    """A stored bookmark; ``created_at`` is an ISO-8601 string."""

    id: int
    path: str
    label: str
    created_at: str


class BookmarkListResponse(BaseModel):
    items: list[BookmarkItem]


class BookmarkDeleteResponse(BaseModel):
    id: int
|
||||
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Settings:
    """Immutable runtime configuration resolved from environment variables."""

    # Mapping of public alias -> absolute filesystem root exposed by the API.
    root_aliases: dict[str, str]
    # Location of the SQLite database used for tasks.
    task_db_path: str


DEFAULT_ROOT_ALIASES = {
    "storage1": "/Volumes/8TB",
    "storage2": "/Volumes/8TB_RAID1",
}


def _load_root_aliases() -> dict[str, str]:
    """Read root aliases from WEBMANAGER_ROOT_ALIASES.

    The variable uses a minimal "alias=/path,alias2=/path2" format. Entries
    without "=" or with an empty alias/path are ignored; when nothing valid
    remains, the built-in defaults are returned.
    """
    raw = os.getenv("WEBMANAGER_ROOT_ALIASES", "").strip()
    if not raw:
        return dict(DEFAULT_ROOT_ALIASES)

    pairs = (entry.partition("=") for entry in raw.split(","))
    parsed = {
        alias.strip(): root.strip()
        for alias, sep, root in pairs
        if sep and alias.strip() and root.strip()
    }
    return parsed or dict(DEFAULT_ROOT_ALIASES)


def get_settings() -> Settings:
    """Build a fresh Settings from the environment (callers handle caching)."""
    db_path = os.getenv("WEBMANAGER_TASK_DB_PATH", "webui/backend/data/tasks.db").strip()
    return Settings(root_aliases=_load_root_aliases(), task_db_path=db_path)
|
||||
@@ -0,0 +1 @@
|
||||
"""Database utilities."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,94 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class BookmarkRepository:
|
||||
def __init__(self, db_path: str):
|
||||
self._db_path = db_path
|
||||
self._ensure_schema()
|
||||
|
||||
def create_bookmark(self, path: str, label: str) -> dict:
|
||||
created_at = self._now_iso()
|
||||
with self._connection() as conn:
|
||||
cursor = conn.execute(
|
||||
"""
|
||||
INSERT INTO bookmarks (path, label, created_at)
|
||||
VALUES (?, ?, ?)
|
||||
""",
|
||||
(path, label, created_at),
|
||||
)
|
||||
bookmark_id = int(cursor.lastrowid)
|
||||
row = conn.execute(
|
||||
"SELECT id, path, label, created_at FROM bookmarks WHERE id = ?",
|
||||
(bookmark_id,),
|
||||
).fetchone()
|
||||
return self._to_dict(row)
|
||||
|
||||
def list_bookmarks(self) -> list[dict]:
|
||||
with self._connection() as conn:
|
||||
rows = conn.execute(
|
||||
"""
|
||||
SELECT id, path, label, created_at
|
||||
FROM bookmarks
|
||||
ORDER BY created_at DESC
|
||||
"""
|
||||
).fetchall()
|
||||
return [self._to_dict(row) for row in rows]
|
||||
|
||||
def delete_bookmark(self, bookmark_id: int) -> bool:
|
||||
with self._connection() as conn:
|
||||
cursor = conn.execute("DELETE FROM bookmarks WHERE id = ?", (bookmark_id,))
|
||||
return cursor.rowcount > 0
|
||||
|
||||
def _ensure_schema(self) -> None:
|
||||
db_path = Path(self._db_path)
|
||||
if db_path.parent and str(db_path.parent) not in {"", "."}:
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with self._connection() as conn:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS bookmarks (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
path TEXT NOT NULL UNIQUE,
|
||||
label TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE INDEX IF NOT EXISTS idx_bookmarks_created_at_desc
|
||||
ON bookmarks(created_at DESC)
|
||||
"""
|
||||
)
|
||||
|
||||
@contextmanager
|
||||
def _connection(self):
|
||||
conn = sqlite3.connect(self._db_path)
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
yield conn
|
||||
conn.commit()
|
||||
except Exception:
|
||||
conn.rollback()
|
||||
raise
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
@staticmethod
|
||||
def _to_dict(row: sqlite3.Row) -> dict:
|
||||
return {
|
||||
"id": int(row["id"]),
|
||||
"path": row["path"],
|
||||
"label": row["label"],
|
||||
"created_at": row["created_at"],
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _now_iso() -> str:
|
||||
return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
@@ -0,0 +1,241 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import uuid
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Closed vocabularies for task rows; enforced before inserting.
VALID_STATUSES = {"queued", "running", "completed", "failed"}
VALID_OPERATIONS = {"copy", "move"}


class TaskRepository:
    """SQLite-backed persistence for long-running copy/move tasks.

    Each public call opens a short-lived connection (commit on success,
    rollback on error). Rows move queued -> running -> completed/failed via
    the mark_* methods; all timestamps are ISO-8601 UTC strings.
    """

    def __init__(self, db_path: str):
        # Schema is created eagerly so later calls can assume the table exists.
        self._db_path = db_path
        self._ensure_schema()

    def create_task(self, operation: str, source: str, destination: str) -> dict:
        """Insert a new 'queued' task and return the stored row as a dict.

        Raises:
            ValueError: if *operation* is not in VALID_OPERATIONS.
        """
        if operation not in VALID_OPERATIONS:
            raise ValueError("invalid operation")

        task_id = str(uuid.uuid4())
        created_at = self._now_iso()

        with self._connection() as conn:
            conn.execute(
                """
                INSERT INTO tasks (
                    id, operation, status, source, destination,
                    done_bytes, total_bytes, done_items, total_items,
                    current_item, failed_item, error_code, error_message,
                    created_at, started_at, finished_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    task_id,
                    operation,
                    "queued",
                    source,
                    destination,
                    # Progress/error columns start empty; the runner fills them.
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    created_at,
                    None,
                    None,
                ),
            )
            row = conn.execute("SELECT * FROM tasks WHERE id = ?", (task_id,)).fetchone()

        return self._to_dict(row)

    def insert_task_for_testing(self, task: dict) -> None:
        """Insert a fully specified task row (test fixture helper).

        Raises:
            ValueError: on an unknown status or operation.
        """
        status = task["status"]
        operation = task["operation"]
        if status not in VALID_STATUSES:
            raise ValueError("invalid status")
        if operation not in VALID_OPERATIONS:
            raise ValueError("invalid operation")

        with self._connection() as conn:
            conn.execute(
                """
                INSERT INTO tasks (
                    id, operation, status, source, destination,
                    done_bytes, total_bytes, done_items, total_items,
                    current_item, failed_item, error_code, error_message,
                    created_at, started_at, finished_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    task["id"],
                    operation,
                    status,
                    task["source"],
                    task["destination"],
                    task.get("done_bytes"),
                    task.get("total_bytes"),
                    task.get("done_items"),
                    task.get("total_items"),
                    task.get("current_item"),
                    task.get("failed_item"),
                    task.get("error_code"),
                    task.get("error_message"),
                    task["created_at"],
                    task.get("started_at"),
                    task.get("finished_at"),
                ),
            )

    def get_task(self, task_id: str) -> dict | None:
        """Return the task row as a dict, or None when the id is unknown."""
        with self._connection() as conn:
            row = conn.execute("SELECT * FROM tasks WHERE id = ?", (task_id,)).fetchone()
            return self._to_dict(row) if row else None

    def list_tasks(self) -> list[dict]:
        """Return all tasks, newest first."""
        with self._connection() as conn:
            rows = conn.execute(
                """
                SELECT * FROM tasks
                ORDER BY created_at DESC
                """
            ).fetchall()
            return [self._to_dict(row) for row in rows]

    def mark_running(self, task_id: str, done_bytes: int, total_bytes: int | None, current_item: str | None) -> None:
        """Transition a task to 'running', recording start time and progress."""
        started_at = self._now_iso()
        with self._connection() as conn:
            conn.execute(
                """
                UPDATE tasks
                SET status = ?, started_at = ?, done_bytes = ?, total_bytes = ?, current_item = ?
                WHERE id = ?
                """,
                ("running", started_at, done_bytes, total_bytes, current_item, task_id),
            )

    def update_progress(self, task_id: str, done_bytes: int, total_bytes: int | None, current_item: str | None) -> None:
        """Update byte counters and the item currently being processed."""
        with self._connection() as conn:
            conn.execute(
                """
                UPDATE tasks
                SET done_bytes = ?, total_bytes = ?, current_item = ?
                WHERE id = ?
                """,
                (done_bytes, total_bytes, current_item, task_id),
            )

    def mark_completed(self, task_id: str, done_bytes: int | None, total_bytes: int | None) -> None:
        """Transition a task to 'completed' and record the finish time."""
        finished_at = self._now_iso()
        with self._connection() as conn:
            conn.execute(
                """
                UPDATE tasks
                SET status = ?, finished_at = ?, done_bytes = ?, total_bytes = ?
                WHERE id = ?
                """,
                ("completed", finished_at, done_bytes, total_bytes, task_id),
            )

    def mark_failed(
        self,
        task_id: str,
        error_code: str,
        error_message: str,
        failed_item: str | None,
        done_bytes: int | None,
        total_bytes: int | None,
    ) -> None:
        """Transition a task to 'failed', recording error info and finish time."""
        finished_at = self._now_iso()
        with self._connection() as conn:
            conn.execute(
                """
                UPDATE tasks
                SET status = ?, finished_at = ?, error_code = ?, error_message = ?, failed_item = ?, done_bytes = ?, total_bytes = ?
                WHERE id = ?
                """,
                ("failed", finished_at, error_code, error_message, failed_item, done_bytes, total_bytes, task_id),
            )

    def _ensure_schema(self) -> None:
        """Create the tasks table/index (and the DB's parent dir) if missing."""
        db_path = Path(self._db_path)
        if db_path.parent and str(db_path.parent) not in {"", "."}:
            db_path.parent.mkdir(parents=True, exist_ok=True)

        with self._connection() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS tasks (
                    id TEXT PRIMARY KEY,
                    operation TEXT NOT NULL,
                    status TEXT NOT NULL,
                    source TEXT NOT NULL,
                    destination TEXT NOT NULL,
                    done_bytes INTEGER NULL,
                    total_bytes INTEGER NULL,
                    done_items INTEGER NULL,
                    total_items INTEGER NULL,
                    current_item TEXT NULL,
                    failed_item TEXT NULL,
                    error_code TEXT NULL,
                    error_message TEXT NULL,
                    created_at TEXT NOT NULL,
                    started_at TEXT NULL,
                    finished_at TEXT NULL
                )
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_tasks_created_at_desc
                ON tasks(created_at DESC)
                """
            )

    def _connect(self) -> sqlite3.Connection:
        """Open a new connection with Row access by column name."""
        conn = sqlite3.connect(self._db_path)
        conn.row_factory = sqlite3.Row
        return conn

    @contextmanager
    def _connection(self):
        """Yield a connection; commit on success, roll back on error, always close."""
        conn = self._connect()
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()

    @staticmethod
    def _to_dict(row: sqlite3.Row) -> dict:
        """Convert a tasks row into a plain dict for the service layer."""
        return {
            "id": row["id"],
            "operation": row["operation"],
            "status": row["status"],
            "source": row["source"],
            "destination": row["destination"],
            "done_bytes": row["done_bytes"],
            "total_bytes": row["total_bytes"],
            "done_items": row["done_items"],
            "total_items": row["total_items"],
            "current_item": row["current_item"],
            "failed_item": row["failed_item"],
            "error_code": row["error_code"],
            "error_message": row["error_message"],
            "created_at": row["created_at"],
            "started_at": row["started_at"],
            "finished_at": row["finished_at"],
        }

    @staticmethod
    def _now_iso() -> str:
        """Current UTC time as ISO-8601 with a trailing 'Z'."""
        return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
@@ -0,0 +1,75 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
|
||||
from backend.app.config import Settings, get_settings
|
||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.bookmark_service import BookmarkService
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
from backend.app.services.copy_task_service import CopyTaskService
|
||||
from backend.app.services.file_ops_service import FileOpsService
|
||||
from backend.app.services.move_task_service import MoveTaskService
|
||||
from backend.app.services.task_service import TaskService
|
||||
from backend.app.tasks_runner import TaskRunner
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_path_guard() -> PathGuard:
    """Process-wide PathGuard built once from configured root aliases."""
    settings: Settings = get_settings()
    return PathGuard(root_aliases=settings.root_aliases)


@lru_cache(maxsize=1)
def get_filesystem_adapter() -> FilesystemAdapter:
    """Process-wide (stateless) filesystem adapter."""
    return FilesystemAdapter()


@lru_cache(maxsize=1)
def get_task_repository() -> TaskRepository:
    """Process-wide task repository bound to the configured SQLite file."""
    settings: Settings = get_settings()
    return TaskRepository(db_path=settings.task_db_path)


@lru_cache(maxsize=1)
def get_bookmark_repository() -> BookmarkRepository:
    """Process-wide bookmark repository.

    NOTE(review): this reuses ``task_db_path`` — bookmarks and tasks share one
    SQLite file; confirm that is intentional.
    """
    settings: Settings = get_settings()
    return BookmarkRepository(db_path=settings.task_db_path)


@lru_cache(maxsize=1)
def get_task_runner() -> TaskRunner:
    """Process-wide runner that executes copy/move tasks."""
    return TaskRunner(repository=get_task_repository(), filesystem=get_filesystem_adapter())


# The service getters below are FastAPI dependencies: a lightweight service
# object is constructed per request on top of the cached singletons above.


async def get_browse_service() -> BrowseService:
    return BrowseService(path_guard=get_path_guard(), filesystem=get_filesystem_adapter())


async def get_file_ops_service() -> FileOpsService:
    return FileOpsService(path_guard=get_path_guard(), filesystem=get_filesystem_adapter())


async def get_task_service() -> TaskService:
    return TaskService(repository=get_task_repository())


async def get_copy_task_service() -> CopyTaskService:
    return CopyTaskService(
        path_guard=get_path_guard(),
        repository=get_task_repository(),
        runner=get_task_runner(),
    )


async def get_move_task_service() -> MoveTaskService:
    return MoveTaskService(
        path_guard=get_path_guard(),
        repository=get_task_repository(),
        runner=get_task_runner(),
    )


async def get_bookmark_service() -> BookmarkService:
    return BookmarkService(path_guard=get_path_guard(), repository=get_bookmark_repository())
|
||||
@@ -0,0 +1 @@
|
||||
"""Filesystem access layer."""
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,61 @@
|
||||
from __future__ import annotations

import shutil
from collections.abc import Callable
from datetime import datetime, timezone
from pathlib import Path
|
||||
|
||||
|
||||
class FilesystemAdapter:
    """Thin wrapper around filesystem operations used by the services."""

    def list_directory(self, directory: Path, show_hidden: bool) -> tuple[list[dict], list[dict]]:
        """List *directory*, returning ``(directories, files)`` entry dicts.

        Entries are sorted case-insensitively by name. Dot-prefixed entries
        are skipped unless *show_hidden*. Each dict carries ``name``,
        ``modified`` (ISO-8601 UTC) and ``absolute`` (the Path); file dicts
        also carry ``size`` in bytes. Entries whose metadata cannot be read
        (broken symlink, permission race) are skipped instead of failing the
        whole listing.
        """
        directories: list[dict] = []
        files: list[dict] = []

        for entry in sorted(directory.iterdir(), key=lambda item: item.name.lower()):
            if not show_hidden and entry.name.startswith("."):
                continue
            try:
                stat = entry.stat()
            except OSError:
                # One unreadable entry must not abort the listing of the
                # remaining entries.
                continue
            modified = datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z")
            if entry.is_dir():
                directories.append({"name": entry.name, "modified": modified, "absolute": entry})
            elif entry.is_file():
                files.append(
                    {
                        "name": entry.name,
                        "size": int(stat.st_size),
                        "modified": modified,
                        "absolute": entry,
                    }
                )

        return directories, files

    def make_directory(self, path: Path) -> None:
        """Create *path*; the parent must exist and *path* must not."""
        path.mkdir(parents=False, exist_ok=False)

    def rename_path(self, source: Path, destination: Path) -> None:
        """Rename *source* to *destination* (os.rename semantics)."""
        source.rename(destination)

    def move_file(self, source: str, destination: str) -> None:
        """Move via rename; NOTE(review): may fail across filesystems (EXDEV)."""
        Path(source).rename(Path(destination))

    def is_directory_empty(self, path: Path) -> bool:
        """Return True when *path* contains no entries."""
        return not any(path.iterdir())

    def delete_file(self, path: Path) -> None:
        """Delete a single file."""
        path.unlink()

    def delete_empty_directory(self, path: Path) -> None:
        """Delete a directory; raises if it is not empty."""
        path.rmdir()

    def copy_file(self, source: str, destination: str, on_progress: Callable[[int], None] | None = None) -> None:
        """Copy *source* to *destination* in 1 MiB chunks.

        The destination is opened with "xb" so an existing file is never
        overwritten. *on_progress*, if given, receives the cumulative number
        of bytes written after each chunk. Metadata (times, permissions) is
        copied afterwards via shutil.copystat.
        """
        src = Path(source)
        dst = Path(destination)
        with src.open("rb") as in_f, dst.open("xb") as out_f:
            while True:
                chunk = in_f.read(1024 * 1024)
                if not chunk:
                    break
                out_f.write(chunk)
                if on_progress:
                    on_progress(out_f.tell())
        shutil.copystat(src, dst, follow_symlinks=False)
|
||||
@@ -0,0 +1,8 @@
|
||||
import logging
|
||||
|
||||
|
||||
def configure_logging() -> None:
    """Configure root logging: INFO level, timestamped single-line format."""
    line_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
    logging.basicConfig(level=logging.INFO, format=line_format)
|
||||
@@ -0,0 +1,51 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.routes_bookmarks import router as bookmarks_router
|
||||
from backend.app.api.routes_browse import router as browse_router
|
||||
from backend.app.api.routes_copy import router as copy_router
|
||||
from backend.app.api.routes_files import router as files_router
|
||||
from backend.app.api.routes_move import router as move_router
|
||||
from backend.app.api.routes_tasks import router as tasks_router
|
||||
from backend.app.logging import configure_logging
|
||||
|
||||
# Logging is configured at import time so uvicorn workers inherit it.
configure_logging()

BASE_DIR = Path(__file__).resolve().parents[3]
# NOTE(review): BASE_DIR is not referenced below — confirm before removing.
UI_DIR = Path(__file__).resolve().parents[2] / "html"  # static frontend bundle
if not UI_DIR.exists():
    # Fail fast at import: serving without the UI bundle is a deployment error.
    raise RuntimeError(f"UI directory does not exist: {UI_DIR}")

app = FastAPI(title="WebManager MVP Backend")
# Static single-page UI under /ui; all API routers share the /api prefix.
app.mount("/ui", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
app.include_router(browse_router, prefix="/api")
app.include_router(files_router, prefix="/api")
app.include_router(copy_router, prefix="/api")
app.include_router(move_router, prefix="/api")
app.include_router(bookmarks_router, prefix="/api")
app.include_router(tasks_router, prefix="/api")


@app.exception_handler(AppError)
async def handle_app_error(_: Request, exc: AppError) -> JSONResponse:
    """Translate AppError into the JSON error envelope used by every endpoint."""
    return JSONResponse(
        status_code=exc.status_code,
        content={
            "error": {
                "code": exc.code,
                "message": exc.message,
                "details": exc.details,
            }
        },
    )


@app.get("/")
async def read_root() -> dict[str, str]:
    """Liveness probe."""
    return {"status": "ok"}
|
||||
@@ -0,0 +1 @@
|
||||
"""Security helpers."""
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,139 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ResolvedPath:
|
||||
alias: str
|
||||
relative: str
|
||||
absolute: Path
|
||||
|
||||
|
||||
class PathGuard:
|
||||
def __init__(self, root_aliases: dict[str, str]):
|
||||
normalized: dict[str, Path] = {}
|
||||
for alias, root in root_aliases.items():
|
||||
normalized[alias] = Path(root).resolve()
|
||||
self._roots = normalized
|
||||
|
||||
def resolve_directory_path(self, input_path: str) -> ResolvedPath:
|
||||
resolved = self.resolve_path(input_path)
|
||||
if not resolved.absolute.exists():
|
||||
raise AppError(
|
||||
code="path_not_found",
|
||||
message="Requested path was not found",
|
||||
status_code=404,
|
||||
details={"path": input_path},
|
||||
)
|
||||
if not resolved.absolute.is_dir():
|
||||
raise AppError(
|
||||
code="path_type_conflict",
|
||||
message="Requested path is not a directory",
|
||||
status_code=409,
|
||||
details={"path": input_path},
|
||||
)
|
||||
return resolved
|
||||
|
||||
def resolve_existing_path(self, input_path: str) -> ResolvedPath:
|
||||
resolved = self.resolve_path(input_path)
|
||||
if not resolved.absolute.exists():
|
||||
raise AppError(
|
||||
code="path_not_found",
|
||||
message="Requested path was not found",
|
||||
status_code=404,
|
||||
details={"path": input_path},
|
||||
)
|
||||
return resolved
|
||||
|
||||
def resolve_path(self, input_path: str) -> ResolvedPath:
|
||||
alias, rel_segments, candidate = self.resolve_lexical_path(input_path)
|
||||
root = self._roots[alias]
|
||||
|
||||
# Resolve symlinks for existing prefixes; for not-yet-existing tails strict=False keeps
|
||||
# path normalization while still enabling containment check.
|
||||
resolved_candidate = candidate.resolve(strict=False)
|
||||
if not self._is_under_root(resolved_candidate, root):
|
||||
raise AppError(
|
||||
code="path_outside_whitelist",
|
||||
message="Requested path is outside allowed roots",
|
||||
status_code=403,
|
||||
details={"path": input_path},
|
||||
)
|
||||
|
||||
return ResolvedPath(
|
||||
alias=alias,
|
||||
relative=self._format_relative(alias, rel_segments),
|
||||
absolute=resolved_candidate,
|
||||
)
|
||||
|
||||
def resolve_lexical_path(self, input_path: str) -> tuple[str, list[str], Path]:
|
||||
normalized_input = (input_path or "").strip().strip("/")
|
||||
if not normalized_input:
|
||||
raise AppError(
|
||||
code="invalid_request",
|
||||
message="Query parameter 'path' is required",
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
segments = [seg for seg in normalized_input.split("/") if seg]
|
||||
alias = segments[0] if segments else ""
|
||||
if alias not in self._roots:
|
||||
raise AppError(
|
||||
code="invalid_root_alias",
|
||||
message="Unknown root alias",
|
||||
status_code=403,
|
||||
details={"path": input_path},
|
||||
)
|
||||
|
||||
rel_segments = segments[1:]
|
||||
if any(seg == ".." for seg in rel_segments):
|
||||
raise AppError(
|
||||
code="path_traversal_detected",
|
||||
message="Path traversal is not allowed",
|
||||
status_code=403,
|
||||
details={"path": input_path},
|
||||
)
|
||||
|
||||
root = self._roots[alias]
|
||||
candidate = root.joinpath(*rel_segments)
|
||||
return alias, rel_segments, candidate
|
||||
|
||||
def validate_name(self, name: str, field: str) -> str:
|
||||
normalized = (name or "").strip()
|
||||
if not normalized or normalized in {".", ".."} or "/" in normalized or "\\" in normalized:
|
||||
raise AppError(
|
||||
code="invalid_request",
|
||||
message="Invalid name",
|
||||
status_code=400,
|
||||
details={field: name},
|
||||
)
|
||||
return normalized
|
||||
|
||||
def entry_relative_path(self, alias: str, absolute: Path) -> str:
    """Map *absolute* back to its alias-prefixed relative form.

    Resolves symlinks first and raises AppError(403,
    'symlink_escape_detected') when the resolved location falls outside
    the configured root for *alias*.
    """
    base = self._roots[alias]
    real = absolute.resolve(strict=False)
    if not self._is_under_root(real, base):
        raise AppError(
            code="symlink_escape_detected",
            message="Entry resolves outside allowed root",
            status_code=403,
            details={"path": f"{alias}"},
        )
    posix_rel = real.relative_to(base).as_posix()
    segments = [segment for segment in posix_rel.split("/") if segment]
    return self._format_relative(alias, segments)
@staticmethod
|
||||
def _is_under_root(path: Path, root: Path) -> bool:
|
||||
try:
|
||||
path.relative_to(root)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def _format_relative(alias: str, rel_segments: list[str]) -> str:
|
||||
return alias if not rel_segments else f"{alias}/{'/'.join(rel_segments)}"
|
||||
@@ -0,0 +1 @@
|
||||
"""Service layer."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,53 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import BookmarkDeleteResponse, BookmarkItem, BookmarkListResponse
|
||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
|
||||
|
||||
class BookmarkService:
    """Business logic for creating, listing and deleting bookmarks."""

    def __init__(self, path_guard: PathGuard, repository: BookmarkRepository):
        self._path_guard = path_guard
        self._repository = repository

    def create_bookmark(self, path: str, label: str) -> BookmarkItem:
        """Validate *label* and *path*, then persist a new bookmark.

        Raises AppError(400) for a blank label, AppError(409) when a
        bookmark already exists for the same path, and propagates path
        validation errors from PathGuard.
        """
        normalized_label = (label or "").strip()
        if not normalized_label:
            raise AppError(
                code="invalid_request",
                message="Label is required",
                status_code=400,
                details={"label": label},
            )

        # Validates alias/traversal and normalizes to the canonical relative form.
        resolved = self._path_guard.resolve_path(path)

        try:
            bookmark = self._repository.create_bookmark(path=resolved.relative, label=normalized_label)
        except sqlite3.IntegrityError as exc:
            # The repository enforces path uniqueness; chain the cause so the
            # original constraint failure stays visible in tracebacks.
            raise AppError(
                code="already_exists",
                message="Bookmark already exists for path",
                status_code=409,
                details={"path": resolved.relative},
            ) from exc

        return BookmarkItem(**bookmark)

    def list_bookmarks(self) -> BookmarkListResponse:
        """Return every stored bookmark wrapped in the list schema."""
        items = [BookmarkItem(**row) for row in self._repository.list_bookmarks()]
        return BookmarkListResponse(items=items)

    def delete_bookmark(self, bookmark_id: int) -> BookmarkDeleteResponse:
        """Delete a bookmark by id; AppError(404) when it does not exist."""
        deleted = self._repository.delete_bookmark(bookmark_id)
        if not deleted:
            raise AppError(
                code="path_not_found",
                message="Bookmark was not found",
                status_code=404,
                details={"bookmark_id": str(bookmark_id)},
            )
        return BookmarkDeleteResponse(id=bookmark_id)
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from backend.app.api.schemas import BrowseResponse, DirectoryEntry, FileEntry
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
|
||||
|
||||
class BrowseService:
    """Read-only directory listings built on PathGuard and the filesystem adapter."""

    def __init__(self, path_guard: PathGuard, filesystem: FilesystemAdapter):
        self._path_guard = path_guard
        self._filesystem = filesystem

    def browse(self, path: str, show_hidden: bool) -> BrowseResponse:
        """List the directory at *path*, optionally including hidden entries."""
        resolved = self._path_guard.resolve_directory_path(path)
        raw_dirs, raw_files = self._filesystem.list_directory(resolved.absolute, show_hidden=show_hidden)

        def relative_of(entry: dict) -> str:
            # Each child is re-validated so symlink escapes are rejected per entry.
            return self._path_guard.entry_relative_path(resolved.alias, entry["absolute"])

        directory_entries = []
        for entry in raw_dirs:
            directory_entries.append(
                DirectoryEntry(
                    name=entry["name"],
                    path=relative_of(entry),
                    modified=entry["modified"],
                )
            )

        file_entries = []
        for entry in raw_files:
            file_entries.append(
                FileEntry(
                    name=entry["name"],
                    path=relative_of(entry),
                    size=entry["size"],
                    modified=entry["modified"],
                )
            )

        return BrowseResponse(path=resolved.relative, directories=directory_entries, files=file_entries)
@@ -0,0 +1,77 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import TaskCreateResponse
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.tasks_runner import TaskRunner
|
||||
|
||||
|
||||
class CopyTaskService:
    """Creates background copy tasks after validating source and destination."""

    def __init__(self, path_guard: PathGuard, repository: TaskRepository, runner: TaskRunner):
        self._path_guard = path_guard
        self._repository = repository
        self._runner = runner

    def create_copy_task(self, source: str, destination: str) -> TaskCreateResponse:
        """Validate the request, persist a queued task and start the copy."""
        src = self._path_guard.resolve_existing_path(source)
        _, _, lexical_src = self._path_guard.resolve_lexical_path(source)

        # Symlinked sources are rejected using the unresolved (lexical) path.
        if lexical_src.is_symlink():
            raise AppError(
                code="type_conflict",
                message="Source must be a regular file",
                status_code=409,
                details={"path": source},
            )
        if not src.absolute.is_file():
            raise AppError(
                code="type_conflict",
                message="Source must be a file",
                status_code=409,
                details={"path": source},
            )

        dst = self._path_guard.resolve_path(destination)

        # The destination's parent must validate as a directory inside a root.
        parent_rel = self._path_guard.entry_relative_path(dst.alias, dst.absolute.parent)
        self._map_directory_validation(parent_rel)

        if dst.absolute.exists():
            raise AppError(
                code="already_exists",
                message="Target path already exists",
                status_code=409,
                details={"path": dst.relative},
            )

        size = int(src.absolute.stat().st_size)
        task = self._repository.create_task(
            operation="copy",
            source=src.relative,
            destination=dst.relative,
        )

        self._runner.enqueue_copy_file(
            task_id=task["id"],
            source=str(src.absolute),
            destination=str(dst.absolute),
            total_bytes=size,
        )

        return TaskCreateResponse(task_id=task["id"], status=task["status"])

    def _map_directory_validation(self, relative_path: str) -> None:
        """Re-map a parent 'path_type_conflict' into this API's 'type_conflict'."""
        try:
            self._path_guard.resolve_directory_path(relative_path)
        except AppError as exc:
            if exc.code != "path_type_conflict":
                raise
            raise AppError(
                code="type_conflict",
                message="Destination parent is not a directory",
                status_code=409,
                details=exc.details,
            )
@@ -0,0 +1,134 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import DeleteResponse, MkdirResponse, RenameResponse
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
|
||||
|
||||
class FileOpsService:
    """Synchronous filesystem operations: mkdir, rename and delete."""

    def __init__(self, path_guard: PathGuard, filesystem: FilesystemAdapter):
        self._path_guard = path_guard
        self._filesystem = filesystem

    def mkdir(self, parent_path: str, name: str) -> MkdirResponse:
        """Create an empty directory *name* inside *parent_path*.

        Raises AppError(409) when the target already exists and
        AppError(500) for any other filesystem failure.
        """
        resolved_parent = self._path_guard.resolve_directory_path(parent_path)
        safe_name = self._path_guard.validate_name(name, field="name")
        target_relative = self._join_relative(resolved_parent.relative, safe_name)
        resolved_target = self._path_guard.resolve_path(target_relative)

        # Pre-check; the FileExistsError handler below covers the race window.
        if resolved_target.absolute.exists():
            raise self._already_exists(resolved_target.relative)

        try:
            self._filesystem.make_directory(resolved_target.absolute)
        except FileExistsError as exc:
            raise self._already_exists(resolved_target.relative) from exc
        except OSError as exc:
            raise self._io_error(exc) from exc

        return MkdirResponse(path=resolved_target.relative)

    def rename(self, path: str, new_name: str) -> RenameResponse:
        """Rename the entry at *path* to *new_name* within the same parent.

        Raises AppError 404 when the source vanished, 409 when the target
        exists, 500 for other filesystem failures.
        """
        resolved_source = self._path_guard.resolve_existing_path(path)
        safe_name = self._path_guard.validate_name(new_name, field="new_name")

        parent_relative = self._path_guard.entry_relative_path(resolved_source.alias, resolved_source.absolute.parent)
        target_relative = self._join_relative(parent_relative, safe_name)
        resolved_target = self._path_guard.resolve_path(target_relative)

        if resolved_target.absolute.exists():
            raise self._already_exists(resolved_target.relative)

        try:
            self._filesystem.rename_path(resolved_source.absolute, resolved_target.absolute)
        except FileNotFoundError as exc:
            raise AppError(
                code="path_not_found",
                message="Requested path was not found",
                status_code=404,
                details={"path": path},
            ) from exc
        except FileExistsError as exc:
            raise self._already_exists(resolved_target.relative) from exc
        except OSError as exc:
            raise self._io_error(exc) from exc

        return RenameResponse(path=resolved_target.relative)

    def delete(self, path: str) -> DeleteResponse:
        """Delete a file, or an empty directory, at *path*.

        Raises AppError 409 for non-empty directories or unsupported
        entry types, 404 when the path vanished, 500 for other failures.
        """
        resolved_target = self._path_guard.resolve_existing_path(path)

        try:
            if resolved_target.absolute.is_file():
                self._filesystem.delete_file(resolved_target.absolute)
            elif resolved_target.absolute.is_dir():
                if not self._filesystem.is_directory_empty(resolved_target.absolute):
                    raise AppError(
                        code="directory_not_empty",
                        message="Directory is not empty",
                        status_code=409,
                        details={"path": resolved_target.relative},
                    )
                self._filesystem.delete_empty_directory(resolved_target.absolute)
            else:
                # Sockets, FIFOs, broken symlinks, ...: refuse rather than guess.
                raise AppError(
                    code="type_conflict",
                    message="Unsupported path type for delete",
                    status_code=409,
                    details={"path": resolved_target.relative},
                )
        except AppError:
            raise
        except FileNotFoundError as exc:
            raise AppError(
                code="path_not_found",
                message="Requested path was not found",
                status_code=404,
                details={"path": path},
            ) from exc
        except OSError as exc:
            raise self._io_error(exc) from exc

        return DeleteResponse(path=resolved_target.relative)

    @staticmethod
    def _already_exists(relative: str) -> AppError:
        """Build the shared 409 'already_exists' error for *relative*."""
        return AppError(
            code="already_exists",
            message="Target path already exists",
            status_code=409,
            details={"path": relative},
        )

    @staticmethod
    def _io_error(exc: OSError) -> AppError:
        """Build the generic 500 'io_error' wrapper around *exc*."""
        return AppError(
            code="io_error",
            message="Filesystem operation failed",
            status_code=500,
            details={"reason": str(exc)},
        )

    @staticmethod
    def _join_relative(base: str, name: str) -> str:
        """Append *name* to an alias-relative path; bare name when base is empty."""
        return f"{base}/{name}" if base else name
@@ -0,0 +1,77 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import TaskCreateResponse
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.tasks_runner import TaskRunner
|
||||
|
||||
|
||||
class MoveTaskService:
    """Creates background move tasks after validating source and destination."""

    def __init__(self, path_guard: PathGuard, repository: TaskRepository, runner: TaskRunner):
        self._path_guard = path_guard
        self._repository = repository
        self._runner = runner

    def create_move_task(self, source: str, destination: str) -> TaskCreateResponse:
        """Validate the request, persist a queued task and start the move."""
        src = self._path_guard.resolve_existing_path(source)
        _, _, lexical_src = self._path_guard.resolve_lexical_path(source)

        # Symlinked sources are rejected using the unresolved (lexical) path.
        if lexical_src.is_symlink():
            raise AppError(
                code="type_conflict",
                message="Source must be a regular file",
                status_code=409,
                details={"path": source},
            )
        if not src.absolute.is_file():
            raise AppError(
                code="type_conflict",
                message="Source must be a file",
                status_code=409,
                details={"path": source},
            )

        dst = self._path_guard.resolve_path(destination)
        parent_rel = self._path_guard.entry_relative_path(dst.alias, dst.absolute.parent)
        self._map_directory_validation(parent_rel)

        if dst.absolute.exists():
            raise AppError(
                code="already_exists",
                message="Target path already exists",
                status_code=409,
                details={"path": dst.relative},
            )

        size = int(src.absolute.stat().st_size)
        task = self._repository.create_task(
            operation="move",
            source=src.relative,
            destination=dst.relative,
        )

        # The runner picks a cheaper strategy when both ends share a root.
        self._runner.enqueue_move_file(
            task_id=task["id"],
            source=str(src.absolute),
            destination=str(dst.absolute),
            total_bytes=size,
            same_root=src.alias == dst.alias,
        )

        return TaskCreateResponse(task_id=task["id"], status=task["status"])

    def _map_directory_validation(self, relative_path: str) -> None:
        """Re-map a parent 'path_type_conflict' into this API's 'type_conflict'."""
        try:
            self._path_guard.resolve_directory_path(relative_path)
        except AppError as exc:
            if exc.code != "path_type_conflict":
                raise
            raise AppError(
                code="type_conflict",
                message="Destination parent is not a directory",
                status_code=409,
                details=exc.details,
            )
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import TaskDetailResponse, TaskListItem, TaskListResponse
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
|
||||
|
||||
class TaskService:
    """Read/write access to task records through the repository."""

    def __init__(self, repository: TaskRepository):
        self._repository = repository

    def create_task(self, operation: str, source: str, destination: str) -> TaskDetailResponse:
        """Persist a new task row and return it as a detail schema."""
        record = self._repository.create_task(operation=operation, source=source, destination=destination)
        return TaskDetailResponse(**record)

    def get_task(self, task_id: str) -> TaskDetailResponse:
        """Fetch one task; AppError(404, 'task_not_found') when missing."""
        record = self._repository.get_task(task_id)
        if record:
            return TaskDetailResponse(**record)
        raise AppError(
            code="task_not_found",
            message="Task was not found",
            status_code=404,
            details={"task_id": task_id},
        )

    def list_tasks(self) -> TaskListResponse:
        """Return all tasks projected onto the lightweight list schema."""
        list_items = []
        for record in self._repository.list_tasks():
            list_items.append(
                TaskListItem(
                    id=record["id"],
                    operation=record["operation"],
                    status=record["status"],
                    source=record["source"],
                    destination=record["destination"],
                    created_at=record["created_at"],
                    finished_at=record["finished_at"],
                )
            )
        return TaskListResponse(items=list_items)
@@ -0,0 +1,125 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
|
||||
|
||||
class TaskRunner:
    """Executes copy/move jobs on daemon threads, mirroring progress into the repository."""

    def __init__(self, repository: TaskRepository, filesystem: FilesystemAdapter):
        self._repository = repository
        self._filesystem = filesystem

    def enqueue_copy_file(self, task_id: str, source: str, destination: str, total_bytes: int) -> None:
        """Start a daemon thread that copies *source* to *destination*."""
        thread = threading.Thread(
            target=self._run_copy_file,
            args=(task_id, source, destination, total_bytes),
            daemon=True,
        )
        thread.start()

    def enqueue_move_file(
        self,
        task_id: str,
        source: str,
        destination: str,
        total_bytes: int,
        same_root: bool,
    ) -> None:
        """Start a daemon thread that moves *source* to *destination*."""
        thread = threading.Thread(
            target=self._run_move_file,
            args=(task_id, source, destination, total_bytes, same_root),
            daemon=True,
        )
        thread.start()

    def _make_progress_callback(self, task_id: str, source: str, total_bytes: int, progress: dict):
        """Build the on_progress callback shared by copy and cross-root move.

        *progress* is mutated in place so the failure handler can report
        the last byte count that was reached.
        """

        def on_progress(done_bytes: int) -> None:
            progress["done"] = done_bytes
            self._repository.update_progress(
                task_id=task_id,
                done_bytes=done_bytes,
                total_bytes=total_bytes,
                current_item=source,
            )

        return on_progress

    def _mark_io_failure(self, task_id: str, source: str, total_bytes: int, progress: dict, exc: OSError) -> None:
        """Record a failed task, preserving the bytes completed so far."""
        self._repository.mark_failed(
            task_id=task_id,
            error_code="io_error",
            error_message=str(exc),
            failed_item=source,
            done_bytes=progress["done"],
            total_bytes=total_bytes,
        )

    def _run_copy_file(self, task_id: str, source: str, destination: str, total_bytes: int) -> None:
        """Thread target: perform the copy and record the terminal state."""
        self._repository.mark_running(
            task_id=task_id,
            done_bytes=0,
            total_bytes=total_bytes,
            current_item=source,
        )

        progress = {"done": 0}
        on_progress = self._make_progress_callback(task_id, source, total_bytes, progress)

        try:
            self._filesystem.copy_file(source=source, destination=destination, on_progress=on_progress)
            self._repository.mark_completed(
                task_id=task_id,
                done_bytes=total_bytes,
                total_bytes=total_bytes,
            )
        except OSError as exc:
            self._mark_io_failure(task_id, source, total_bytes, progress, exc)

    def _run_move_file(
        self,
        task_id: str,
        source: str,
        destination: str,
        total_bytes: int,
        same_root: bool,
    ) -> None:
        """Thread target: same-root moves delegate to move_file; cross-root
        moves copy with progress reporting and then delete the source."""
        self._repository.mark_running(
            task_id=task_id,
            done_bytes=0,
            total_bytes=total_bytes,
            current_item=source,
        )

        progress = {"done": 0}

        try:
            if same_root:
                self._filesystem.move_file(source=source, destination=destination)
            else:
                on_progress = self._make_progress_callback(task_id, source, total_bytes, progress)
                self._filesystem.copy_file(source=source, destination=destination, on_progress=on_progress)
                self._filesystem.delete_file(Path(source))
            self._repository.mark_completed(
                task_id=task_id,
                done_bytes=total_bytes,
                total_bytes=total_bytes,
            )
        except OSError as exc:
            self._mark_io_failure(task_id, source, total_bytes, progress, exc)
@@ -0,0 +1,3 @@
|
||||
from backend.app.main import app
|
||||
|
||||
__all__ = ["app"]
|
||||
@@ -0,0 +1,6 @@
|
||||
fastapi==0.111.0
|
||||
starlette==0.37.2
|
||||
pydantic==2.12.5
|
||||
httpx==0.27.2
|
||||
anyio==4.4.0
|
||||
sniffio==1.3.1
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,157 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_bookmark_service
|
||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.bookmark_service import BookmarkService
|
||||
|
||||
|
||||
class BookmarksApiGoldenTest(unittest.TestCase):
    """Golden tests for the /api/bookmarks endpoints."""

    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        base = Path(self.temp_dir.name)
        self.root = base / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        self.repo = BookmarkRepository(str(base / "bookmarks.db"))

        guard = PathGuard({"storage1": str(self.root)})
        bookmark_service = BookmarkService(path_guard=guard, repository=self.repo)

        async def _override_bookmark_service() -> BookmarkService:
            return bookmark_service

        app.dependency_overrides[get_bookmark_service] = _override_bookmark_service

    def tearDown(self) -> None:
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "POST":
                    return await client.post(url, json=payload)
                if method == "DELETE":
                    return await client.delete(url)
                return await client.get(url)

        return asyncio.run(_run())

    def _create(self, path: str, label: str) -> httpx.Response:
        """Shorthand for POST /api/bookmarks with the given fields."""
        return self._request("POST", "/api/bookmarks", {"path": path, "label": label})

    def test_create_success(self) -> None:
        response = self._create("storage1/my/path", "My Path")

        self.assertEqual(response.status_code, 200)
        body = response.json()
        self.assertEqual(body["path"], "storage1/my/path")
        self.assertEqual(body["label"], "My Path")
        self.assertIn("id", body)
        self.assertIn("created_at", body)

    def test_list_shape(self) -> None:
        self._create("storage1/a", "A")

        response = self._request("GET", "/api/bookmarks")

        self.assertEqual(response.status_code, 200)
        items = response.json()["items"]
        self.assertEqual(len(items), 1)
        self.assertEqual(set(items[0].keys()), {"id", "path", "label", "created_at"})

    def test_delete_success(self) -> None:
        created = self._create("storage1/a", "A").json()

        response = self._request("DELETE", f"/api/bookmarks/{created['id']}")

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"id": created["id"]})

    def test_invalid_path(self) -> None:
        response = self._create("unknown/path", "A")

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "invalid_root_alias")

    def test_invalid_label(self) -> None:
        response = self._create("storage1/a", " ")

        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "invalid_request",
                    "message": "Label is required",
                    "details": {"label": " "},
                }
            },
        )

    def test_duplicate_conflict(self) -> None:
        self._create("storage1/a", "A")

        response = self._create("storage1/a", "Again")

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "already_exists",
                    "message": "Bookmark already exists for path",
                    "details": {"path": "storage1/a"},
                }
            },
        )

    def test_traversal_attempt(self) -> None:
        response = self._create("storage1/../etc", "Bad")

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,105 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_browse_service
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
|
||||
|
||||
class BrowseApiGoldenTest(unittest.TestCase):
    """Golden tests for GET /api/browse."""

    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)

        visible_dir = self.root / "folder"
        visible_dir.mkdir()
        visible_file = self.root / "video.mkv"
        visible_file.write_bytes(b"abc")

        dot_dir = self.root / ".hidden_dir"
        dot_dir.mkdir()
        dot_file = self.root / ".secret"
        dot_file.write_bytes(b"x")

        # Pin a fixed mtime so the "modified" field is deterministic.
        import os

        fixed_mtime = 1710000000
        for entry in (visible_dir, visible_file, dot_dir, dot_file):
            entry.touch()
            entry.chmod(0o755)
            os.utime(entry, (fixed_mtime, fixed_mtime))

        browse_service = BrowseService(
            path_guard=PathGuard({"storage1": str(self.root)}),
            filesystem=FilesystemAdapter(),
        )

        async def _override_browse_service() -> BrowseService:
            return browse_service

        app.dependency_overrides[get_browse_service] = _override_browse_service

    def tearDown(self) -> None:
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _get(self, path: str, show_hidden: str | None = None) -> httpx.Response:
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                params = {"path": path}
                if show_hidden is not None:
                    params["show_hidden"] = show_hidden
                return await client.get("/api/browse", params=params)

        return asyncio.run(_run())

    def test_browse_success_default_hides_hidden_entries(self) -> None:
        response = self._get("storage1")

        self.assertEqual(response.status_code, 200)
        modified = datetime.fromtimestamp(1710000000, tz=timezone.utc).isoformat().replace("+00:00", "Z")
        self.assertEqual(
            response.json(),
            {
                "path": "storage1",
                "directories": [
                    {
                        "name": "folder",
                        "path": "storage1/folder",
                        "modified": modified,
                    }
                ],
                "files": [
                    {
                        "name": "video.mkv",
                        "path": "storage1/video.mkv",
                        "size": 3,
                        "modified": modified,
                    }
                ],
            },
        )

    def test_browse_success_show_hidden_true(self) -> None:
        response = self._get("storage1", show_hidden="true")

        self.assertEqual(response.status_code, 200)
        payload = response.json()
        self.assertEqual([d["name"] for d in payload["directories"]], [".hidden_dir", "folder"])
        self.assertEqual([f["name"] for f in payload["files"]], [".secret", "video.mkv"])


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,211 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_copy_task_service, get_task_service
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.copy_task_service import CopyTaskService
|
||||
from backend.app.services.task_service import TaskService
|
||||
from backend.app.tasks_runner import TaskRunner
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
|
||||
|
||||
class FailingFilesystemAdapter(FilesystemAdapter):
    """Test double whose copy_file always raises OSError."""

    def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
        # Simulate an I/O failure so task failure paths can be exercised.
        raise OSError("forced copy failure")
||||
class CopyApiGoldenTest(unittest.TestCase):
    """Golden tests for POST /api/files/copy and the resulting task lifecycle.

    Each test drives the real FastAPI app in-process (httpx ASGI transport)
    with the copy/task services wired to a temporary directory tree and an
    on-disk task repository.
    """

    def setUp(self) -> None:
        """Create a scratch root, a task repository, and wire real services."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))

        # Both aliases map to the same directory; copy tests only need one tree.
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FilesystemAdapter())

    def tearDown(self) -> None:
        """Remove dependency overrides and delete the scratch directory."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _set_services(self, path_guard: PathGuard, filesystem: FilesystemAdapter) -> None:
        """Install copy/task services built on *path_guard* and *filesystem*.

        Called again by individual tests that need a failing filesystem.
        """
        runner = TaskRunner(repository=self.repo, filesystem=filesystem)
        copy_service = CopyTaskService(path_guard=path_guard, repository=self.repo, runner=runner)
        task_service = TaskService(repository=self.repo)

        async def _override_copy_service() -> CopyTaskService:
            return copy_service

        async def _override_task_service() -> TaskService:
            return task_service

        app.dependency_overrides[get_copy_task_service] = _override_copy_service
        app.dependency_overrides[get_task_service] = _override_task_service

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        """Issue a request against the in-process app and return the response."""

        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "POST":
                    return await client.post(url, json=payload)
                return await client.get(url)

        return asyncio.run(_run())

    def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
        """Poll GET /api/tasks/{id} until the task is completed or failed.

        Fix: the deadline is computed from ``time.monotonic()`` instead of
        ``time.time()`` — the wall clock can jump (NTP, DST), which would
        silently shorten or extend the timeout.
        """
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            response = self._request("GET", f"/api/tasks/{task_id}")
            body = response.json()
            if body["status"] in {"completed", "failed"}:
                return body
            time.sleep(0.02)
        self.fail("task did not reach terminal state in time")

    def test_copy_success_create_task_shape(self) -> None:
        """A valid copy returns 202/queued and the task completes with byte counts."""
        src = self.root / "source.txt"
        src.write_text("hello", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/source.txt", "destination": "storage1/copy.txt"},
        )

        self.assertEqual(response.status_code, 202)
        body = response.json()
        self.assertIn("task_id", body)
        self.assertEqual(body["status"], "queued")

        detail = self._wait_task(body["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertEqual(detail["total_bytes"], 5)
        self.assertEqual(detail["done_bytes"], 5)
        self.assertTrue((self.root / "copy.txt").exists())
        self.assertEqual((self.root / "copy.txt").read_text(encoding="utf-8"), "hello")

    def test_copy_source_not_found(self) -> None:
        """A missing source file yields the 404 path_not_found envelope."""
        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/missing.txt", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "path_not_found",
                    "message": "Requested path was not found",
                    "details": {"path": "storage1/missing.txt"},
                }
            },
        )

    def test_copy_source_is_directory_type_conflict(self) -> None:
        """Copying a directory as a file is rejected with 409 type_conflict."""
        (self.root / "dir").mkdir()

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/dir", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()["error"]["code"], "type_conflict")

    def test_copy_destination_exists_already_exists(self) -> None:
        """An existing destination yields the 409 already_exists envelope."""
        (self.root / "source.txt").write_text("x", encoding="utf-8")
        (self.root / "exists.txt").write_text("y", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/source.txt", "destination": "storage1/exists.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "already_exists",
                    "message": "Target path already exists",
                    "details": {"path": "storage1/exists.txt"},
                }
            },
        )

    def test_copy_traversal_source(self) -> None:
        """`..` in the source path is rejected with 403 path_traversal_detected."""
        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/../etc/passwd", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")

    def test_copy_traversal_destination(self) -> None:
        """`..` in the destination path is rejected with 403 path_traversal_detected."""
        (self.root / "source.txt").write_text("x", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/source.txt", "destination": "storage1/../etc/out.txt"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")

    def test_copy_source_symlink_rejected(self) -> None:
        """A symlink source is refused with 409 type_conflict."""
        target = self.root / "real.txt"
        target.write_text("x", encoding="utf-8")
        link = self.root / "link.txt"
        link.symlink_to(target)

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/link.txt", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()["error"]["code"], "type_conflict")

    def test_copy_runtime_io_error_failed_task_shape(self) -> None:
        """An I/O error during the copy marks the task failed with io_error."""
        src = self.root / "source.txt"
        src.write_text("hello", encoding="utf-8")

        # Re-wire the services with a filesystem whose copy always raises.
        path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
        self._set_services(path_guard=path_guard, filesystem=FailingFilesystemAdapter())

        response = self._request(
            "POST",
            "/api/files/copy",
            {"source": "storage1/source.txt", "destination": "storage1/copy.txt"},
        )
        self.assertEqual(response.status_code, 202)

        task_id = response.json()["task_id"]
        detail = self._wait_task(task_id)
        self.assertEqual(detail["status"], "failed")
        self.assertEqual(detail["error_code"], "io_error")
        self.assertEqual(detail["failed_item"], str(src))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python <this_file>.py
    unittest.main()
|
||||
@@ -0,0 +1,110 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_browse_service
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
|
||||
|
||||
class BrowseApiErrorsGoldenTest(unittest.TestCase):
    """Golden tests for the error payloads returned by GET /api/browse."""

    def setUp(self) -> None:
        """Build a one-file scratch tree and override the browse service."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        (self.root / "a.txt").write_text("a", encoding="utf-8")

        service = BrowseService(
            path_guard=PathGuard({"storage1": str(self.root)}),
            filesystem=FilesystemAdapter(),
        )

        async def _override_browse_service() -> BrowseService:
            return service

        app.dependency_overrides[get_browse_service] = _override_browse_service

    def tearDown(self) -> None:
        """Drop the dependency override and delete the scratch tree."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _get(self, path: str) -> httpx.Response:
        """Issue GET /api/browse?path=<path> against the in-process app."""

        async def _fetch() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                return await client.get("/api/browse", params={"path": path})

        return asyncio.run(_fetch())

    @staticmethod
    def _error_payload(code: str, message: str, path: str) -> dict:
        """Build the canonical error envelope the API is expected to emit."""
        return {"error": {"code": code, "message": message, "details": {"path": path}}}

    def test_invalid_root_alias_error_shape(self) -> None:
        """An unknown root alias is rejected with 403 / invalid_root_alias."""
        response = self._get("unknown/path")

        self.assertEqual(response.status_code, 403)
        expected = self._error_payload("invalid_root_alias", "Unknown root alias", "unknown/path")
        self.assertEqual(response.json(), expected)

    def test_traversal_error_shape(self) -> None:
        """A `..` component is rejected with 403 / path_traversal_detected."""
        response = self._get("storage1/../etc")

        self.assertEqual(response.status_code, 403)
        expected = self._error_payload(
            "path_traversal_detected", "Path traversal is not allowed", "storage1/../etc"
        )
        self.assertEqual(response.json(), expected)

    def test_not_found_error_shape(self) -> None:
        """A missing path yields 404 / path_not_found."""
        response = self._get("storage1/missing")

        self.assertEqual(response.status_code, 404)
        expected = self._error_payload(
            "path_not_found", "Requested path was not found", "storage1/missing"
        )
        self.assertEqual(response.json(), expected)

    def test_type_conflict_error_shape(self) -> None:
        """Browsing a regular file yields 409 / path_type_conflict."""
        response = self._get("storage1/a.txt")

        self.assertEqual(response.status_code, 409)
        expected = self._error_payload(
            "path_type_conflict", "Requested path is not a directory", "storage1/a.txt"
        )
        self.assertEqual(response.json(), expected)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python <this_file>.py
    unittest.main()
|
||||
@@ -0,0 +1,323 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_file_ops_service
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.file_ops_service import FileOpsService
|
||||
|
||||
|
||||
class FileOpsApiGoldenTest(unittest.TestCase):
    """Golden tests for the synchronous file-op endpoints
    (/api/files/mkdir, /rename, /delete), pinning both status codes and
    exact error envelopes.
    """

    def setUp(self) -> None:
        """Create root/scope with two seed files and override the service."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)

        self.scope = self.root / "scope"
        self.scope.mkdir(parents=True, exist_ok=True)
        (self.scope / "old.txt").write_text("x", encoding="utf-8")
        (self.scope / "existing.txt").write_text("y", encoding="utf-8")

        service = FileOpsService(
            path_guard=PathGuard({"storage1": str(self.root)}),
            filesystem=FilesystemAdapter(),
        )

        async def _override_file_ops_service() -> FileOpsService:
            return service

        app.dependency_overrides[get_file_ops_service] = _override_file_ops_service

    def tearDown(self) -> None:
        """Drop the dependency override and delete the scratch tree."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _post(self, url: str, payload: dict[str, str]) -> httpx.Response:
        """POST *payload* as JSON to *url* on the in-process app."""
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                return await client.post(url, json=payload)

        return asyncio.run(_run())

    def test_mkdir_success(self) -> None:
        """A valid mkdir returns 200 with the created path and makes the dir."""
        response = self._post(
            "/api/files/mkdir",
            {"parent_path": "storage1/scope", "name": "new_folder"},
        )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"path": "storage1/scope/new_folder"})
        self.assertTrue((self.scope / "new_folder").is_dir())

    def test_mkdir_conflict_directory_exists(self) -> None:
        """mkdir onto an existing directory yields 409 already_exists."""
        (self.scope / "existing_dir").mkdir()
        response = self._post(
            "/api/files/mkdir",
            {"parent_path": "storage1/scope", "name": "existing_dir"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "already_exists",
                    "message": "Target path already exists",
                    "details": {"path": "storage1/scope/existing_dir"},
                }
            },
        )

    def test_mkdir_conflict_file_exists(self) -> None:
        """mkdir onto an existing file also yields 409 already_exists."""
        response = self._post(
            "/api/files/mkdir",
            {"parent_path": "storage1/scope", "name": "existing.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "already_exists",
                    "message": "Target path already exists",
                    "details": {"path": "storage1/scope/existing.txt"},
                }
            },
        )

    def test_rename_success(self) -> None:
        """A valid rename returns the new path and moves the file on disk."""
        response = self._post(
            "/api/files/rename",
            {"path": "storage1/scope/old.txt", "new_name": "renamed.txt"},
        )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"path": "storage1/scope/renamed.txt"})
        self.assertFalse((self.scope / "old.txt").exists())
        self.assertTrue((self.scope / "renamed.txt").exists())

    def test_rename_conflict(self) -> None:
        """Renaming onto an existing name yields 409 already_exists."""
        response = self._post(
            "/api/files/rename",
            {"path": "storage1/scope/old.txt", "new_name": "existing.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "already_exists",
                    "message": "Target path already exists",
                    "details": {"path": "storage1/scope/existing.txt"},
                }
            },
        )

    def test_rename_not_found(self) -> None:
        """Renaming a missing file yields 404 path_not_found."""
        response = self._post(
            "/api/files/rename",
            {"path": "storage1/scope/missing.txt", "new_name": "renamed.txt"},
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "path_not_found",
                    "message": "Requested path was not found",
                    "details": {"path": "storage1/scope/missing.txt"},
                }
            },
        )

    def test_rename_invalid_new_name_dotdot(self) -> None:
        """`..` as a new name is rejected with 400 invalid_request."""
        response = self._post(
            "/api/files/rename",
            {"path": "storage1/scope/old.txt", "new_name": ".."},
        )

        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "invalid_request",
                    "message": "Invalid name",
                    "details": {"new_name": ".."},
                }
            },
        )

    def test_rename_invalid_new_name_with_slash(self) -> None:
        """A path separator in the new name is rejected with 400 invalid_request."""
        response = self._post(
            "/api/files/rename",
            {"path": "storage1/scope/old.txt", "new_name": "a/b"},
        )

        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "invalid_request",
                    "message": "Invalid name",
                    "details": {"new_name": "a/b"},
                }
            },
        )

    def test_mkdir_invalid_path(self) -> None:
        """A path separator in a mkdir name is rejected with 400 invalid_request."""
        response = self._post(
            "/api/files/mkdir",
            {"parent_path": "storage1/scope", "name": "bad/name"},
        )

        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "invalid_request",
                    "message": "Invalid name",
                    "details": {"name": "bad/name"},
                }
            },
        )

    def test_mkdir_traversal_attempt(self) -> None:
        """`..` in the parent path is rejected with 403 path_traversal_detected."""
        response = self._post(
            "/api/files/mkdir",
            {"parent_path": "storage1/../etc", "name": "x"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "path_traversal_detected",
                    "message": "Path traversal is not allowed",
                    "details": {"path": "storage1/../etc"},
                }
            },
        )

    def test_delete_file_success(self) -> None:
        """Deleting a file returns 200 with its path and removes it."""
        target = self.scope / "delete_me.txt"
        target.write_text("z", encoding="utf-8")

        response = self._post(
            "/api/files/delete",
            {"path": "storage1/scope/delete_me.txt"},
        )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"path": "storage1/scope/delete_me.txt"})
        self.assertFalse(target.exists())

    def test_delete_empty_directory_success(self) -> None:
        """Deleting an empty directory returns 200 and removes it."""
        target = self.scope / "empty_dir"
        target.mkdir()

        response = self._post(
            "/api/files/delete",
            {"path": "storage1/scope/empty_dir"},
        )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"path": "storage1/scope/empty_dir"})
        self.assertFalse(target.exists())

    def test_delete_not_found(self) -> None:
        """Deleting a missing path yields 404 path_not_found."""
        response = self._post(
            "/api/files/delete",
            {"path": "storage1/scope/missing.txt"},
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "path_not_found",
                    "message": "Requested path was not found",
                    "details": {"path": "storage1/scope/missing.txt"},
                }
            },
        )

    def test_delete_traversal_attempt(self) -> None:
        """`..` in the delete path is rejected with 403 path_traversal_detected."""
        response = self._post(
            "/api/files/delete",
            {"path": "storage1/../etc/passwd"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "path_traversal_detected",
                    "message": "Path traversal is not allowed",
                    "details": {"path": "storage1/../etc/passwd"},
                }
            },
        )

    def test_delete_non_empty_directory_conflict(self) -> None:
        """Deleting a non-empty directory yields 409 directory_not_empty."""
        target = self.scope / "non_empty"
        target.mkdir()
        (target / "a.txt").write_text("a", encoding="utf-8")

        response = self._post(
            "/api/files/delete",
            {"path": "storage1/scope/non_empty"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "directory_not_empty",
                    "message": "Directory is not empty",
                    "details": {"path": "storage1/scope/non_empty"},
                }
            },
        )

    def test_delete_invalid_path(self) -> None:
        """An empty path is rejected with 400 invalid_request and null details."""
        response = self._post(
            "/api/files/delete",
            {"path": ""},
        )

        self.assertEqual(response.status_code, 400)
        self.assertEqual(
            response.json(),
            {
                "error": {
                    "code": "invalid_request",
                    "message": "Query parameter 'path' is required",
                    "details": None,
                }
            },
        )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python <this_file>.py
    unittest.main()
|
||||
@@ -0,0 +1,215 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_move_task_service, get_task_service
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.move_task_service import MoveTaskService
|
||||
from backend.app.services.task_service import TaskService
|
||||
from backend.app.tasks_runner import TaskRunner
|
||||
|
||||
|
||||
class FailingDeleteFilesystemAdapter(FilesystemAdapter):
    """Filesystem stub whose delete operation always fails.

    A cross-root move copies then deletes the source; making the delete
    raise exercises the move task's failure path after the copy succeeded.
    """

    def delete_file(self, path: Path) -> None:
        """Unconditionally raise instead of deleting; *path* is ignored."""
        raise OSError("forced delete failure")
|
||||
|
||||
|
||||
class MoveApiGoldenTest(unittest.TestCase):
    """Golden tests for POST /api/files/move and the resulting task lifecycle.

    Uses two distinct storage roots so both same-root and cross-root moves
    can be exercised against the real FastAPI app in-process.
    """

    def setUp(self) -> None:
        """Create two scratch roots, a task repository, and wire real services."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root1 = Path(self.temp_dir.name) / "root1"
        self.root2 = Path(self.temp_dir.name) / "root2"
        self.root1.mkdir(parents=True, exist_ok=True)
        self.root2.mkdir(parents=True, exist_ok=True)

        self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))
        path_guard = PathGuard({"storage1": str(self.root1), "storage2": str(self.root2)})
        self._set_services(path_guard=path_guard, filesystem=FilesystemAdapter())

    def tearDown(self) -> None:
        """Remove dependency overrides and delete the scratch directories."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _set_services(self, path_guard: PathGuard, filesystem: FilesystemAdapter) -> None:
        """Install move/task services built on *path_guard* and *filesystem*.

        Called again by individual tests that need a failing filesystem.
        """
        runner = TaskRunner(repository=self.repo, filesystem=filesystem)
        move_service = MoveTaskService(path_guard=path_guard, repository=self.repo, runner=runner)
        task_service = TaskService(repository=self.repo)

        async def _override_move_service() -> MoveTaskService:
            return move_service

        async def _override_task_service() -> TaskService:
            return task_service

        app.dependency_overrides[get_move_task_service] = _override_move_service
        app.dependency_overrides[get_task_service] = _override_task_service

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        """Issue a request against the in-process app and return the response."""

        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "POST":
                    return await client.post(url, json=payload)
                return await client.get(url)

        return asyncio.run(_run())

    def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
        """Poll GET /api/tasks/{id} until the task is completed or failed.

        Fix: the deadline is computed from ``time.monotonic()`` instead of
        ``time.time()`` — the wall clock can jump (NTP, DST), which would
        silently shorten or extend the timeout.
        """
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            response = self._request("GET", f"/api/tasks/{task_id}")
            body = response.json()
            if body["status"] in {"completed", "failed"}:
                return body
            time.sleep(0.02)
        self.fail("task did not reach terminal state in time")

    def test_move_success_same_root_create_task_shape_and_completed(self) -> None:
        """A same-root move returns 202/queued and completes, relocating the file."""
        src = self.root1 / "source.txt"
        src.write_text("hello", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/source.txt", "destination": "storage1/moved.txt"},
        )

        self.assertEqual(response.status_code, 202)
        body = response.json()
        self.assertIn("task_id", body)
        self.assertEqual(body["status"], "queued")

        detail = self._wait_task(body["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertTrue((self.root1 / "moved.txt").exists())
        self.assertFalse(src.exists())

    def test_move_success_cross_root_create_task_shape_and_completed(self) -> None:
        """A cross-root move returns 202/queued and completes, relocating the file."""
        src = self.root1 / "source.txt"
        src.write_text("hello", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/source.txt", "destination": "storage2/moved.txt"},
        )

        self.assertEqual(response.status_code, 202)
        body = response.json()
        self.assertIn("task_id", body)
        self.assertEqual(body["status"], "queued")

        detail = self._wait_task(body["task_id"])
        self.assertEqual(detail["status"], "completed")
        self.assertTrue((self.root2 / "moved.txt").exists())
        self.assertFalse(src.exists())

    def test_move_source_not_found(self) -> None:
        """A missing source file yields 404 path_not_found."""
        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/missing.txt", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()["error"]["code"], "path_not_found")

    def test_move_source_is_directory_type_conflict(self) -> None:
        """Moving a directory as a file is rejected with 409 type_conflict."""
        (self.root1 / "dir").mkdir()

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/dir", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()["error"]["code"], "type_conflict")

    def test_move_destination_exists_already_exists(self) -> None:
        """An existing destination yields 409 already_exists."""
        (self.root1 / "source.txt").write_text("x", encoding="utf-8")
        (self.root1 / "exists.txt").write_text("y", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/source.txt", "destination": "storage1/exists.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()["error"]["code"], "already_exists")

    def test_move_traversal_source(self) -> None:
        """`..` in the source path is rejected with 403 path_traversal_detected."""
        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/../etc/passwd", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")

    def test_move_traversal_destination(self) -> None:
        """`..` in the destination path is rejected with 403 path_traversal_detected."""
        (self.root1 / "source.txt").write_text("x", encoding="utf-8")

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/source.txt", "destination": "storage1/../etc/out.txt"},
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")

    def test_move_source_symlink_rejected(self) -> None:
        """A symlink source is refused with 409 type_conflict."""
        target = self.root1 / "real.txt"
        target.write_text("x", encoding="utf-8")
        link = self.root1 / "link.txt"
        link.symlink_to(target)

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/link.txt", "destination": "storage1/out.txt"},
        )

        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()["error"]["code"], "type_conflict")

    def test_move_runtime_io_error_failed_task_shape(self) -> None:
        """A delete failure after a cross-root copy yields a failed io_error task.

        The copy half succeeded, so the destination exists while the source
        is still present — both are asserted.
        """
        src = self.root1 / "source.txt"
        src.write_text("hello", encoding="utf-8")

        # Re-wire the services with a filesystem whose delete always raises.
        path_guard = PathGuard({"storage1": str(self.root1), "storage2": str(self.root2)})
        self._set_services(path_guard=path_guard, filesystem=FailingDeleteFilesystemAdapter())

        response = self._request(
            "POST",
            "/api/files/move",
            {"source": "storage1/source.txt", "destination": "storage2/moved.txt"},
        )
        self.assertEqual(response.status_code, 202)

        task_id = response.json()["task_id"]
        detail = self._wait_task(task_id)

        self.assertEqual(detail["status"], "failed")
        self.assertEqual(detail["error_code"], "io_error")
        self.assertTrue((self.root2 / "moved.txt").exists())
        self.assertTrue(src.exists())
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python <this_file>.py
    unittest.main()
|
||||
@@ -0,0 +1,261 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_task_service
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.main import app
|
||||
from backend.app.services.task_service import TaskService
|
||||
|
||||
|
||||
class TasksApiGoldenTest(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.temp_dir = tempfile.TemporaryDirectory()
|
||||
self.db_path = str(Path(self.temp_dir.name) / "tasks.db")
|
||||
self.repo = TaskRepository(self.db_path)
|
||||
self.service = TaskService(self.repo)
|
||||
|
||||
async def _override_task_service() -> TaskService:
|
||||
return self.service
|
||||
|
||||
app.dependency_overrides[get_task_service] = _override_task_service
|
||||
|
||||
def tearDown(self) -> None:
|
||||
app.dependency_overrides.clear()
|
||||
self.temp_dir.cleanup()
|
||||
|
||||
def _get(self, url: str) -> httpx.Response:
|
||||
async def _run() -> httpx.Response:
|
||||
transport = httpx.ASGITransport(app=app)
|
||||
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||
return await client.get(url)
|
||||
|
||||
return asyncio.run(_run())
|
||||
|
||||
def _insert_task(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
operation: str,
|
||||
status: str,
|
||||
source: str,
|
||||
destination: str,
|
||||
created_at: str,
|
||||
started_at: str | None = None,
|
||||
finished_at: str | None = None,
|
||||
done_bytes: int | None = None,
|
||||
total_bytes: int | None = None,
|
||||
done_items: int | None = None,
|
||||
total_items: int | None = None,
|
||||
current_item: str | None = None,
|
||||
failed_item: str | None = None,
|
||||
error_code: str | None = None,
|
||||
error_message: str | None = None,
|
||||
) -> None:
|
||||
self.repo.insert_task_for_testing(
|
||||
{
|
||||
"id": task_id,
|
||||
"operation": operation,
|
||||
"status": status,
|
||||
"source": source,
|
||||
"destination": destination,
|
||||
"done_bytes": done_bytes,
|
||||
"total_bytes": total_bytes,
|
||||
"done_items": done_items,
|
||||
"total_items": total_items,
|
||||
"current_item": current_item,
|
||||
"failed_item": failed_item,
|
||||
"error_code": error_code,
|
||||
"error_message": error_message,
|
||||
"created_at": created_at,
|
||||
"started_at": started_at,
|
||||
"finished_at": finished_at,
|
||||
}
|
||||
)
|
||||
|
||||
def test_get_tasks_empty_list(self) -> None:
|
||||
response = self._get("/api/tasks")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.json(), {"items": []})
|
||||
|
||||
def test_get_tasks_list_shape(self) -> None:
|
||||
self._insert_task(
|
||||
task_id="task-old",
|
||||
operation="copy",
|
||||
status="completed",
|
||||
source="storage1/a.txt",
|
||||
destination="storage2/a.txt",
|
||||
created_at="2026-03-10T10:00:00Z",
|
||||
finished_at="2026-03-10T10:00:05Z",
|
||||
)
|
||||
self._insert_task(
|
||||
task_id="task-new",
|
||||
operation="move",
|
||||
status="running",
|
||||
source="storage1/b.txt",
|
||||
destination="storage2/b.txt",
|
||||
created_at="2026-03-10T10:01:00Z",
|
||||
)
|
||||
|
||||
response = self._get("/api/tasks")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(
|
||||
response.json(),
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": "task-new",
|
||||
"operation": "move",
|
||||
"status": "running",
|
||||
"source": "storage1/b.txt",
|
||||
"destination": "storage2/b.txt",
|
||||
"created_at": "2026-03-10T10:01:00Z",
|
||||
"finished_at": None,
|
||||
},
|
||||
{
|
||||
"id": "task-old",
|
||||
"operation": "copy",
|
||||
"status": "completed",
|
||||
"source": "storage1/a.txt",
|
||||
"destination": "storage2/a.txt",
|
||||
"created_at": "2026-03-10T10:00:00Z",
|
||||
"finished_at": "2026-03-10T10:00:05Z",
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
def test_get_task_detail_queued(self) -> None:
|
||||
self._insert_task(
|
||||
task_id="task-queued",
|
||||
operation="copy",
|
||||
status="queued",
|
||||
source="storage1/a.txt",
|
||||
destination="storage2/a.txt",
|
||||
created_at="2026-03-10T10:00:00Z",
|
||||
)
|
||||
|
||||
response = self._get("/api/tasks/task-queued")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(
|
||||
response.json(),
|
||||
{
|
||||
"id": "task-queued",
|
||||
"operation": "copy",
|
||||
"status": "queued",
|
||||
"source": "storage1/a.txt",
|
||||
"destination": "storage2/a.txt",
|
||||
"done_bytes": None,
|
||||
"total_bytes": None,
|
||||
"done_items": None,
|
||||
"total_items": None,
|
||||
"current_item": None,
|
||||
"failed_item": None,
|
||||
"error_code": None,
|
||||
"error_message": None,
|
||||
"created_at": "2026-03-10T10:00:00Z",
|
||||
"started_at": None,
|
||||
"finished_at": None,
|
||||
},
|
||||
)
|
||||
|
||||
def test_get_task_detail_running(self) -> None:
|
||||
self._insert_task(
|
||||
task_id="task-running",
|
||||
operation="move",
|
||||
status="running",
|
||||
source="storage1/a.txt",
|
||||
destination="storage2/a.txt",
|
||||
created_at="2026-03-10T10:00:00Z",
|
||||
started_at="2026-03-10T10:00:01Z",
|
||||
done_bytes=1024,
|
||||
total_bytes=2048,
|
||||
done_items=1,
|
||||
total_items=2,
|
||||
current_item="storage1/a.txt",
|
||||
)
|
||||
|
||||
response = self._get("/api/tasks/task-running")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = response.json()
|
||||
self.assertEqual(body["status"], "running")
|
||||
self.assertEqual(body["done_bytes"], 1024)
|
||||
self.assertEqual(body["total_bytes"], 2048)
|
||||
self.assertEqual(body["current_item"], "storage1/a.txt")
|
||||
|
||||
def test_get_task_detail_completed(self) -> None:
|
||||
self._insert_task(
|
||||
task_id="task-completed",
|
||||
operation="copy",
|
||||
status="completed",
|
||||
source="storage1/a.txt",
|
||||
destination="storage2/a.txt",
|
||||
created_at="2026-03-10T10:00:00Z",
|
||||
started_at="2026-03-10T10:00:01Z",
|
||||
finished_at="2026-03-10T10:00:03Z",
|
||||
done_bytes=2048,
|
||||
total_bytes=2048,
|
||||
)
|
||||
|
||||
response = self._get("/api/tasks/task-completed")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = response.json()
|
||||
self.assertEqual(body["status"], "completed")
|
||||
self.assertEqual(body["finished_at"], "2026-03-10T10:00:03Z")
|
||||
self.assertEqual(body["error_code"], None)
|
||||
|
||||
def test_get_task_detail_failed(self) -> None:
|
||||
self._insert_task(
|
||||
task_id="task-failed",
|
||||
operation="move",
|
||||
status="failed",
|
||||
source="storage1/a.txt",
|
||||
destination="storage2/a.txt",
|
||||
created_at="2026-03-10T10:00:00Z",
|
||||
started_at="2026-03-10T10:00:01Z",
|
||||
finished_at="2026-03-10T10:00:02Z",
|
||||
failed_item="storage1/a.txt",
|
||||
error_code="io_error",
|
||||
error_message="write failed",
|
||||
)
|
||||
|
||||
response = self._get("/api/tasks/task-failed")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = response.json()
|
||||
self.assertEqual(body["status"], "failed")
|
||||
self.assertEqual(body["failed_item"], "storage1/a.txt")
|
||||
self.assertEqual(body["error_code"], "io_error")
|
||||
self.assertEqual(body["error_message"], "write failed")
|
||||
|
||||
def test_get_task_not_found(self) -> None:
|
||||
response = self._get("/api/tasks/task-missing")
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
self.assertEqual(
|
||||
response.json(),
|
||||
{
|
||||
"error": {
|
||||
"code": "task_not_found",
|
||||
"message": "Task was not found",
|
||||
"details": {"task_id": "task-missing"},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# Allow running this test module directly: `python <this file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
@@ -0,0 +1,49 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from starlette.routing import Mount
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.main import app
|
||||
|
||||
|
||||
class UiSmokeGoldenTest(unittest.TestCase):
    """Golden smoke checks for the static UI mounted at /ui."""

    def _ui_mount(self) -> Mount:
        """Return the Mount registered at /ui, failing the test when missing."""
        mount = next(
            (
                route
                for route in app.routes
                if isinstance(route, Mount) and route.path == "/ui"
            ),
            None,
        )
        if mount is None:
            self.fail("Expected /ui mount to be registered")
        return mount

    def test_ui_mount_and_index_contains_expected_panels(self) -> None:
        """index.html exists under the mount and contains the core panel ids."""
        mount = self._ui_mount()
        self.assertIsInstance(mount.app, StaticFiles)
        index_path = Path(mount.app.directory) / "index.html"
        self.assertTrue(index_path.exists())

        body = index_path.read_text(encoding="utf-8")
        for marker in (
            'id="workspace"',
            'id="footer-bar"',
            'id="left-pane"',
            'id="right-pane"',
        ):
            self.assertIn(marker, body)
        # These panels must not be baked into the static index page.
        for absent in ('id="bookmarks-panel"', 'id="tasks-panel"'):
            self.assertNotIn(absent, body)

    def test_ui_static_assets_are_present_and_mapped(self) -> None:
        """app.js and style.css exist on disk and resolve to /ui/* URLs."""
        mount = self._ui_mount()
        static_root = Path(mount.app.directory)
        for asset in ("app.js", "style.css"):
            self.assertTrue((static_root / asset).exists())

        self.assertEqual(app.url_path_for("ui", path="/app.js"), "/ui/app.js")
        self.assertEqual(
            app.url_path_for("ui", path="/style.css"), "/ui/style.css"
        )
|
||||
|
||||
|
||||
# Allow running this test module directly: `python <this file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,38 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||
|
||||
|
||||
class BookmarkRepositoryTest(unittest.TestCase):
    """Behavioral tests for BookmarkRepository on a throwaway SQLite file."""

    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        db_file = Path(self.temp_dir.name) / "bookmarks.db"
        self.repo = BookmarkRepository(str(db_file))

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def test_duplicate_path_raises_integrity_error(self) -> None:
        """Inserting the same path twice surfaces sqlite3.IntegrityError."""
        self.repo.create_bookmark(path="storage1/a", label="A")
        with self.assertRaises(sqlite3.IntegrityError):
            self.repo.create_bookmark(path="storage1/a", label="Again")

    def test_list_order_created_at_desc(self) -> None:
        """list_bookmarks returns items newest-first."""
        older = self.repo.create_bookmark(path="storage1/a", label="A")
        newer = self.repo.create_bookmark(path="storage1/b", label="B")

        listed = self.repo.list_bookmarks()

        self.assertEqual(
            [listed[0]["id"], listed[1]["id"]],
            [newer["id"], older["id"]],
        )
|
||||
|
||||
|
||||
# Allow running this test module directly: `python <this file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
@@ -0,0 +1,57 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
|
||||
|
||||
class PathGuardTest(unittest.TestCase):
    """Security tests for PathGuard whitelist resolution.

    Uses one whitelisted root ("storage1" -> self.root) and one directory
    outside the whitelist (self.other) to probe traversal and symlink escapes.
    """

    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root = Path(self.temp_dir.name) / "root"
        self.root.mkdir(parents=True, exist_ok=True)
        self.other = Path(self.temp_dir.name) / "other"
        self.other.mkdir(parents=True, exist_ok=True)
        self.guard = PathGuard({"storage1": str(self.root)})

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def test_resolve_under_whitelisted_root(self) -> None:
        """A path under a whitelisted alias resolves to the real directory."""
        target = self.root / "series"
        target.mkdir()

        resolved = self.guard.resolve_directory_path("storage1/series")

        self.assertEqual(resolved.alias, "storage1")
        self.assertEqual(resolved.relative, "storage1/series")
        self.assertEqual(resolved.absolute, target.resolve())

    def test_rejects_path_traversal(self) -> None:
        """A `..` component is rejected with a 403 traversal error."""
        with self.assertRaises(AppError) as ctx:
            self.guard.resolve_path("storage1/../etc")

        self.assertEqual(ctx.exception.code, "path_traversal_detected")
        self.assertEqual(ctx.exception.status_code, 403)

    def test_rejects_symlink_escape(self) -> None:
        """A symlink pointing outside the whitelist is rejected with 403."""
        outside_dir = self.other / "escape"
        outside_dir.mkdir()
        symlink = self.root / "link"
        try:
            symlink.symlink_to(outside_dir, target_is_directory=True)
        except OSError as exc:
            # Symlink creation can be forbidden by the environment (e.g.
            # Windows without the symlink privilege). That is a platform
            # limitation, not a PathGuard bug — skip instead of erroring.
            self.skipTest(f"cannot create symlinks on this platform: {exc}")

        with self.assertRaises(AppError) as ctx:
            self.guard.resolve_directory_path("storage1/link")

        self.assertEqual(ctx.exception.code, "path_outside_whitelist")
        self.assertEqual(ctx.exception.status_code, 403)
|
||||
|
||||
|
||||
# Allow running this test module directly: `python <this file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
@@ -0,0 +1,63 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
|
||||
|
||||
class TaskRepositoryTest(unittest.TestCase):
    """Tests for TaskRepository ordering and validation on a temp SQLite db."""

    def setUp(self) -> None:
        self.temp_dir = tempfile.TemporaryDirectory()
        self.db_path = str(Path(self.temp_dir.name) / "tasks.db")
        self.repo = TaskRepository(self.db_path)

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def test_list_tasks_sorted_created_at_desc(self) -> None:
        """list_tasks returns rows newest-first by created_at."""
        older = dict(
            id="task-old",
            operation="copy",
            status="queued",
            source="storage1/a",
            destination="storage2/a",
            created_at="2026-03-10T09:00:00Z",
        )
        newer = dict(
            id="task-new",
            operation="move",
            status="queued",
            source="storage1/b",
            destination="storage2/b",
            created_at="2026-03-10T10:00:00Z",
        )
        for row in (older, newer):
            self.repo.insert_task_for_testing(row)

        listed = self.repo.list_tasks()

        self.assertEqual([task["id"] for task in listed], ["task-new", "task-old"])

    def test_insert_rejects_invalid_status(self) -> None:
        """An unknown status value is rejected with ValueError."""
        bad_row = dict(
            id="task-x",
            operation="copy",
            status="unknown",
            source="storage1/a",
            destination="storage2/a",
            created_at="2026-03-10T09:00:00Z",
        )
        with self.assertRaises(ValueError):
            self.repo.insert_task_for_testing(bad_row)
|
||||
|
||||
|
||||
# Allow running this test module directly: `python <this file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user