Compare commits
63 Commits
939a7fd191
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| fc4ec39646 | |||
| 9537a29de3 | |||
| ae6a9d8c45 | |||
| 61d0c8de41 | |||
| 3d82699535 | |||
| 492082c2b7 | |||
| 9a7ca4e2db | |||
| 66abf991d8 | |||
| a52493459a | |||
| 7d910479f9 | |||
| 73b09d2802 | |||
| 9d5fb5a0c9 | |||
| c0bd6b647c | |||
| cc5a978e79 | |||
| 7f7665880f | |||
| 14600dd5b6 | |||
| a816f71ad5 | |||
| 15c85e874c | |||
| 90b1828160 | |||
| 5265d6458c | |||
| e85e51d64a | |||
| 3fb8528b0e | |||
| 8af4b1a6b0 | |||
| d459f3c524 | |||
| 2981ac2796 | |||
| d463b3977d | |||
| 592b10acc2 | |||
| 8ea2bd1498 | |||
| ea337338e3 | |||
| 7e7c2f3958 | |||
| dab87878cc | |||
| 610a648fd1 | |||
| af1d1eea23 | |||
| 6b4fb34b40 | |||
| 3dfbc64913 | |||
| 73c539ba4a | |||
| d08ca24c87 | |||
| 4e1288fe47 | |||
| 8908b1dce9 | |||
| 84f3eedb74 | |||
| 054e736aa6 | |||
| 7bb59a2b65 | |||
| 0615324607 | |||
| 3987de27e0 | |||
| d84b3da561 | |||
| f092007998 | |||
| f0b04fd4ee | |||
| 287dddb7b3 | |||
| e2e206573d | |||
| 360815498e | |||
| 8fe9d0f436 | |||
| 8d1ff79912 | |||
| 24d47dce8c | |||
| e43d49540d | |||
| 7ab233be2c | |||
| bf4bb3d917 | |||
| 4ba4020c2a | |||
| 018c3dcd94 | |||
| 05569576a7 | |||
| ac18291a3c | |||
| e25d43200f | |||
| 09c3e14dea | |||
| ab83ee3f20 |
@@ -0,0 +1,7 @@
|
|||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
*.log
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
.DS_Store
|
||||||
|
.sqlite3
|
||||||
@@ -18,7 +18,7 @@ RUN mkdir -p /app/backend /app/html /app/conf /Volumes/8TB /Volumes/8TB_RAID1
|
|||||||
|
|
||||||
# Installeer een lichtgewicht Python API framework (FastAPI)
|
# Installeer een lichtgewicht Python API framework (FastAPI)
|
||||||
# We gebruiken --break-system-packages omdat we in een container zitten
|
# We gebruiken --break-system-packages omdat we in een container zitten
|
||||||
RUN pip3 install fastapi uvicorn --break-system-packages
|
RUN pip3 install fastapi uvicorn python-multipart --break-system-packages
|
||||||
|
|
||||||
# Exposeer de poort voor de webinterface
|
# Exposeer de poort voor de webinterface
|
||||||
EXPOSE 8030
|
EXPOSE 8030
|
||||||
|
|||||||
@@ -0,0 +1,67 @@
|
|||||||
|
# Finder Commander
|
||||||
|
|
||||||
|
Lokale webapp die aanvoelt als een Midnight Commander-achtige file manager voor macOS/Linux home directories.
|
||||||
|
|
||||||
|
## Wat zit erin
|
||||||
|
|
||||||
|
- twee panelen naast elkaar
|
||||||
|
- actieve panel met duidelijke focusrand
|
||||||
|
- keyboard shortcuts:
|
||||||
|
- `Tab` wissel panel
|
||||||
|
- `↑ / ↓ / PgUp / PgDn` navigatie
|
||||||
|
- `Enter` open map of view bestand
|
||||||
|
- `Backspace` omhoog
|
||||||
|
- `Space` markeer bestand/map
|
||||||
|
- `F3` view
|
||||||
|
- `F4` edit
|
||||||
|
- `Shift+F4` nieuw bestand
|
||||||
|
- `F5` copy naar ander panel
|
||||||
|
- `F6` move naar ander panel
|
||||||
|
- `Shift+F6` rename
|
||||||
|
- `F7` nieuwe map
|
||||||
|
- `F8` delete naar `~/.Trash`
|
||||||
|
- `Ctrl+H` toggle verborgen bestanden
|
||||||
|
- `Ctrl+R` refresh
|
||||||
|
- `Alt+X` focus command line
|
||||||
|
- command line onderin met beperkte veilige commando's:
|
||||||
|
- `cd <path>`
|
||||||
|
- `mkdir <name>`
|
||||||
|
- `touch <name>`
|
||||||
|
- `select <glob>`
|
||||||
|
- `help`
|
||||||
|
- upload per panel
|
||||||
|
- viewer voor tekst en afbeeldingen
|
||||||
|
- editor voor tekstbestanden
|
||||||
|
- padbeveiliging: alles blijft binnen `~`
|
||||||
|
|
||||||
|
## Starten
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./run-local.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Het script kiest automatisch `python3.14` als dat aanwezig is. Bestaat er al een `.venv` met een andere Python minor-versie, dan wordt die automatisch opnieuw aangemaakt.
|
||||||
|
|
||||||
|
Open daarna:
|
||||||
|
|
||||||
|
```text
|
||||||
|
http://127.0.0.1:8765/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Python 3.14-notes
|
||||||
|
|
||||||
|
Deze build is opgeschoond voor Python 3.14.x:
|
||||||
|
|
||||||
|
- minimale Uvicorn-installatie (`uvicorn` i.p.v. `uvicorn[standard]`)
|
||||||
|
- geen optionele C-extensies nodig om te starten
|
||||||
|
- dependencies ondersteunen Python 3.14
|
||||||
|
|
||||||
|
## Opmerking
|
||||||
|
|
||||||
|
Dit is een **MC-like v1**, geen volledige Midnight Commander clone. Bewust niet ingebouwd:
|
||||||
|
|
||||||
|
- shell/subshell uitvoering
|
||||||
|
- chmod/chown dialogs
|
||||||
|
- archive browsing als directory
|
||||||
|
- remote FTP/SFTP panels
|
||||||
|
- diff/compare directories
|
||||||
@@ -0,0 +1,522 @@
|
|||||||
|
from __future__ import annotations

import fnmatch
import html
import mimetypes
import os
import secrets
import shutil
import stat
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Literal, Optional

from fastapi import Body, FastAPI, File, Form, HTTPException, Request, UploadFile
from fastapi.responses import FileResponse, HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
APP_NAME = "Finder Commander"
# All filesystem operations in this app are confined to the user's home directory.
HOME_ROOT = Path.home().resolve()
# Soft deletes move entries here (macOS-style trash location).
TRASH_DIR = HOME_ROOT / ".Trash"
# Files larger than 2 MiB are never offered as text previews.
MAX_TEXT_PREVIEW_BYTES = 2 * 1024 * 1024
# Fresh random token per process; injected into the page and required on mutating requests.
CSRF_TOKEN = secrets.token_urlsafe(32)

app = FastAPI(title=APP_NAME)
# Static assets and Jinja templates live next to this module.
app.mount("/static", StaticFiles(directory=str(Path(__file__).parent / "static")), name="static")
templates = Jinja2Templates(directory=str(Path(__file__).parent / "templates"))
|
||||||
|
|
||||||
|
|
||||||
|
class PathsPayload(BaseModel):
    """Selection of home-relative paths plus an optional destination directory.

    Used by the copy and move endpoints; ``destination_dir`` is required
    there and validated in the handlers.
    """

    paths: list[str] = Field(default_factory=list)
    destination_dir: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RenamePayload(BaseModel):
    """Rename request: existing home-relative ``path`` and the bare ``new_name``."""

    path: str
    new_name: str
|
||||||
|
|
||||||
|
|
||||||
|
class DeletePayload(BaseModel):
    """Delete request: selected paths and whether to trash or delete permanently."""

    paths: list[str] = Field(default_factory=list)
    mode: Literal["trash", "permanent"] = "trash"
|
||||||
|
|
||||||
|
|
||||||
|
class CommandPayload(BaseModel):
    """One command-line entry from the UI plus the panel's current directory."""

    command: str
    cwd: str = ""
|
||||||
|
|
||||||
|
# Resolve the models' forward references eagerly: `from __future__ import
# annotations` makes all annotations lazy strings, so pydantic needs an
# explicit rebuild before the first request arrives.
PathsPayload.model_rebuild()
RenamePayload.model_rebuild()
DeletePayload.model_rebuild()
CommandPayload.model_rebuild()
|
||||||
|
|
||||||
|
# Extensions treated as previewable text even when `mimetypes` cannot
# classify them (includes systemd/quadlet unit suffixes such as
# .container, .network, .pod and .kube).
TEXT_SUFFIXES = {
    ".md",
    ".txt",
    ".py",
    ".js",
    ".ts",
    ".tsx",
    ".jsx",
    ".css",
    ".html",
    ".json",
    ".yaml",
    ".yml",
    ".toml",
    ".ini",
    ".env",
    ".log",
    ".xml",
    ".sh",
    ".zsh",
    ".bash",
    ".c",
    ".cpp",
    ".h",
    ".java",
    ".go",
    ".rs",
    ".sql",
    ".conf",
    ".service",
    ".container",
    ".network",
    ".pod",
    ".kube",
}
|
||||||
|
|
||||||
|
|
||||||
|
def _now_iso() -> str:
|
||||||
|
return datetime.utcnow().isoformat(timespec="seconds") + "Z"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def rel_from_home(path: Path) -> str:
    """Render *path* relative to the home root; the root itself becomes ""."""
    if path == HOME_ROOT:
        return ""
    return str(path.relative_to(HOME_ROOT))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_within_home(candidate: Path) -> Path:
    """Path-traversal guard: *candidate* must lie under HOME_ROOT (403 otherwise)."""
    try:
        candidate.relative_to(HOME_ROOT)
    except ValueError as exc:
        raise HTTPException(status_code=403, detail="Path escapes home directory") from exc
    return candidate
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_name(name: str) -> str:
    """Validate a single path component; 400 on empty, ".", "..", or slashes."""
    cleaned = (name or "").strip()
    is_invalid = not cleaned or cleaned in (".", "..") or "/" in cleaned
    if is_invalid:
        raise HTTPException(status_code=400, detail="Invalid name")
    return cleaned
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_user_path(raw_path: Optional[str], *, must_exist: bool = True) -> Path:
    """Map a home-relative request path to an absolute path inside HOME_ROOT.

    Raises 403 when the resolved path escapes the home directory and 404
    when *must_exist* is set but the path does not exist.
    """
    cleaned = (raw_path or "").strip()
    resolved = ensure_within_home((HOME_ROOT / cleaned).resolve(strict=False))
    if must_exist and not resolved.exists():
        raise HTTPException(status_code=404, detail="Path not found")
    return resolved
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def check_origin(request: Request) -> None:
    """Allow only same-origin browser requests; headerless requests pass."""
    origin = request.headers.get("origin")
    if origin is None or origin == "":
        return
    expected = str(request.base_url).rstrip("/")
    same_origin = origin.rstrip("/") == expected
    if not same_origin:
        raise HTTPException(status_code=403, detail="Origin not allowed")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def check_csrf(request: Request) -> None:
    """Validate the per-process CSRF token sent in the X-CSRF-Token header.

    Uses ``secrets.compare_digest`` so the comparison runs in constant
    time (a plain ``!=`` could leak token bytes through timing); a
    missing header counts as an invalid token.
    """
    token = request.headers.get("x-csrf-token") or ""
    if not secrets.compare_digest(token, CSRF_TOKEN):
        raise HTTPException(status_code=403, detail="Invalid CSRF token")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def perms_string(mode: int) -> str:
    """Render an ``st_mode`` as an ls-style permission string, e.g. '-rw-r--r--'."""
    return stat.filemode(mode)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def can_preview_text(path: Path) -> bool:
    """Heuristic: is *path* small and text-like enough for the text viewer?"""
    # Directories and oversized files are never previewable; the `or`
    # short-circuits so stat() is not called on directories.
    if path.is_dir() or path.stat().st_size > MAX_TEXT_PREVIEW_BYTES:
        return False
    mime, _ = mimetypes.guess_type(path.name)
    texty_app_mimes = {"application/json", "application/xml", "application/javascript"}
    if mime is not None and (mime.startswith("text/") or mime in texty_app_mimes):
        return True
    # Fall back to a whitelist of known-text extensions.
    return path.suffix.lower() in TEXT_SUFFIXES
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def entry_payload(path: Path) -> dict:
    """Build the JSON-serializable metadata dict for one directory entry.

    Uses lstat() so symlinks report their own size/mtime rather than
    their target's.
    """
    info = path.lstat()
    mime, _ = mimetypes.guess_type(path.name)
    return {
        "name": path.name,
        "rel_path": rel_from_home(path),
        "parent_rel_path": rel_from_home(path.parent),
        "kind": "directory" if path.is_dir() else "file",
        "is_symlink": path.is_symlink(),
        "size": info.st_size,
        "modified": datetime.fromtimestamp(info.st_mtime).isoformat(timespec="seconds"),
        "mime": mime or "application/octet-stream",
        "perms": perms_string(info.st_mode),
        "can_preview_text": can_preview_text(path) if path.is_file() else False,
    }
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def sorted_entries(path: Path, show_hidden: bool = False) -> list[dict]:
    """List *path*: directories first, then files, case-insensitively by name.

    Dot-prefixed entries are skipped unless *show_hidden* is set.
    Raises 403 when the operating system denies listing the directory.
    """
    try:
        children = list(path.iterdir())
    except PermissionError as exc:
        raise HTTPException(status_code=403, detail="Permission denied by operating system") from exc
    visible = [child for child in children if show_hidden or not child.name.startswith(".")]
    visible.sort(key=lambda p: (not p.is_dir(), p.name.lower()))
    return [entry_payload(child) for child in visible]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def move_to_trash(path: Path) -> Path:
    """Move *path* into ~/.Trash, de-duplicating a name clash with a timestamp."""
    TRASH_DIR.mkdir(parents=True, exist_ok=True)
    target = TRASH_DIR / path.name
    if target.exists():
        stamped_name = f"{target.stem}-{int(time.time())}{target.suffix}"
        target = TRASH_DIR / stamped_name
    shutil.move(str(path), str(target))
    return target
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def copy_entry(source: Path, destination_dir: Path) -> Path:
    """Copy *source* (file or whole tree) into *destination_dir*.

    Raises 409 when the destination name already exists; symlinks are
    preserved rather than followed.
    """
    destination = destination_dir / source.name
    if destination.exists():
        raise HTTPException(status_code=409, detail=f"Destination already exists: {destination.name}")
    if source.is_dir():
        shutil.copytree(source, destination, symlinks=True)
        return destination
    shutil.copy2(source, destination, follow_symlinks=False)
    return destination
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def move_entry(source: Path, destination_dir: Path) -> Path:
    """Move *source* into *destination_dir*; 409 when the name is taken."""
    destination = destination_dir / source.name
    if destination.exists():
        raise HTTPException(status_code=409, detail=f"Destination already exists: {destination.name}")
    shutil.move(str(source), str(destination))
    return destination
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def select_paths_or_current(paths: list[str], cwd: str) -> list[Path]:
    """Resolve every selected path, rejecting an empty selection with a 400.

    Each resolve enforces containment in the home directory (403 on
    escape) and existence (404 on missing).

    NOTE(review): the ``cwd`` argument is currently unused — presumably
    reserved for a "fall back to the focused entry" behavior hinted at by
    the function name; confirm before relying on it.
    """
    result = [resolve_user_path(p) for p in paths]
    if not result:
        raise HTTPException(status_code=400, detail="No paths selected")
    return result
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_from_cwd(cwd_path: Path, raw: str, *, must_exist: bool = True) -> Path:
    """Resolve *raw* against *cwd_path*, keeping the result inside HOME_ROOT.

    403 on escape; 404 when *must_exist* is set but the path is absent.
    """
    cleaned = (raw or "").strip()
    resolved = ensure_within_home((cwd_path / cleaned).resolve(strict=False))
    if must_exist and not resolved.exists():
        raise HTTPException(status_code=404, detail="Path not found")
    return resolved
|
||||||
|
|
||||||
|
|
||||||
|
def run_command(command: str, cwd: str) -> dict:
    """Execute one whitelisted pseudo-shell command inside *cwd*.

    Supported verbs: ``cd``, ``mkdir``, ``touch``, ``select``, ``help``;
    anything else is a 400.  No real shell is involved — every verb maps
    to a safe filesystem or listing operation constrained to the home
    directory.

    Fix: ``mkdir``/``touch`` on an existing name previously raised an
    unhandled ``FileExistsError`` (HTTP 500); they now return a clean 409.
    """
    command = (command or "").strip()
    if not command:
        raise HTTPException(status_code=400, detail="Empty command")
    cwd_path = resolve_user_path(cwd)
    if not cwd_path.is_dir():
        raise HTTPException(status_code=400, detail="CWD is not a directory")

    parts = command.split()
    verb = parts[0].lower()
    args = parts[1:]

    if verb == "cd":
        raw_target = " ".join(args) if args else ""
        # Absolute targets are validated directly against home; relative ones
        # resolve from the panel's current directory.
        if raw_target.startswith("/"):
            target = resolve_user_path(raw_target)
        else:
            target = resolve_from_cwd(cwd_path, raw_target or ".")
        if not target.is_dir():
            raise HTTPException(status_code=400, detail="Target is not a directory")
        return {"ok": True, "action": "cd", "cwd": rel_from_home(target), "message": str(target)}

    if verb == "mkdir":
        name = sanitize_name(" ".join(args))
        target = resolve_from_cwd(cwd_path, name, must_exist=False)
        try:
            target.mkdir(exist_ok=False)
        except FileExistsError as exc:
            raise HTTPException(status_code=409, detail=f"Already exists: {name}") from exc
        return {"ok": True, "action": "mkdir", "cwd": rel_from_home(cwd_path), "message": f"Created {name}"}

    if verb == "touch":
        name = sanitize_name(" ".join(args))
        target = resolve_from_cwd(cwd_path, name, must_exist=False)
        try:
            target.touch(exist_ok=False)
        except FileExistsError as exc:
            raise HTTPException(status_code=409, detail=f"Already exists: {name}") from exc
        return {"ok": True, "action": "touch", "cwd": rel_from_home(cwd_path), "message": f"Created {name}"}

    if verb == "select":
        pattern = " ".join(args).strip() or "*"
        # Match against all entries, including hidden ones, by display name.
        entries = sorted_entries(cwd_path, show_hidden=True)
        matches = [e["rel_path"] for e in entries if fnmatch.fnmatch(e["name"], pattern)]
        return {
            "ok": True,
            "action": "select",
            "cwd": rel_from_home(cwd_path),
            "message": f"Matched {len(matches)} item(s)",
            "matches": matches,
        }

    if verb == "help":
        return {
            "ok": True,
            "action": "help",
            "cwd": rel_from_home(cwd_path),
            "message": "Commands: cd <path>, mkdir <name>, touch <name>, select <glob>, help",
        }

    raise HTTPException(status_code=400, detail="Unsupported command")
|
||||||
|
|
||||||
|
|
||||||
|
@app.middleware("http")
async def harden_headers(request: Request, call_next):
    """Attach defensive security headers to every outgoing response."""
    response = await call_next(request)
    hardening = {
        "X-Frame-Options": "DENY",
        "Content-Security-Policy": (
            "default-src 'self'; img-src 'self' data:; style-src 'self'; script-src 'self'; "
            "connect-src 'self'; frame-ancestors 'none'; base-uri 'self'; form-action 'self'"
        ),
        "Referrer-Policy": "no-referrer",
        "X-Content-Type-Options": "nosniff",
    }
    for header, value in hardening.items():
        response.headers[header] = value
    return response
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/health")
def health() -> dict:
    """Liveness probe: app identity, current UTC time, and the home root."""
    return {
        "ok": True,
        "app": APP_NAME,
        "time": _now_iso(),
        "home": str(HOME_ROOT),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/", response_class=HTMLResponse)
def index(request: Request):
    """Serve the single-page UI, seeding it with the per-process CSRF token."""
    context = {
        "app_name": APP_NAME,
        "home_root": str(HOME_ROOT),
        "csrf_token": CSRF_TOKEN,
    }
    return templates.TemplateResponse(request, "index.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/list")
def api_list(path: str = "", show_hidden: bool = False) -> dict:
    """Return the listing of one directory (home-relative *path*) for a panel."""
    target = resolve_user_path(path)
    if not target.is_dir():
        raise HTTPException(status_code=400, detail="Path is not a directory")
    parent_rel = "" if target == HOME_ROOT else rel_from_home(target.parent)
    return {
        "cwd": rel_from_home(target),
        "absolute": str(target),
        "parent": parent_rel,
        "entries": sorted_entries(target, show_hidden=show_hidden),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/read")
def api_read(path: str) -> dict:
    """Return the UTF-8 text of a previewable file.

    Falls back to a lossy decode (replacement characters) when the file
    is not valid UTF-8; 400 for directories, 415 for non-text files.
    """
    target = resolve_user_path(path)
    if target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot read a directory as text")
    if not can_preview_text(target):
        raise HTTPException(status_code=415, detail="File is not previewable as text")
    try:
        content = target.read_text(encoding="utf-8")
        encoding = "utf-8"
    except UnicodeDecodeError:
        # Re-read with replacement characters so binary-ish text still renders.
        content = target.read_text(encoding="utf-8", errors="replace")
        encoding = "utf-8 (lossy)"
    return {
        "path": rel_from_home(target),
        "name": target.name,
        "encoding": encoding,
        "content": content,
        "size": target.stat().st_size,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/meta")
def api_meta(path: str) -> dict:
    """Return one entry's metadata dict, augmented with its absolute path."""
    target = resolve_user_path(path)
    meta = entry_payload(target)
    meta["absolute"] = str(target)
    return meta
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/download")
def api_download(path: str):
    """Stream a single file as an attachment; directories are rejected (400)."""
    target = resolve_user_path(path)
    if target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot download a directory")
    return FileResponse(path=target, filename=target.name)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/preview")
def api_preview(path: str):
    """Serve an inline image preview; anything that is not image/* gets a 415."""
    target = resolve_user_path(path)
    if target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot preview a directory")
    mime, _ = mimetypes.guess_type(target.name)
    is_image = bool(mime) and mime.startswith("image/")
    if not is_image:
        raise HTTPException(status_code=415, detail="Preview only supports images")
    return FileResponse(path=target, media_type=mime)
|
||||||
|
|
||||||
|
|
||||||
|
@app.put("/api/write")
async def api_write(request: Request, path: str = Form(...), content: str = Form(...)) -> dict:
    """Atomically write *content* to *path* (temp file + rename), CSRF protected.

    Refuses to overwrite a directory; missing parent directories are
    created.  Fix: the intermediate ``.tmp-write`` file is now removed if
    the write fails before the rename, instead of being left behind.
    """
    check_origin(request)
    check_csrf(request)
    target = resolve_user_path(path, must_exist=False)
    ensure_within_home(target.parent.resolve(strict=False))
    if target.exists() and target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot overwrite a directory")
    target.parent.mkdir(parents=True, exist_ok=True)
    tmp = target.with_name(target.name + ".tmp-write")
    try:
        tmp.write_text(content, encoding="utf-8")
        # os.replace is atomic on POSIX: readers never observe a partial file.
        os.replace(tmp, target)
    except Exception:
        tmp.unlink(missing_ok=True)  # clean up the stray temp file
        raise
    return {"ok": True, "path": rel_from_home(target)}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/mkdir")
async def api_mkdir(request: Request, path: str = Form(...), name: str = Form(...)) -> dict:
    """Create one new directory *name* inside *path*, CSRF protected.

    Fix: an existing name previously escaped as an unhandled
    ``FileExistsError`` (HTTP 500); it is now reported as a 409 conflict,
    consistent with copy/move/rename.
    """
    check_origin(request)
    check_csrf(request)
    base = resolve_user_path(path)
    if not base.is_dir():
        raise HTTPException(status_code=400, detail="Base path is not a directory")
    child = resolve_user_path(str(Path(rel_from_home(base)) / sanitize_name(name)), must_exist=False)
    try:
        child.mkdir(parents=False, exist_ok=False)
    except FileExistsError as exc:
        raise HTTPException(status_code=409, detail="Destination already exists") from exc
    return {"ok": True, "path": rel_from_home(child)}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/upload")
async def api_upload(request: Request, path: str = Form(...), files: list[UploadFile] = File(...)) -> dict:
    """Stream uploaded files into the target directory, CSRF protected.

    Client-sent paths are flattened to their base name; nameless parts
    are skipped.  NOTE(review): an existing file with the same name is
    silently overwritten — confirm this is intended.
    """
    check_origin(request)
    check_csrf(request)
    base = resolve_user_path(path)
    if not base.is_dir():
        raise HTTPException(status_code=400, detail="Upload target is not a directory")
    saved: list[str] = []
    base_rel = rel_from_home(base)
    for upload in files:
        filename = Path(upload.filename or "").name
        if not filename:
            continue
        destination = resolve_user_path(str(Path(base_rel) / filename), must_exist=False)
        # Copy in 1 MiB chunks to bound memory use for large uploads.
        with destination.open("wb") as out:
            chunk = await upload.read(1024 * 1024)
            while chunk:
                out.write(chunk)
                chunk = await upload.read(1024 * 1024)
        saved.append(rel_from_home(destination))
    return {"ok": True, "saved": saved}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/rename")
async def api_rename(request: Request, payload: RenamePayload) -> dict:
    """Rename an entry within its parent directory; 409 if the name is taken."""
    check_origin(request)
    check_csrf(request)
    source = resolve_user_path(payload.path)
    new_rel = Path(rel_from_home(source.parent)) / sanitize_name(payload.new_name)
    destination = resolve_user_path(str(new_rel), must_exist=False)
    if destination.exists():
        raise HTTPException(status_code=409, detail="Destination already exists")
    os.replace(source, destination)
    return {"ok": True, "old_path": rel_from_home(source), "new_path": rel_from_home(destination)}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/copy")
async def api_copy(request: Request, payload: PathsPayload) -> dict:
    """Copy every selected entry into ``payload.destination_dir``."""
    check_origin(request)
    check_csrf(request)
    if payload.destination_dir is None:
        raise HTTPException(status_code=400, detail="Missing destination_dir")
    destination_dir = resolve_user_path(payload.destination_dir)
    if not destination_dir.is_dir():
        raise HTTPException(status_code=400, detail="Destination is not a directory")
    sources = select_paths_or_current(payload.paths, payload.destination_dir)
    copied = [rel_from_home(copy_entry(source, destination_dir)) for source in sources]
    return {"ok": True, "copied": copied}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/move")
async def api_move(request: Request, payload: PathsPayload) -> dict:
    """Move every selected entry into ``payload.destination_dir``."""
    check_origin(request)
    check_csrf(request)
    if payload.destination_dir is None:
        raise HTTPException(status_code=400, detail="Missing destination_dir")
    destination_dir = resolve_user_path(payload.destination_dir)
    if not destination_dir.is_dir():
        raise HTTPException(status_code=400, detail="Destination is not a directory")
    sources = select_paths_or_current(payload.paths, payload.destination_dir)
    moved = [rel_from_home(move_entry(source, destination_dir)) for source in sources]
    return {"ok": True, "moved": moved}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/delete")
async def api_delete(request: Request, payload: DeletePayload) -> dict:
    """Delete the selection — into ~/.Trash by default, or permanently.

    The home root itself is always refused.
    """
    check_origin(request)
    check_csrf(request)
    targets = select_paths_or_current(payload.paths, "")
    deleted = []
    for target in targets:
        if target == HOME_ROOT:
            raise HTTPException(status_code=400, detail="Refusing to delete home root")
        if payload.mode == "trash":
            deleted.append(str(move_to_trash(target)))
            continue
        # Permanent delete: recursive for directories, unlink for files.
        if target.is_dir():
            shutil.rmtree(target)
        else:
            target.unlink()
        deleted.append(rel_from_home(target))
    return {"ok": True, "mode": payload.mode, "deleted": deleted}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/command")
async def api_command(request: Request, payload: CommandPayload) -> dict:
    """Validate origin and CSRF, then dispatch to the safe command runner."""
    check_origin(request)
    check_csrf(request)
    return run_command(payload.command, payload.cwd)
|
||||||
|
|
||||||
|
|
||||||
|
@app.exception_handler(HTTPException)
async def http_exception_handler(_: Request, exc: HTTPException):
    """Render HTTPExceptions in the API's uniform {ok, detail} envelope."""
    body = {"ok": False, "detail": exc.detail}
    return JSONResponse(status_code=exc.status_code, content=body)
|
||||||
|
|
||||||
|
|
||||||
|
@app.exception_handler(Exception)
async def unhandled_exception_handler(_: Request, exc: Exception):
    """Last-resort handler: 500 with an HTML-escaped error message."""
    detail = html.escape(str(exc))
    return JSONResponse(status_code=500, content={"ok": False, "detail": detail})
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
fastapi>=0.128.8,<1.0
|
||||||
|
uvicorn>=0.39,<1.0
|
||||||
|
jinja2>=3.1.6,<4.0
|
||||||
|
python-multipart>=0.0.22,<1.0
|
||||||
Executable
+27
@@ -0,0 +1,27 @@
|
|||||||
|
#!/usr/bin/env bash
# Bootstrap and run Finder Commander in a local virtualenv.
set -euo pipefail
cd "$(dirname "$0")"

# Prefer python3.14 when installed, otherwise fall back to the default python3.
if command -v python3.14 >/dev/null 2>&1; then
  PYTHON_BIN=python3.14
else
  PYTHON_BIN=python3
fi

echo "Using Python: $($PYTHON_BIN --version 2>&1)"

# Recreate .venv when its interpreter's major.minor differs from the target
# interpreter's, so a stale venv never pins an old Python version.
TARGET_MM=$("$PYTHON_BIN" -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")')
CURRENT_MM=""
if [ -x .venv/bin/python ]; then
  CURRENT_MM=$(.venv/bin/python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")')
fi

if [ ! -d .venv ] || [ "$CURRENT_MM" != "$TARGET_MM" ]; then
  rm -rf .venv
  "$PYTHON_BIN" -m venv .venv
fi

source .venv/bin/activate
python -m pip install --upgrade pip
python -m pip install -r requirements.txt
# Bind to loopback only: the app has no auth beyond its per-process CSRF token.
exec python -m uvicorn app.main:app --host 127.0.0.1 --port 8765
|
||||||
@@ -54,9 +54,12 @@ Success:
|
|||||||
Conflict (`already_exists`) + invalid name (`invalid_request`) gebruiken dezelfde error-shape als mkdir.
|
Conflict (`already_exists`) + invalid name (`invalid_request`) gebruiken dezelfde error-shape als mkdir.
|
||||||
|
|
||||||
### `POST /api/files/delete`
|
### `POST /api/files/delete`
|
||||||
Success:
|
Success (202):
|
||||||
```json
|
```json
|
||||||
{ "path": "storage1/parent/file_or_empty_dir" }
|
{
|
||||||
|
"task_id": "<uuid>",
|
||||||
|
"status": "queued"
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Non-empty directory:
|
Non-empty directory:
|
||||||
@@ -74,6 +77,7 @@ Non-empty directory:
|
|||||||
|
|
||||||
### `POST /api/files/copy`
|
### `POST /api/files/copy`
|
||||||
### `POST /api/files/move`
|
### `POST /api/files/move`
|
||||||
|
### `POST /api/files/delete`
|
||||||
Success (202):
|
Success (202):
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
@@ -82,6 +86,13 @@ Success (202):
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- Batch move is supported as one task-based operation via `{ "sources": [...], "destination_base": "..." }`.
|
||||||
|
- Cross-root batch move is supported for file-only selections.
|
||||||
|
- Cross-root batch move with any directory in the selection remains unsupported in v1.
|
||||||
|
- Batch delete is supported as one task-based operation via `{ "paths": [...], "recursive_paths": [...] }`.
|
||||||
|
- Single delete remains supported via `{ "path": "...", "recursive": true|false }`.
|
||||||
|
|
||||||
## Tasks read endpoints
|
## Tasks read endpoints
|
||||||
|
|
||||||
### `GET /api/tasks`
|
### `GET /api/tasks`
|
||||||
@@ -125,6 +136,53 @@ Response shape:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Voor task-based file-actions `copy`, `move`, `duplicate` en `delete` betekenen progressvelden:
|
||||||
|
- `done_items`: aantal volledig verwerkte bestanden
|
||||||
|
- `total_items`: exact aantal te verwerken bestanden in de hele task
|
||||||
|
- `current_item`: taakrelatief bestandspad als beschikbaar, anders bestandsnaam
|
||||||
|
|
||||||
|
Voor `move` geldt een expliciete uitzondering:
|
||||||
|
- file-gebaseerde move-paden rapporteren file-progress
|
||||||
|
- same-root directory moves behouden directe rename-semantiek en rapporteren daarom grovere item-progress per directory-operatie
|
||||||
|
|
||||||
|
Voor `delete` geldt:
|
||||||
|
- recursive delete van directorytrees rapporteert file-progress per verwijderd bestand
|
||||||
|
- lege mappen of directory-only deletes houden `done_items = 0`, `total_items = 0` en gebruiken geen kunstmatige file-teller
|
||||||
|
|
||||||
|
### `POST /api/tasks/{task_id}/cancel`
|
||||||
|
Success for cancellable file-action task:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "<uuid>",
|
||||||
|
"operation": "copy",
|
||||||
|
"status": "cancelling",
|
||||||
|
"source": "2 items",
|
||||||
|
"destination": "storage1/dest",
|
||||||
|
"done_bytes": null,
|
||||||
|
"total_bytes": null,
|
||||||
|
"done_items": 0,
|
||||||
|
"total_items": 2,
|
||||||
|
"current_item": "storage1/a.txt",
|
||||||
|
"failed_item": null,
|
||||||
|
"error_code": null,
|
||||||
|
"error_message": null,
|
||||||
|
"created_at": "2026-03-10T10:00:00Z",
|
||||||
|
"started_at": "2026-03-10T10:00:01Z",
|
||||||
|
"finished_at": null
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Not cancellable:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "task_not_cancellable",
|
||||||
|
"message": "Task cannot be cancelled",
|
||||||
|
"details": { "task_id": "<uuid>", "status": "completed" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
Task not found:
|
Task not found:
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -0,0 +1,247 @@
|
|||||||
|
# Built-in Themes v1.1
|
||||||
|
|
||||||
|
## 1. Theme families voor v1
|
||||||
|
Aanbevolen built-in theme families voor v1.1:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
|
||||||
|
Deze set is klein genoeg om beheersbaar te blijven en groot genoeg om visueel zinvolle keuze te bieden zonder een theme-explosie te veroorzaken.
|
||||||
|
|
||||||
|
## 2. LCARS
|
||||||
|
`lcars` hoort expliciet niet in v1.1.
|
||||||
|
|
||||||
|
Aanbevolen behandeling:
|
||||||
|
- niet opnemen in deze slice
|
||||||
|
- later als aparte v2 onderzoeken
|
||||||
|
|
||||||
|
Reden:
|
||||||
|
- `lcars` is visueel extreem uitgesproken
|
||||||
|
- hoger risico op regressie in leesbaarheid, functiebalkgebruik, row states, modals en paneelcontrast
|
||||||
|
- grotere kans dat componentstructuur visueel gaat knellen tegen de huidige dual-pane workflow
|
||||||
|
|
||||||
|
Conclusie:
|
||||||
|
- `lcars` beter behandelen als aparte latere theme-slice met eigen UX-validatie
|
||||||
|
|
||||||
|
## 3. Theme-model
|
||||||
|
Het bestaande model blijft leidend:
|
||||||
|
- `selected_theme` = theme family
|
||||||
|
- `selected_color_mode` = `dark` of `light`
|
||||||
|
|
||||||
|
Beide blijven persistent opgeslagen in SQLite settings.
|
||||||
|
|
||||||
|
Voorbeelden van effectieve combinaties:
|
||||||
|
- `default-dark`
|
||||||
|
- `default-light`
|
||||||
|
- `macos-soft-dark`
|
||||||
|
- `macos-soft-light`
|
||||||
|
- `midnight-dark`
|
||||||
|
- `graphite-light`
|
||||||
|
- `windows11-dark`
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
- theme family en color mode blijven twee gescheiden concepten
|
||||||
|
- de hoofdinterface-toggle blijft alleen `selected_color_mode` wisselen
|
||||||
|
- `Settings > Interface` blijft alleen `selected_theme` beheren
|
||||||
|
|
||||||
|
## 4. CSS-architectuur
|
||||||
|
Voorkeursrichting voor v1.1:
|
||||||
|
- één gedeelde `base.css` voor:
|
||||||
|
- layout
|
||||||
|
- spacing
|
||||||
|
- componentstructuur
|
||||||
|
- modals
|
||||||
|
- panelen
|
||||||
|
- functiebalk
|
||||||
|
- lijst/tabelstructuur
|
||||||
|
- algemene componentbasis
|
||||||
|
- aparte CSS-bestanden per theme-family:
|
||||||
|
- `theme-default.css`
|
||||||
|
- `theme-macos-soft.css`
|
||||||
|
- `theme-midnight.css`
|
||||||
|
- `theme-graphite.css`
|
||||||
|
- `theme-windows11.css`
|
||||||
|
- light/dark binnen dezelfde family geregeld via selectors of tokens
|
||||||
|
|
||||||
|
Dus expliciet niet:
|
||||||
|
- één enorm all-in-one CSS-bestand met alles door elkaar
|
||||||
|
- per `theme+mode` een volledig apart layoutbestand
|
||||||
|
|
||||||
|
Aanbevolen structuur:
|
||||||
|
- `base.css`
|
||||||
|
- `theme-default.css`
|
||||||
|
- `theme-macos-soft.css`
|
||||||
|
- `theme-midnight.css`
|
||||||
|
- `theme-graphite.css`
|
||||||
|
- `theme-windows11.css`
|
||||||
|
|
||||||
|
Binnen elk theme-family bestand:
|
||||||
|
- alleen tokens en beperkte theme-afwerking
|
||||||
|
- geen duplicatie van layoutregels
|
||||||
|
- geen alternatieve componentstructuur
|
||||||
|
|
||||||
|
Voorbeeldselector-richting:
|
||||||
|
- `:root[data-theme-family="macos-soft"][data-color-mode="dark"] { ... }`
|
||||||
|
- `:root[data-theme-family="macos-soft"][data-color-mode="light"] { ... }`
|
||||||
|
|
||||||
|
Of equivalent via custom properties.
|
||||||
|
|
||||||
|
## 5. Waarom deze architectuur onderhoudbaarder is
|
||||||
|
Deze architectuur is onderhoudbaarder omdat hij drie dingen schoon van elkaar scheidt:
|
||||||
|
- wat de UI is: layout en componentstructuur in `base.css`
|
||||||
|
- welke stijl-family actief is: family-bestand
|
||||||
|
- welke kleurmodus actief is: dark/light tokens binnen die family
|
||||||
|
|
||||||
|
Voordelen:
|
||||||
|
- layout-CSS staat op één plek
|
||||||
|
- theme-bestanden blijven klein en thematisch leesbaar
|
||||||
|
- regressies zijn makkelijker te isoleren per family
|
||||||
|
- nieuwe family toevoegen vereist minder risico op breken van bestaande layout
|
||||||
|
- dark/light binnen één family blijft samenhangend beheerd
|
||||||
|
|
||||||
|
Hoe duplicatie van layout-CSS voorkomen wordt:
|
||||||
|
- `base.css` bevat alle structurele regels
|
||||||
|
- family-bestanden overschrijven alleen tokens en kleine visuele accenten
|
||||||
|
- geen herhaling van paneelgrid, modal layout, functiebalkstructuur of lijstlayout per family
|
||||||
|
|
||||||
|
Hoe theme switching schoon blijft:
|
||||||
|
- frontend hoeft alleen `selected_theme` en `selected_color_mode` te lezen
|
||||||
|
- die waarden vertalen naar theme-attributen op `document.documentElement`
|
||||||
|
- CSS doet de rest via selectors/custom properties
|
||||||
|
- geen runtime CSS-generatie nodig
|
||||||
|
- geen vrije bestandsselectie
|
||||||
|
- geen ingewikkelde asset-resolutie
|
||||||
|
|
||||||
|
## 6. Visuele richting per theme
|
||||||
|
Alle themes behouden exact dezelfde layout en informatiearchitectuur. Alleen visuele stijl verschilt.
|
||||||
|
|
||||||
|
### `default`
|
||||||
|
Doel:
|
||||||
|
- neutrale baseline
|
||||||
|
- functioneel
|
||||||
|
- rustig
|
||||||
|
- compact
|
||||||
|
|
||||||
|
Karakter:
|
||||||
|
- de huidige standaardstijl, iets verfijnd maar niet uitgesproken
|
||||||
|
|
||||||
|
### `macos-soft`
|
||||||
|
Doel:
|
||||||
|
- zacht
|
||||||
|
- verfijnd
|
||||||
|
- vriendelijk
|
||||||
|
|
||||||
|
Karakter:
|
||||||
|
- subtiele surfaces
|
||||||
|
- zachte grijstinten
|
||||||
|
- lichte premium desktop-app indruk
|
||||||
|
- iets vriendelijkere rounding/shadows, zonder layout te veranderen
|
||||||
|
|
||||||
|
### `midnight`
|
||||||
|
Doel:
|
||||||
|
- donker
|
||||||
|
- gefocust
|
||||||
|
- rustig
|
||||||
|
|
||||||
|
Karakter:
|
||||||
|
- diepere donkere panelen
|
||||||
|
- koele accenten
|
||||||
|
- sterke maar nette current/selected contrasten
|
||||||
|
- geschikt voor langdurig gebruik in dark mode
|
||||||
|
|
||||||
|
### `graphite`
|
||||||
|
Doel:
|
||||||
|
- sober
|
||||||
|
- professioneel
|
||||||
|
- bijna monochroom
|
||||||
|
|
||||||
|
Karakter:
|
||||||
|
- grijs-gedreven palette
|
||||||
|
- minimale accentkleur
|
||||||
|
- contrast via luminantie in plaats van felle tinten
|
||||||
|
|
||||||
|
### `windows11`
|
||||||
|
Doel:
|
||||||
|
- helder
|
||||||
|
- modern
|
||||||
|
- clean desktop-app gevoel
|
||||||
|
|
||||||
|
Karakter:
|
||||||
|
- lichtere surfaces
|
||||||
|
- subtiele border/surface scheiding
|
||||||
|
- iets luchtiger accentgebruik
|
||||||
|
- behoud van compacte file-manager ergonomie
|
||||||
|
|
||||||
|
## 7. Settings UI
|
||||||
|
`Settings > Interface` toont een dropdown/select met de theme families:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
|
||||||
|
Dark/light blijft via de bestaande toggle in de hoofdinterface.
|
||||||
|
|
||||||
|
Dus:
|
||||||
|
- Interface tab = keuze van style family
|
||||||
|
- Main interface toggle = keuze van color mode
|
||||||
|
|
||||||
|
Geen extra theme-complexiteit in v1.1:
|
||||||
|
- geen preview gallery
|
||||||
|
- geen import/export
|
||||||
|
- geen vrije CSS-keuze
|
||||||
|
- geen uploads
|
||||||
|
|
||||||
|
## 8. Backend-impact
|
||||||
|
Backend-aanpassing blijft klein:
|
||||||
|
- whitelist van `selected_theme` uitbreiden met:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
- `selected_color_mode` blijft bestaan zoals nu
|
||||||
|
- geen vrije css-bestandskeuze
|
||||||
|
- geen uploadmechanisme
|
||||||
|
- geen nieuwe dependencies
|
||||||
|
|
||||||
|
De bestaande settings-opslag en settings-API kunnen verder hetzelfde model blijven gebruiken.
|
||||||
|
|
||||||
|
## 9. Regressierisico
|
||||||
|
Belangrijkste risico’s:
|
||||||
|
- leesbaarheid per theme/mode
|
||||||
|
- current row / selected row contrast te zwak of te hard
|
||||||
|
- modals die in bepaalde families te vlak worden
|
||||||
|
- functiebalk die visueel wegvalt
|
||||||
|
- editor/viewers die niet goed mee themen
|
||||||
|
- thumbnail/icon-slot met te weinig contrast
|
||||||
|
- CSS-fragmentatie als family-bestanden toch structurele regels gaan bevatten
|
||||||
|
- duplicatie als layout-regels alsnog in family-bestanden belanden
|
||||||
|
|
||||||
|
Belangrijk mitigatieprincipe:
|
||||||
|
- family-bestanden beperken tot visuele tokens en kleine afwerkingsregels
|
||||||
|
- geen layout-overrides per family
|
||||||
|
- consistent regressietesten van dezelfde states in alle families
|
||||||
|
|
||||||
|
## 10. Aanbeveling
|
||||||
|
Aanbevolen richting voor deze app:
|
||||||
|
- ja, `base.css` + aparte CSS per theme-family is de juiste richting
|
||||||
|
|
||||||
|
Waarom:
|
||||||
|
- laag regressierisico
|
||||||
|
- duidelijke scheiding tussen structuur en uiterlijk
|
||||||
|
- onderhoudbaar bij toekomstige uitbreiding
|
||||||
|
- sluit aan op het bestaande model van `selected_theme` + `selected_color_mode`
|
||||||
|
- voorkomt zowel een gigantisch all-in-one stylesheet als onnodige duplicatie per `theme+mode`
|
||||||
|
|
||||||
|
Expliciete aanbevelingen:
|
||||||
|
- gebruik gedeelde basis-CSS voor layout/componentstructuur
|
||||||
|
- gebruik aparte CSS per theme-family
|
||||||
|
- regel dark/light binnen dezelfde family via selectors/tokens
|
||||||
|
- behandel `lcars` expliciet als aparte latere slice
|
||||||
|
|
||||||
|
Conclusie:
|
||||||
|
- deze architectuur is de juiste basis voor built-in themes in deze app
|
||||||
|
- `lcars` moet niet worden meegetrokken in v1.1, maar apart worden ontworpen en gevalideerd
|
||||||
@@ -0,0 +1,284 @@
|
|||||||
|
# Built-in Themes v1
|
||||||
|
|
||||||
|
## 1. Doel
|
||||||
|
Built-in themes voegen nu waarde toe omdat de webui functioneel volwassen genoeg is om visuele voorkeuren relevant te maken zonder dat de workflow eerst nog instabiel is. De huidige app heeft al een vaste dual-pane structuur, modals, functiebalk, viewers en editorflow. Dat maakt het logisch om stijlvarianten toe te voegen zolang de interactie en informatiearchitectuur gelijk blijven.
|
||||||
|
|
||||||
|
Dit past goed binnen de bestaande `Settings > Interface` structuur, omdat theme-keuze daar een stabiele, globale UI-voorkeur is. De bestaande scheiding tussen theme-family en dark/light mode blijft daarbij bruikbaar:
|
||||||
|
- `selected_theme` = stijlset / family
|
||||||
|
- `selected_color_mode` = `dark` of `light` binnen die family
|
||||||
|
|
||||||
|
## 2. Scope
|
||||||
|
Built-in theme families voor v1:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
|
||||||
|
Expliciet niet in v1:
|
||||||
|
- `lcars`
|
||||||
|
- vrije theme-bestanden
|
||||||
|
- upload of filesystem picker
|
||||||
|
- layoutvarianten per theme
|
||||||
|
- component-specifieke thema-engine buiten CSS tokens/selectors
|
||||||
|
|
||||||
|
`lcars` hoort beter in een latere v2-slice. Reden: het is visueel extreem uitgesproken, legt druk op contrast, spacing, functiebalkleesbaarheid en waarschijnlijk ook op de dual-pane ergonomie. Dat is een hoger UX-risico dan de rustige families hierboven.
|
||||||
|
|
||||||
|
## 3. Theme-model
|
||||||
|
Het bestaande settingsmodel blijft leidend:
|
||||||
|
- `selected_theme`: theme-family key
|
||||||
|
- `selected_color_mode`: `dark` of `light`
|
||||||
|
|
||||||
|
Elke built-in family ondersteunt beide modi:
|
||||||
|
- `default-light`
|
||||||
|
- `default-dark`
|
||||||
|
- `macos-soft-light`
|
||||||
|
- `macos-soft-dark`
|
||||||
|
- `midnight-light`
|
||||||
|
- `midnight-dark`
|
||||||
|
- `graphite-light`
|
||||||
|
- `graphite-dark`
|
||||||
|
- `windows11-light`
|
||||||
|
- `windows11-dark`
|
||||||
|
|
||||||
|
De frontend combineert beide settings tot de effectieve UI-state, bijvoorbeeld via een attribuut zoals:
|
||||||
|
- `data-theme="macos-soft-light"`
|
||||||
|
- of combinatie van `data-theme-family` + `data-color-mode`
|
||||||
|
|
||||||
|
Aanbevolen voor v1: beide attributen zetten.
|
||||||
|
Reden: duidelijkere CSS-structuur en minder fragiele string-parsing in selectors.
|
||||||
|
|
||||||
|
Aanbevolen HTML-state:
|
||||||
|
- `data-theme-family="macos-soft"`
|
||||||
|
- `data-color-mode="dark"`
|
||||||
|
|
||||||
|
## 4. CSS-architectuur
|
||||||
|
Aanbevolen richting:
|
||||||
|
- een gedeelde `base.css` of equivalent voor:
|
||||||
|
- layout
|
||||||
|
- spacing
|
||||||
|
- componentstructuur
|
||||||
|
- modals
|
||||||
|
- panelen
|
||||||
|
- functiebalk
|
||||||
|
- tabel/lijststructuur
|
||||||
|
- algemene componentbasis
|
||||||
|
- aparte CSS-bestanden per theme-family:
|
||||||
|
- `theme-default.css`
|
||||||
|
- `theme-macos-soft.css`
|
||||||
|
- `theme-midnight.css`
|
||||||
|
- `theme-graphite.css`
|
||||||
|
- `theme-windows11.css`
|
||||||
|
- light/dark binnen dezelfde family regelen met selectors en tokens in dat family-bestand
|
||||||
|
|
||||||
|
Voorbeeldrichting:
|
||||||
|
- `base.css`
|
||||||
|
- `theme-default.css`
|
||||||
|
- `theme-macos-soft.css`
|
||||||
|
- `theme-midnight.css`
|
||||||
|
- `theme-graphite.css`
|
||||||
|
- `theme-windows11.css`
|
||||||
|
|
||||||
|
In elk family-bestand staan alleen tokens en beperkte theme-specifieke afwerkingen, bijvoorbeeld:
|
||||||
|
- background colors
|
||||||
|
- surface colors
|
||||||
|
- border colors
|
||||||
|
- accent colors
|
||||||
|
- selection/current row tuning
|
||||||
|
- shadow/radius tuning waar nodig
|
||||||
|
|
||||||
|
Niet in theme-bestanden:
|
||||||
|
- grid-structuur
|
||||||
|
- flex-layout van panelen
|
||||||
|
- componentmarkup-afhankelijke layoutlogica
|
||||||
|
- duplicatie van modals/paneel/functiebalk CSS
|
||||||
|
|
||||||
|
Waarom dit onderhoudbaarder is dan één groot CSS-bestand:
|
||||||
|
- theme-logica blijft per family lokaal leesbaar
|
||||||
|
- layout en componentstructuur blijven centraal
|
||||||
|
- minder kans dat een nieuwe family per ongeluk core layout overschrijft
|
||||||
|
- eenvoudiger regressietesten per family
|
||||||
|
- duidelijkere grens tussen “wat is de UI” en “hoe ziet de UI eruit”
|
||||||
|
|
||||||
|
Waarom ook niet per theme+mode volledig losse bestanden:
|
||||||
|
- te veel duplicatie
|
||||||
|
- onnodig onderhoud van dark/light varianten
|
||||||
|
- grotere kans op drift tussen light en dark binnen dezelfde family
|
||||||
|
|
||||||
|
## 5. Visuele richting per theme
|
||||||
|
Alle themes behouden exact dezelfde layout en componentstructuur. Alleen styling verschilt.
|
||||||
|
|
||||||
|
### `default`
|
||||||
|
Huidige neutrale baseline.
|
||||||
|
- rustig
|
||||||
|
- compact
|
||||||
|
- functioneel
|
||||||
|
- donkere modus als primaire baseline
|
||||||
|
- lichte modus als nette tegenhanger
|
||||||
|
|
||||||
|
### `macos-soft`
|
||||||
|
Doel: zachter, verfijnder, subtiel premium.
|
||||||
|
- lichtere surfaces
|
||||||
|
- subtiele separators
|
||||||
|
- iets zachtere contrasten
|
||||||
|
- afgeronde panelen/modals iets vriendelijker, maar niet groter
|
||||||
|
- ingetogen blauw/grijs accent
|
||||||
|
|
||||||
|
### `midnight`
|
||||||
|
Doel: donker, gefocust, licht dramatisch maar nog rustig.
|
||||||
|
- diepe donkere oppervlakken
|
||||||
|
- koele blauwe accenten
|
||||||
|
- duidelijke current row / selected row contrasten
|
||||||
|
- geschikt voor langdurig gebruik in donkere modus
|
||||||
|
|
||||||
|
### `graphite`
|
||||||
|
Doel: sober, professioneel, bijna monochroom.
|
||||||
|
- neutraal grijs systeem
|
||||||
|
- minimale accentkleur
|
||||||
|
- contrast via value shifts in plaats van kleurigheid
|
||||||
|
- goed voor gebruikers die een stille UI willen
|
||||||
|
|
||||||
|
### `windows11`
|
||||||
|
Doel: helder, modern, iets luchtiger.
|
||||||
|
- zachtere paneelsurfaces
|
||||||
|
- subtiele border+surface lagen
|
||||||
|
- lichtblauw accent
|
||||||
|
- iets meer "clean desktop app" gevoel zonder de layout te veranderen
|
||||||
|
|
||||||
|
## 6. Settings UI
|
||||||
|
`Settings > Interface` toont een dropdown/select met alleen de built-in theme families:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
|
||||||
|
Dark/light blijft in de hoofdinterface via de bestaande toggle.
|
||||||
|
Die toggle blijft dus een snelle dagelijkse keuze voor kleurmodus, niet voor theme-family.
|
||||||
|
|
||||||
|
Geen extra complexiteit in v1:
|
||||||
|
- geen preview gallery
|
||||||
|
- geen screenshot previews
|
||||||
|
- geen themetekstblokken met uitgebreide beschrijvingen
|
||||||
|
- geen extra subinstellingen per theme
|
||||||
|
|
||||||
|
## 7. Backend-impact
|
||||||
|
Backend-aanpassing is beperkt:
|
||||||
|
- whitelist voor `selected_theme` uitbreiden van alleen `default` naar:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
- `selected_color_mode` blijft:
|
||||||
|
- `dark`
|
||||||
|
- `light`
|
||||||
|
- settings-opslagmodel blijft verder gelijk
|
||||||
|
- geen nieuwe dependency
|
||||||
|
- geen vrije filesystemtoegang
|
||||||
|
|
||||||
|
Dit is laag risico omdat alleen validatie van settings-uitbreiding nodig is; de settings-API en SQLite-opslag bestaan al.
|
||||||
|
|
||||||
|
## 8. Frontend-impact
|
||||||
|
Aanbevolen organisatie:
|
||||||
|
- `base.css` blijft altijd geladen
|
||||||
|
- alle family-bestanden worden ook geladen, maar zijn strikt gescoped op theme selectors
|
||||||
|
- of dynamisch geladen/swapped, als dat later nodig blijkt
|
||||||
|
|
||||||
|
Aanbevolen v1-richting: alle theme CSS-bestanden statisch laden, maar strikt scopen.
|
||||||
|
Reden:
|
||||||
|
- eenvoudiger startup
|
||||||
|
- minder runtime asset-wisselcomplexiteit
|
||||||
|
- minder kans op flash of incomplete styling
|
||||||
|
- aanvaardbaar zolang het aantal families klein blijft
|
||||||
|
|
||||||
|
Selector-richting:
|
||||||
|
- `:root[data-theme-family="macos-soft"][data-color-mode="dark"] { ... }`
|
||||||
|
- `:root[data-theme-family="macos-soft"][data-color-mode="light"] { ... }`
|
||||||
|
|
||||||
|
Of equivalent met custom properties:
|
||||||
|
- family-bestanden vullen tokens op basis van family+mode
|
||||||
|
- `base.css` gebruikt alleen tokens
|
||||||
|
|
||||||
|
Aanbevolen toepassing:
|
||||||
|
- startup leest `selected_theme` en `selected_color_mode`
|
||||||
|
- zet beide attributen vroeg op `document.documentElement`
|
||||||
|
- bestaande toggle wijzigt alleen `data-color-mode`
|
||||||
|
- Interface settings wijzigen alleen `data-theme-family`
|
||||||
|
|
||||||
|
Dit houdt startup en theme-switching schoon en voorspelbaar.
|
||||||
|
|
||||||
|
## 9. Regressierisico
|
||||||
|
Belangrijkste risico’s:
|
||||||
|
- leesbaarheid en contrast per family/mode
|
||||||
|
- current row / selected row onvoldoende onderscheid
|
||||||
|
- actieve paneelrand te zwak of te dominant
|
||||||
|
- modals en functiebalk die in sommige themes te vlak worden
|
||||||
|
- thumbnail/icon-slot die wegvalt tegen achtergrond
|
||||||
|
- CSS-fragmentatie als family-bestanden toch layoutregels gaan bevatten
|
||||||
|
- editor/viewers die visueel uit de toon vallen als tokens niet breed genoeg zijn
|
||||||
|
|
||||||
|
Belangrijk mitigatieprincipe:
|
||||||
|
- theme-bestanden mogen alleen token- en lichte afwerkingsverschillen bevatten
|
||||||
|
- geen layout overrides per family
|
||||||
|
- smoke-validatie en handmatige check op alle states:
|
||||||
|
- normal row
|
||||||
|
- current row
|
||||||
|
- selected row
|
||||||
|
- current+selected
|
||||||
|
- inactive selected
|
||||||
|
- modal
|
||||||
|
- functiebalk
|
||||||
|
- editor/viewers
|
||||||
|
|
||||||
|
## 10. Teststrategie
|
||||||
|
### Backend golden tests
|
||||||
|
- whitelist accepteert alle built-in themes
|
||||||
|
- ongeldige theme key blijft geblokkeerd
|
||||||
|
- `selected_color_mode` gedrag blijft intact
|
||||||
|
- fallback naar `default` en `dark` blijft correct
|
||||||
|
|
||||||
|
### UI smoke/regressietests
|
||||||
|
- `Settings > Interface` bevat alle built-in theme opties
|
||||||
|
- startup leest theme + color mode uit backend
|
||||||
|
- hoofdinterface dark/light toggle blijft bestaan
|
||||||
|
- data-attributen of equivalent theme-state worden correct toegepast
|
||||||
|
- modals, functiebalk en panelen blijven renderen onder theme-switches
|
||||||
|
|
||||||
|
### Handmatige validatie
|
||||||
|
Per family in light en dark:
|
||||||
|
- panel readability
|
||||||
|
- current row zichtbaarheid
|
||||||
|
- selected row zichtbaarheid
|
||||||
|
- inactive pane selectie
|
||||||
|
- viewer/editor modal contrast
|
||||||
|
- thumbnail/icon-slot contrast
|
||||||
|
- functiebalk leesbaarheid
|
||||||
|
- settings modal tabs en form controls
|
||||||
|
|
||||||
|
## 11. Aanbeveling
|
||||||
|
Aanbevolen v1-richting met laag regressierisico:
|
||||||
|
- built-in themes alleen als veilige whitelist keys
|
||||||
|
- families:
|
||||||
|
- `default`
|
||||||
|
- `macos-soft`
|
||||||
|
- `midnight`
|
||||||
|
- `graphite`
|
||||||
|
- `windows11`
|
||||||
|
- `lcars` expliciet uitstellen naar een aparte latere theme-slice
|
||||||
|
- architectuur:
|
||||||
|
- gedeelde `base.css`
|
||||||
|
- aparte CSS per theme-family
|
||||||
|
- dark/light binnen elke family via selectors/tokens
|
||||||
|
|
||||||
|
Expliciete beoordeling van de voorgestelde architectuur:
|
||||||
|
- `gedeelde base.css`
|
||||||
|
- `aparte CSS per theme-family`
|
||||||
|
|
||||||
|
Dit is de juiste richting voor deze app.
|
||||||
|
Reden:
|
||||||
|
- houdt layout en theming schoon gescheiden
|
||||||
|
- voorkomt één onleesbaar gigantisch CSS-bestand
|
||||||
|
- voorkomt ook duplicatie van complete layoutbestanden per theme+mode
|
||||||
|
- past goed bij de bestaande settings-architectuur met `selected_theme` + `selected_color_mode`
|
||||||
|
- blijft onderhoudbaar als later nog 1-3 families bijkomen
|
||||||
@@ -0,0 +1,220 @@
|
|||||||
|
# Folder Upload v1 Design
|
||||||
|
|
||||||
|
## 1. Doel
|
||||||
|
|
||||||
|
Folder upload voegt waarde toe omdat de huidige uploadflow al bruikbaar is voor losse bestanden en batches, maar niet voor veelvoorkomende workflows waarbij een gebruiker een complete lokale mapstructuur naar de storage wil kopiëren. Dat past logisch binnen de bestaande dual-pane workflow: het actieve paneel bepaalt al de doelmap, en upload is al een expliciete actie in de functiebalk.
|
||||||
|
|
||||||
|
De kern van v1 is niet "een nieuwe uploadarchitectuur", maar een gecontroleerde uitbreiding van de bestaande uploadflow zodat een lokale map recursief kan worden ingestuurd naar `currentPath` van het actieve paneel.
|
||||||
|
|
||||||
|
## 2. Scope
|
||||||
|
|
||||||
|
Folder Upload v1 ondersteunt expliciet:
|
||||||
|
- selectie van precies een lokale map via de browser
|
||||||
|
- recursieve upload van de inhoud van die map
|
||||||
|
- behoud van directorystructuur onder het gekozen doelpad
|
||||||
|
- target = `currentPath` van het actieve paneel
|
||||||
|
- hergebruik van de bestaande sequentiële uploadflow en bestaande conflictopties
|
||||||
|
|
||||||
|
Niet in scope voor v1:
|
||||||
|
- meerdere lokale mappen tegelijk
|
||||||
|
- drag & drop
|
||||||
|
- resumable upload
|
||||||
|
- chunked upload
|
||||||
|
- taskmodel-integratie
|
||||||
|
- rollback
|
||||||
|
- backendherontwerp buiten wat strikt nodig is om directorystructuur veilig te ondersteunen
|
||||||
|
|
||||||
|
Aanbevolen v1-scope met laag regressierisico:
|
||||||
|
- precies 1 geselecteerde lokale map
|
||||||
|
- recursieve upload van alle files daaronder
|
||||||
|
- directorystructuur behouden
|
||||||
|
- conflictbehandeling alleen op bestandsniveau via bestaande keuzes
|
||||||
|
|
||||||
|
## 3. Browserselectie
|
||||||
|
|
||||||
|
Browsermatig is folderselectie geen aparte native "map upload API" zoals bij desktop-apps, maar een file input met directory-selectie-attributen zoals `webkitdirectory`. In de praktijk levert dit een lijst bestanden op met relatieve paden binnen de gekozen map.
|
||||||
|
|
||||||
|
Dit past redelijk goed bij de bestaande native file picker flow:
|
||||||
|
- huidige uploadknop opent al een browser file picker
|
||||||
|
- voor folder upload kan een aparte, kleine flow dezelfde picker gebruiken, maar dan in directory-selectiemodus
|
||||||
|
- drag & drop is niet nodig voor v1
|
||||||
|
|
||||||
|
Aanbeveling:
|
||||||
|
- v1 gebruikt browser-native directory picker via input-attributen
|
||||||
|
- geen drag & drop
|
||||||
|
- geen extra dependency
|
||||||
|
|
||||||
|
## 4. Doelstructuur
|
||||||
|
|
||||||
|
De veiligste en meest voorspelbare semantiek voor v1 is:
|
||||||
|
- de geselecteerde mapnaam zelf wordt meegenomen in de doelstructuur
|
||||||
|
- dus upload van lokale map `Photos/` naar target `/Volumes/8TB/Uploads` resulteert in:
|
||||||
|
- `/Volumes/8TB/Uploads/Photos/...`
|
||||||
|
|
||||||
|
Dit voorkomt ambiguïteit en sluit aan op gebruikersverwachting uit file managers.
|
||||||
|
|
||||||
|
Relatieve paden:
|
||||||
|
- browser levert per bestand een relatief pad onder de gekozen rootmap
|
||||||
|
- frontend mag dat relatieve pad gebruiken als beschrijving van directorystructuur
|
||||||
|
- backend mag die structuur nooit blind vertrouwen zonder per segment validatie
|
||||||
|
|
||||||
|
Aanbevolen semantiek:
|
||||||
|
- geselecteerde mapnaam opnemen
|
||||||
|
- directorystructuur daaronder behouden
|
||||||
|
- alle relatieve padsegmenten strikt normaliseren en valideren
|
||||||
|
|
||||||
|
## 5. Conflictgedrag
|
||||||
|
|
||||||
|
Conflictgedrag moet in v1 voortbouwen op de bestaande uploadconflictflow.
|
||||||
|
|
||||||
|
### Bestandsconflicten
|
||||||
|
Bij een bestaand doelbestand:
|
||||||
|
- `Overwrite`: huidig bestand overschrijven
|
||||||
|
- `Overwrite all`: huidige en volgende bestandsconflicten overschrijven
|
||||||
|
- `Skip`: huidig bestand overslaan
|
||||||
|
- `Skip all`: huidige en volgende bestandsconflicten overslaan
|
||||||
|
- `Cancel`: resterende upload stoppen
|
||||||
|
|
||||||
|
### Directoryconflicten
|
||||||
|
Directoryconflict is subtieler. Als de doelmap al bestaat en ook een directory is, hoeft dat in v1 geen fout te zijn. Dat is juist het normale mechanisme om inhoud in een bestaande mapstructuur te laten landen.
|
||||||
|
|
||||||
|
Aanbevolen v1-regel:
|
||||||
|
- bestaande doel-directory: toegestaan, geen conflictmodal
|
||||||
|
- bestaande doel-directory fungeert als containermap voor verdere recursie
|
||||||
|
|
||||||
|
### Typeconflicten
|
||||||
|
Als een padsegment een typeconflict veroorzaakt, bijvoorbeeld:
|
||||||
|
- lokale structuur verwacht een directory
|
||||||
|
- maar op bestemming bestaat daar een file
|
||||||
|
|
||||||
|
Dan moet dit als conflict/failure behandeld worden. De bestaande conflictknoppen kunnen dan alleen zinnig worden toegepast als overschrijven echt veilig definieerbaar is. Voor v1 is dat te riskant op directoryniveau.
|
||||||
|
|
||||||
|
Aanbevolen v1-regel:
|
||||||
|
- typeconflict directory-versus-file niet proberen slim op te lossen
|
||||||
|
- behandel als blokkade/failure voor het huidige bestand
|
||||||
|
- laat bestaande flow stoppen of conflictueel handelen op bestandsniveau, maar niet op "directory vervangen"
|
||||||
|
|
||||||
|
## 6. Backend-impact
|
||||||
|
|
||||||
|
De bestaande backend uploadbasis is grotendeels herbruikbaar voor de feitelijke bestandsoverdracht, maar folder upload heeft waarschijnlijk extra backendondersteuning nodig voor directorystructuur.
|
||||||
|
|
||||||
|
Het bestaande endpoint ondersteunt nu:
|
||||||
|
- 1 file per request
|
||||||
|
- `target_path`
|
||||||
|
- basename-validatie
|
||||||
|
|
||||||
|
Voor folder upload is minimaal een van deze routes nodig:
|
||||||
|
|
||||||
|
### Route A: frontend maakt directories expliciet aan
|
||||||
|
- frontend leest relatieve paden
|
||||||
|
- frontend zorgt eerst dat directories bestaan via bestaand `mkdir` endpoint
|
||||||
|
- daarna uploadt frontend elk bestand naar het juiste `target_path`
|
||||||
|
|
||||||
|
Voordelen:
|
||||||
|
- weinig nieuw backendcontract
|
||||||
|
- hergebruik van bestaande `mkdir` en `upload`
|
||||||
|
|
||||||
|
Nadelen:
|
||||||
|
- meer frontendcoördinatie
|
||||||
|
- meer requests
|
||||||
|
|
||||||
|
### Route B: upload-endpoint accepteert veilige relatieve subpath
|
||||||
|
- per bestand meegeven:
|
||||||
|
- `target_path`
|
||||||
|
- `relative_path`
|
||||||
|
- `file`
|
||||||
|
- backend maakt ontbrekende directories aan na validatie
|
||||||
|
|
||||||
|
Voordelen:
|
||||||
|
- schonere folder-uploadflow
|
||||||
|
- minder frontendcomplexiteit
|
||||||
|
|
||||||
|
Nadelen:
|
||||||
|
- nieuw backendcontract
|
||||||
|
- iets meer validatielogica
|
||||||
|
|
||||||
|
Aanbeveling voor laag regressierisico:
|
||||||
|
- v1 folder upload liever via Route A ontwerpen:
|
||||||
|
- frontend maakt directories expliciet aan via bestaande of lichte `mkdir`-flow
|
||||||
|
- frontend uploadt bestanden daarna via bestaand endpoint
|
||||||
|
- alleen als dat in praktijk te onhandig blijkt, Route B overwegen
|
||||||
|
|
||||||
|
Beide varianten moeten blijven leunen op:
|
||||||
|
- `path_guard`
|
||||||
|
- bestaande whitelist/root-containment
|
||||||
|
- bestaande naamvalidatie per segment
|
||||||
|
|
||||||
|
## 7. Frontend-impact
|
||||||
|
|
||||||
|
De bestaande sequentiële uploadflow kan worden uitgebreid zonder herontwerp:
|
||||||
|
- browser levert lijst bestanden uit de gekozen map
|
||||||
|
- frontend groepeert impliciet op relatieve directorystructuur
|
||||||
|
- frontend zorgt dat doel-directories bestaan
|
||||||
|
- frontend uploadt daarna de files sequentieel
|
||||||
|
|
||||||
|
Voortgang bij veel bestanden:
|
||||||
|
- huidige compacte progress UI kan blijven
|
||||||
|
- tonen:
|
||||||
|
- aantal totaal
|
||||||
|
- huidig bestand
|
||||||
|
- doelpad of huidige relatieve submap indien nuttig
|
||||||
|
- geen zware task-UI nodig in v1
|
||||||
|
|
||||||
|
Aanbevolen v1-richting:
|
||||||
|
- zelfde uploadmodal/progresscomponent als nu
|
||||||
|
- alleen uitbreiden met "uploading folder X to path Y"
|
||||||
|
- geen tweede aparte uploadarchitectuur
|
||||||
|
|
||||||
|
## 8. Regressierisico
|
||||||
|
|
||||||
|
Belangrijkste risico's:
|
||||||
|
- security: relatieve paden uit browser niet blind vertrouwen
|
||||||
|
- diepe mapstructuren: veel requests, langzame voortgang
|
||||||
|
- gedeeltelijke successen/failures: batch kan halverwege stoppen
|
||||||
|
- conflictcomplexiteit: directoryconflicten versus bestandsconflicten
|
||||||
|
- UI-complexiteit: folder upload mag bestaande file upload niet verwarren
|
||||||
|
|
||||||
|
Specifiek risico:
|
||||||
|
- een ogenschijnlijk simpele folder-upload kan ongemerkt uitgroeien tot een mini-sync-engine
|
||||||
|
- dat moet expliciet vermeden worden
|
||||||
|
|
||||||
|
## 9. Teststrategie
|
||||||
|
|
||||||
|
### Backend golden tests
|
||||||
|
Als folder upload later gebouwd wordt, minimaal testen:
|
||||||
|
- create-mkdir-then-upload flow voor nested directorystructuur
|
||||||
|
- traversal blokkade op relatieve padsegmenten
|
||||||
|
- invalid filename segment blokkade
|
||||||
|
- typeconflict file-versus-directory
|
||||||
|
- conflict op bestaand bestand
|
||||||
|
- upload naar bestaande directorystructuur
|
||||||
|
|
||||||
|
### UI smoke/regressietests
|
||||||
|
- folder-upload startpunt aanwezig
|
||||||
|
- progress UI blijft werken
|
||||||
|
- conflictopties blijven intact
|
||||||
|
- actieve-paneel target blijft leidend
|
||||||
|
|
||||||
|
### Handmatige validatie
|
||||||
|
- map met alleen files
|
||||||
|
- map met nested subdirs
|
||||||
|
- map met enkele conflicten
|
||||||
|
- map met typeconflict
|
||||||
|
- lange/brede directorystructuur
|
||||||
|
|
||||||
|
## 10. Aanbeveling
|
||||||
|
|
||||||
|
De aanbevolen v1-richting met laag regressierisico is:
|
||||||
|
- ondersteun precies 1 lokale map
|
||||||
|
- behoud de geselecteerde mapnaam in de doelstructuur
|
||||||
|
- gebruik browser-native directory picker
|
||||||
|
- breid de bestaande sequentiële uploadflow uit in plaats van een nieuwe architectuur te bouwen
|
||||||
|
- houd conflictbehandeling primair op bestandsniveau
|
||||||
|
- behandel bestaande directories als toegestaan
|
||||||
|
- vermijd drag & drop, taskintegratie, chunking en resumable uploads
|
||||||
|
|
||||||
|
Concreet aanbevolen technische richting:
|
||||||
|
- eerst proberen met bestaande architectuur en expliciete directorycreatie vanuit frontend
|
||||||
|
- alleen als dat te fragiel blijkt een kleine backenduitbreiding voor veilige relatieve paden ontwerpen
|
||||||
|
|
||||||
|
Dit houdt folder upload klein, bruikbaar en beheersbaar zonder de bestaande uploadflow opnieuw uit te vinden.
|
||||||
@@ -0,0 +1,217 @@
|
|||||||
|
# IMAGE_VIEWER_AND_INFO_V1.md
|
||||||
|
|
||||||
|
## 1. Doel
|
||||||
|
|
||||||
|
Een volledige image viewer voegt nu directe waarde toe omdat de app al image-bestanden kan tonen in de lijst, thumbnails kent, en type-specifieke viewers heeft voor tekst, video en PDF. Voor afbeeldingen ontbreekt nog de logische volgende stap: het geselecteerde bestand volledig bekijken zonder download- of externe viewerstap.
|
||||||
|
|
||||||
|
Een kleine uitbreiding van File Info met image-specifieke metadata voegt ook waarde toe. Voor afbeeldingen zijn afmetingen vaak net zo relevant als naam, grootte en modified time. Dat helpt bij snelle selectie, kwaliteitscontrole en onderscheid tussen vergelijkbare bestanden.
|
||||||
|
|
||||||
|
Dit past goed binnen de bestaande dual-pane workflow zolang:
|
||||||
|
- openen een lichte modalactie blijft
|
||||||
|
- de browse-flow niet verandert
|
||||||
|
- de info-uitbreiding read-only en goedkoop blijft
|
||||||
|
|
||||||
|
## 2. Scope
|
||||||
|
|
||||||
|
In scope voor v1:
|
||||||
|
- volledige image viewer voor:
|
||||||
|
- `jpg`
|
||||||
|
- `jpeg`
|
||||||
|
- `png`
|
||||||
|
- `webp`
|
||||||
|
- `gif`
|
||||||
|
- `bmp`
|
||||||
|
- `avif` als browser-native rendering zonder extra complexiteit werkt
|
||||||
|
- aparte image-modal
|
||||||
|
- read-only
|
||||||
|
- standaard `fit-to-view`
|
||||||
|
- basis zoom:
|
||||||
|
- zoom in
|
||||||
|
- zoom out
|
||||||
|
- reset
|
||||||
|
- File Info uitbreiding met:
|
||||||
|
- `width`
|
||||||
|
- `height`
|
||||||
|
|
||||||
|
Niet in scope:
|
||||||
|
- edit
|
||||||
|
- crop/rotate
|
||||||
|
- slideshow
|
||||||
|
- metadata editor
|
||||||
|
- EXIF-inspectie als brede feature
|
||||||
|
- thumbnails in de viewer
|
||||||
|
- multi-image navigation
|
||||||
|
|
||||||
|
Aanbevolen v1-richting:
|
||||||
|
- `jpg/jpeg/png/webp/gif/bmp` volwaardig ondersteunen
|
||||||
|
- `avif` best-effort, zonder extra garanties
|
||||||
|
- geen extra dependency alleen om `avif` of exotische metadata te forceren
|
||||||
|
|
||||||
|
## 3. Startgedrag
|
||||||
|
|
||||||
|
Aanbevolen v1-gedrag:
|
||||||
|
- `F3` opent de image viewer bij exact 1 geselecteerd image-bestand
|
||||||
|
- de bestaande `View`-knop gebruikt dezelfde centrale type-dispatch
|
||||||
|
- gewone `Enter`-semantiek blijft intact
|
||||||
|
|
||||||
|
Concreet:
|
||||||
|
- `F3` / `View` dispatch:
|
||||||
|
- tekst -> text viewer
|
||||||
|
- video -> video viewer
|
||||||
|
- pdf -> pdf viewer
|
||||||
|
- image -> image viewer
|
||||||
|
- bij geen selectie of multi-select doet `F3` niets als `View` disabled zou zijn
|
||||||
|
- directory-open gedrag via gewone `Enter` of directorynaam blijft onaangetast
|
||||||
|
|
||||||
|
## 4. Viewer-richting
|
||||||
|
|
||||||
|
Aanbevolen v1-richting: aparte image-modal met browser-native afbeeldingselement (`img`) en lichte frontend-zoom.
|
||||||
|
|
||||||
|
Waarom:
|
||||||
|
- geen extra dependency nodig
|
||||||
|
- laag regressierisico
|
||||||
|
- goed te combineren met bestaande modalarchitectuur
|
||||||
|
- voldoende voor een bruikbare eerste viewer
|
||||||
|
|
||||||
|
Aanbevolen UX:
|
||||||
|
- afbeelding centraal in een aparte modal
|
||||||
|
- standaard `fit-to-view`
|
||||||
|
- controls:
|
||||||
|
- `Zoom in`
|
||||||
|
- `Zoom out`
|
||||||
|
- `Reset`
|
||||||
|
- sluiten via:
|
||||||
|
- `X`
|
||||||
|
- `Escape`
|
||||||
|
- overlay-click alleen meenemen als dat geen conflict geeft met zoom/pan-interactie; anders weglaten in v1
|
||||||
|
|
||||||
|
Pannen/slepen:
|
||||||
|
- optioneel in v1
|
||||||
|
- alleen toevoegen als licht en stabiel
|
||||||
|
- geen ingewikkelde canvas/viewer-stack bouwen
|
||||||
|
|
||||||
|
Aanbevolen minimalistische v1:
|
||||||
|
- CSS transform zoom
|
||||||
|
- centreren zolang mogelijk
|
||||||
|
- eventueel natuurlijke browser-scroll/pan bij grotere zoom, in plaats van custom drag-logica
|
||||||
|
|
||||||
|
## 5. Backend-impact
|
||||||
|
|
||||||
|
Aanbevolen backendrichting:
|
||||||
|
- nieuw read-only image endpoint, analoog aan PDF/video, bijvoorbeeld:
|
||||||
|
- `GET /api/files/image?path=...`
|
||||||
|
|
||||||
|
Waarom een apart endpoint beter is dan hergebruik van random file-serving:
|
||||||
|
- consistente foutmapping
|
||||||
|
- duidelijke content-type-afhandeling
|
||||||
|
- hergebruik van bestaande `path_guard`
|
||||||
|
- expliciete scheiding van concerns per viewertype
|
||||||
|
|
||||||
|
Eisen:
|
||||||
|
- padvalidatie via bestaand `path_guard`
|
||||||
|
- alleen files
|
||||||
|
- directory -> bestaande `type_conflict`
|
||||||
|
- path not found -> bestaande not-found fout
|
||||||
|
- traversal / invalid root alias / outside whitelist -> bestaande securityfouten
|
||||||
|
- streaming/serving zonder onnodige buffering
|
||||||
|
- passend `Content-Type`
|
||||||
|
|
||||||
|
Geen nieuwe backendsemantiek nodig buiten een read-only route.
|
||||||
|
|
||||||
|
## 6. Frontend-impact
|
||||||
|
|
||||||
|
Aanbevolen frontendrichting:
|
||||||
|
- aparte image-modal
|
||||||
|
- geen hergebruik van text/video/pdf modalbody
|
||||||
|
- wel dezelfde modalstructuur en focusregels als bestaande viewers
|
||||||
|
|
||||||
|
Waarom een aparte modal:
|
||||||
|
- image viewing heeft eigen interactie (fit/zoom)
|
||||||
|
- voorkomt rommelige uitzonderingslogica in de bestaande text viewer
|
||||||
|
- houdt type-dispatch helder
|
||||||
|
|
||||||
|
Focusgedrag:
|
||||||
|
- terwijl image-modal open is, geen paneelkeyboardnavigatie
|
||||||
|
- `Escape` sluit modal
|
||||||
|
- `F3` en `View` blijven via dezelfde dispatch werken
|
||||||
|
|
||||||
|
## 7. File Info uitbreiding
|
||||||
|
|
||||||
|
Aanbevolen extra velden voor image-bestanden in v1:
|
||||||
|
- `width`
|
||||||
|
- `height`
|
||||||
|
- `content_type`
|
||||||
|
|
||||||
|
Optioneel, maar niet nodig voor v1:
|
||||||
|
- kleurprofiel
|
||||||
|
- EXIF orientation
|
||||||
|
- camera metadata
|
||||||
|
- creation date uit EXIF
|
||||||
|
|
||||||
|
Aanbevolen aanpak:
|
||||||
|
- alleen goedkope metadata
|
||||||
|
- afmetingen server-side afleiden zonder zware analyse
|
||||||
|
- geen brede EXIF feature
|
||||||
|
|
||||||
|
Voor niet-image bestanden blijven `width` en `height` gewoon `null`.
|
||||||
|
|
||||||
|
## 8. Regressierisico
|
||||||
|
|
||||||
|
Belangrijkste risico's:
|
||||||
|
- view-dispatch wordt rommeliger als image niet netjes als eigen type wordt behandeld
|
||||||
|
- modalfocus kan bestaande keyboardflow blokkeren of laten lekken
|
||||||
|
- grote afbeeldingen kunnen trager laden of veel viewport-ruimte vragen
|
||||||
|
- File Info response-uitbreiding moet backward-compatible blijven
|
||||||
|
|
||||||
|
Mitigatie:
|
||||||
|
- aparte image-modal
|
||||||
|
- eigen `isImageSelection(...)` helper in dezelfde dispatchstijl als video/pdf
|
||||||
|
- geen wijziging aan gewone `Enter`
|
||||||
|
- alleen extra velden aan File Info toevoegen, geen bestaande velden wijzigen
|
||||||
|
- zoom klein en beheersbaar houden
|
||||||
|
|
||||||
|
## 9. Teststrategie
|
||||||
|
|
||||||
|
Backend golden tests:
|
||||||
|
- image endpoint success voor ondersteund imagebestand
|
||||||
|
- directory -> `type_conflict`
|
||||||
|
- path not found
|
||||||
|
- traversal blocked
|
||||||
|
- invalid root alias
|
||||||
|
- non-image blocked of duidelijke unsupported fout
|
||||||
|
- File Info success voor imagebestand met `width`/`height`
|
||||||
|
- File Info voor niet-image met `width`/`height = null`
|
||||||
|
|
||||||
|
UI smoke/regressietests:
|
||||||
|
- image-modal container aanwezig
|
||||||
|
- image viewer wiring aanwezig in `F3`/`View` dispatch
|
||||||
|
- text/video/pdf modal containers blijven aanwezig
|
||||||
|
- File Info modal blijft aanwezig
|
||||||
|
- geen extra zichtbare knop toegevoegd
|
||||||
|
|
||||||
|
Handmatige validatie:
|
||||||
|
- `F3` opent image viewer bij exact 1 image
|
||||||
|
- `View` opent dezelfde image viewer
|
||||||
|
- zoom in/out/reset werkt
|
||||||
|
- sluiten via `X` en `Escape` werkt
|
||||||
|
- gewone `Enter` blijft directory/open-semantiek houden
|
||||||
|
- File Info toont width/height voor images
|
||||||
|
- grote afbeelding blijft bruikbaar zonder layoutbreuk
|
||||||
|
|
||||||
|
## 10. Aanbeveling
|
||||||
|
|
||||||
|
Aanbevolen v1-richting met laag regressierisico:
|
||||||
|
- nieuw read-only image endpoint
|
||||||
|
- aparte image-modal met browser-native `img`
|
||||||
|
- lichte zoombediening zonder externe image-viewer library
|
||||||
|
- `F3` en `View` gebruiken de bestaande centrale type-dispatch
|
||||||
|
- File Info uitbreiden met alleen goedkope image metadata:
|
||||||
|
- `width`
|
||||||
|
- `height`
|
||||||
|
- bestaand `content_type`
|
||||||
|
- `avif` alleen best-effort, zonder extra dependency of browsergarantie
|
||||||
|
|
||||||
|
Dit houdt de stap klein, veilig en consistent met de bestaande architectuur:
|
||||||
|
- viewers blijven type-specifiek
|
||||||
|
- File Info blijft read-only
|
||||||
|
- browse- en keyboardflow blijven intact
|
||||||
@@ -0,0 +1,233 @@
|
|||||||
|
# Local Upload v1
|
||||||
|
|
||||||
|
## 1. Doel
|
||||||
|
Local upload voegt nu direct waarde toe omdat de app al een bruikbare dual-pane bestandsworkflow heeft, maar nog geen ingang om bestanden vanaf de lokale machine de beheerde storage in te brengen. Dat gat is functioneel groot: browse, rename, move, copy en delete bestaan al, maar import ontbreekt.
|
||||||
|
|
||||||
|
Binnen de dual-pane workflow is de meest natuurlijke semantiek:
|
||||||
|
- bron: lokale machine via de native browser file picker
|
||||||
|
- doel: `currentPath` van het actieve paneel
|
||||||
|
|
||||||
|
Dat houdt het model eenvoudig en voorspelbaar. De gebruiker kiest eerst waar in de storage hij staat, en uploadt daarna naar die locatie.
|
||||||
|
|
||||||
|
## 2. Scope
|
||||||
|
Aanbevolen scope voor v1:
|
||||||
|
- upload van lokale bestanden via browser naar storage
|
||||||
|
- target = `currentPath` van het actieve paneel
|
||||||
|
- native browser file picker gebruiken
|
||||||
|
- single-file upload
|
||||||
|
- multi-file upload
|
||||||
|
- geen folder upload in v1
|
||||||
|
- geen drag & drop in v1
|
||||||
|
- geen resumable upload
|
||||||
|
- geen chunked upload
|
||||||
|
|
||||||
|
Motivatie:
|
||||||
|
- Multi-file upload via de native picker is klein en nuttig.
|
||||||
|
- Folder upload verhoogt de complexiteit direct sterk: recursie, conflictgedrag, voortgang, directory-creatie, mixed failures.
|
||||||
|
- Drag & drop is UX-matig aantrekkelijk, maar voegt event-complexiteit en extra foutpaden toe zonder dat het nodig is voor een eerste bruikbare versie.
|
||||||
|
- Chunking/resume is pas zinvol als gewone multipart upload aantoonbaar onvoldoende is.
|
||||||
|
|
||||||
|
## 3. Startgedrag / UI
|
||||||
|
Voor v1:
|
||||||
|
- een `Upload` knop links van `F1 Settings` in de onderbalk/topactiezone waar die nu logisch past
|
||||||
|
- klik op `Upload` opent direct de native browser file picker
|
||||||
|
- de upload werkt altijd naar het actieve paneel
|
||||||
|
- de UI toont compact en expliciet:
|
||||||
|
- `Upload to: <currentPath van actief paneel>`
|
||||||
|
|
||||||
|
Aanbevolen flow:
|
||||||
|
1. gebruiker activeert een paneel
|
||||||
|
2. gebruiker klikt `Upload`
|
||||||
|
3. browser opent native file picker
|
||||||
|
4. gebruiker kiest 1 of meerdere bestanden
|
||||||
|
5. upload start naar `currentPath` van actief paneel
|
||||||
|
6. voortgang wordt zichtbaar
|
||||||
|
7. na afronding wordt het actieve paneel refreshed
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
- de actieve-paneelcontext moet vooraf duidelijk zijn
|
||||||
|
- de knop hoeft niet disabled te zijn zolang een geldig `currentPath` bestaat
|
||||||
|
- als een modal open is, moet `Upload` niet tegelijk een nieuwe flow starten
|
||||||
|
|
||||||
|
## 4. Voortgang
|
||||||
|
Aanbevolen v1-model:
|
||||||
|
- één compacte upload-progress UI per lopende uploadbatch
|
||||||
|
- globale voortgang over de batch
|
||||||
|
- daarnaast compacte status per huidig bestand indien nodig
|
||||||
|
|
||||||
|
V1 hoeft niet meteen een volledige task-UI te hergebruiken. De eenvoudigste bruikbare richting is:
|
||||||
|
- één uploadstatusblok of kleine modal
|
||||||
|
- toont:
|
||||||
|
- totaal aantal bestanden
|
||||||
|
- huidig bestand
|
||||||
|
- globale voortgangsbalk of percentage
|
||||||
|
|
||||||
|
Aanbevolen velden in de UI:
|
||||||
|
- `Uploading 3 files to /Volumes/...`
|
||||||
|
- `2/3 files`
|
||||||
|
- huidige bestandsnaam
|
||||||
|
- percentage of bytes-progress voor de actieve upload
|
||||||
|
|
||||||
|
Dit is lichter dan de bestaande task-list volledig integreren in v1.
|
||||||
|
|
||||||
|
## 5. Backend-impact
|
||||||
|
Er is zeer waarschijnlijk een nieuw upload-endpoint nodig, bijvoorbeeld:
|
||||||
|
- `POST /api/files/upload`
|
||||||
|
|
||||||
|
Verwachte vorm:
|
||||||
|
- multipart/form-data
|
||||||
|
- target path als apart veld, bijvoorbeeld `target_path`
|
||||||
|
- één of meerdere file parts
|
||||||
|
|
||||||
|
Veiligheidsmodel:
|
||||||
|
- `target_path` altijd via bestaande `path_guard`
|
||||||
|
- target moet binnen whitelist/toegestane roots vallen
|
||||||
|
- target moet bestaan
|
||||||
|
- target moet een directory zijn
|
||||||
|
- bestandsnamen niet vertrouwen vanuit clientpad-informatie
|
||||||
|
- alleen de basename van het gekozen lokale bestand gebruiken
|
||||||
|
- validatie van naam via bestaande naamregels (`validate_name` of equivalent)
|
||||||
|
- geen client-side padsegmenten overnemen
|
||||||
|
|
||||||
|
Traversalpreventie:
|
||||||
|
- geen directorystructuur uit de browser aan serverzijde interpreteren in v1
|
||||||
|
- geen relatieve paden uit multipart metadata vertrouwen
|
||||||
|
- ieder bestand wordt server-side gemapt naar:
|
||||||
|
- `target_path / validated_basename`
|
||||||
|
|
||||||
|
## 6. Conflictgedrag
|
||||||
|
Ontwerp voor Engelstalige keuzes:
|
||||||
|
- `Overwrite`
|
||||||
|
- `Overwrite all`
|
||||||
|
- `Skip`
|
||||||
|
- `Cancel`
|
||||||
|
|
||||||
|
Aanbevolen v1-gedrag:
|
||||||
|
- conflictcontrole gebeurt server-side per bestand
|
||||||
|
- bij conflict in een batch wordt de batch niet stil doorgezet
|
||||||
|
- de UI toont een compacte conflictmodal voor het huidige conflicterende bestand
|
||||||
|
- de gebruiker kiest één actie
|
||||||
|
|
||||||
|
Semantiek:
|
||||||
|
- `Overwrite`: alleen huidig conflicterend bestand overschrijven
|
||||||
|
- `Overwrite all`: huidig en alle volgende conflicten automatisch overschrijven
|
||||||
|
- `Skip`: huidig conflicterend bestand overslaan en doorgaan
|
||||||
|
- `Cancel`: resterende batch stoppen
|
||||||
|
|
||||||
|
Aanbevolen v1-realisatie:
|
||||||
|
- conflict afhandelen per bestand binnen de uploadbatch-flow
|
||||||
|
- geen complexe vooraf-scan van alle conflicten nodig
|
||||||
|
- geen rollback
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
- ook directoryconflicten moeten duidelijk zijn
|
||||||
|
- als target al een directory met dezelfde naam bevat voor een file-upload, moet dat als conflict/typefout behandeld worden
|
||||||
|
|
||||||
|
## 7. Grote bestanden / performance
|
||||||
|
Aanbevolen v1:
|
||||||
|
- gewone multipart upload
|
||||||
|
- geen chunking
|
||||||
|
- geen resumable upload
|
||||||
|
|
||||||
|
Motivatie:
|
||||||
|
- technisch het eenvoudigst
|
||||||
|
- breed ondersteund door browser en backendstack
|
||||||
|
- voldoende voor een eerste bruikbare versie
|
||||||
|
|
||||||
|
Risico:
|
||||||
|
- zeer grote bestanden kunnen lang duren of mislukken bij netwerkonderbreking
|
||||||
|
- dat risico moet in v1 geaccepteerd en netjes gecommuniceerd worden
|
||||||
|
|
||||||
|
V1 hoeft daarom niet meer te doen dan:
|
||||||
|
- voortgang tonen
|
||||||
|
- foutmelding tonen bij mislukking
|
||||||
|
- geen herstart of resume bieden
|
||||||
|
|
||||||
|
## 8. Relatie met tasks/history
|
||||||
|
Aanbevolen v1:
|
||||||
|
- upload opnemen in `history`
|
||||||
|
- upload niet meteen in het generieke `tasks` model stoppen
|
||||||
|
|
||||||
|
Motivatie:
|
||||||
|
- upload heeft wel auditwaarde, dus history is logisch
|
||||||
|
- task-integratie maakt de slice groter: background execution, task persistence, progress mapping, polling-UI integratie
|
||||||
|
- voor een eerste bruikbare upload is een lichtere directe UI-flow met history-opslag pragmatischer
|
||||||
|
|
||||||
|
History v1 voor upload zou moeten registreren:
|
||||||
|
- operation = `upload`
|
||||||
|
- status = `completed` / `failed`
|
||||||
|
- destination = doelpad
|
||||||
|
- path of source-naam waar nuttig
|
||||||
|
- error_code / error_message bij failure
|
||||||
|
|
||||||
|
Als later blijkt dat uploads langlopend worden of meerdere gelijktijdige uploads normaal zijn, kan task-integratie in v2 logisch worden.
|
||||||
|
|
||||||
|
## 9. Regressierisico
|
||||||
|
Belangrijkste risico's:
|
||||||
|
- security: onbetrouwbare bestandsnamen of target path misbruik
|
||||||
|
- grote bestanden: timeouts of langlopende requests
|
||||||
|
- foutafhandeling: deels geslaagde batch zonder duidelijke feedback
|
||||||
|
- UI-complexiteit: conflictflow kan snel onrustig worden
|
||||||
|
- actieve-paneelcontext: upload naar verkeerd paneel/pad als context niet duidelijk is
|
||||||
|
- conflictafhandeling: onduidelijke semantiek rond overwrite/skip
|
||||||
|
|
||||||
|
Laag-regressierisico aanpak:
|
||||||
|
- target altijd expliciet koppelen aan actief paneel
|
||||||
|
- geen folder upload
|
||||||
|
- geen drag & drop
|
||||||
|
- geen chunking/resume
|
||||||
|
- compacte conflictmodal per bestand
|
||||||
|
- direct paneelrefresh na succesvolle upload(s)
|
||||||
|
|
||||||
|
## 10. Teststrategie
|
||||||
|
Backend golden tests:
|
||||||
|
- upload single file success
|
||||||
|
- upload multi-file success
|
||||||
|
- target path not found
|
||||||
|
- target path is file -> type_conflict
|
||||||
|
- traversal blocked
|
||||||
|
- invalid root alias
|
||||||
|
- invalid filename blocked
|
||||||
|
- conflict -> already_exists of equivalent
|
||||||
|
- overwrite success
|
||||||
|
- skip/cancel flow indien servercontract dat nodig maakt
|
||||||
|
|
||||||
|
UI smoke/regressietests:
|
||||||
|
- `Upload` knop aanwezig links van `F1 Settings`
|
||||||
|
- geen uploadstart als ongeldige UI-context aanwezig is
|
||||||
|
- targetpaneel-context zichtbaar in uploadflow
|
||||||
|
- progress UI verschijnt
|
||||||
|
- conflictkeuze-UI verschijnt met:
|
||||||
|
- `Overwrite`
|
||||||
|
- `Overwrite all`
|
||||||
|
- `Skip`
|
||||||
|
- `Cancel`
|
||||||
|
|
||||||
|
Handmatige validatie:
|
||||||
|
- upload 1 klein bestand
|
||||||
|
- upload meerdere bestanden
|
||||||
|
- conflict op bestaand bestand
|
||||||
|
- overwrite all werkt over meerdere conflicten
|
||||||
|
- skip laat batch doorgaan
|
||||||
|
- cancel stopt batch
|
||||||
|
- actief paneel bepaalt doelpad correct
|
||||||
|
- history bevat upload-resultaten
|
||||||
|
|
||||||
|
## 11. Aanbeveling
|
||||||
|
Aanbevolen v1-richting met laag regressierisico:
|
||||||
|
- native browser file picker
|
||||||
|
- single + multi-file upload
|
||||||
|
- target = `currentPath` van actief paneel
|
||||||
|
- geen folder upload
|
||||||
|
- geen drag & drop
|
||||||
|
- gewone multipart upload
|
||||||
|
- directe voortgangsweergave in lichte upload-UI
|
||||||
|
- conflictafhandeling per bestand met:
|
||||||
|
- `Overwrite`
|
||||||
|
- `Overwrite all`
|
||||||
|
- `Skip`
|
||||||
|
- `Cancel`
|
||||||
|
- wel history-integratie
|
||||||
|
- nog geen task-integratie
|
||||||
|
|
||||||
|
Dit is de kleinste versie die echt bruikbaar is, zonder meteen te ontsporen in mediaserver- of synchronisatiecomplexiteit.
|
||||||
@@ -0,0 +1,338 @@
|
|||||||
|
# Remote Client Shares Implementation Phases V1.1
|
||||||
|
|
||||||
|
## Doel
|
||||||
|
|
||||||
|
Dit document splitst `REMOTE_CLIENT_SHARES_V1_DESIGN.md` op in pragmatische implementatiefases.
|
||||||
|
|
||||||
|
Uitgangspunten:
|
||||||
|
|
||||||
|
- geen overengineering
|
||||||
|
- elke fase moet zelfstandig waarde leveren
|
||||||
|
- WebManager mag nooit blokkeren op remote agents
|
||||||
|
- bestaande storage-functionaliteit moet intact blijven
|
||||||
|
- `/Clients` blijft een aparte source, geen uitbreiding van lokale filesystem roots
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Overzicht
|
||||||
|
|
||||||
|
### Phase 1
|
||||||
|
|
||||||
|
Client registry, identiteit en statusmodel.
|
||||||
|
|
||||||
|
### Phase 2
|
||||||
|
|
||||||
|
Browse van remote client shares via virtuele `Clients` root.
|
||||||
|
|
||||||
|
### Phase 3
|
||||||
|
|
||||||
|
Info, tekstpreview, eenvoudige image preview en download voor remote shares.
|
||||||
|
|
||||||
|
### Later
|
||||||
|
|
||||||
|
Alle write-acties, bookmarks/startup paths en cross-source flows.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1: Client Registry
|
||||||
|
|
||||||
|
### Doel
|
||||||
|
|
||||||
|
WebManager moet remote agents kennen, identificeren en hun status betrouwbaar kunnen bijhouden.
|
||||||
|
|
||||||
|
### Resultaat
|
||||||
|
|
||||||
|
De backend en UI kunnen een lijst van bekende clients tonen, inclusief stabiele identiteit en basisstatus.
|
||||||
|
|
||||||
|
### In scope
|
||||||
|
|
||||||
|
- remote client registratie
|
||||||
|
- heartbeat endpoint
|
||||||
|
- opslag van client metadata
|
||||||
|
- statusmodel met gescheiden velden
|
||||||
|
- lijstendpoint voor bekende clients
|
||||||
|
- registratie-auth
|
||||||
|
- agent-access-auth contract vastleggen
|
||||||
|
|
||||||
|
### Niet in scope
|
||||||
|
|
||||||
|
- browsen in shares
|
||||||
|
- file operations
|
||||||
|
- download
|
||||||
|
- rename/delete/mkdir
|
||||||
|
|
||||||
|
### Beslissingen
|
||||||
|
|
||||||
|
- `client_id` is leidend
|
||||||
|
- `display_name` is niet leidend
|
||||||
|
- browse-routing mag niet afhankelijk zijn van alleen displaynaam
|
||||||
|
- `last_seen`, `status`, `last_error` en `reachable_at` blijven logisch gescheiden
|
||||||
|
|
||||||
|
### Backendwerk
|
||||||
|
|
||||||
|
Nieuwe onderdelen:
|
||||||
|
|
||||||
|
- repository voor remote clients
|
||||||
|
- service voor registratie en heartbeat
|
||||||
|
- statusafleiding
|
||||||
|
- opslag van auth- en endpointmetadata
|
||||||
|
- routes:
|
||||||
|
- `POST /api/clients/register`
|
||||||
|
- `POST /api/clients/heartbeat`
|
||||||
|
- `GET /api/clients`
|
||||||
|
|
||||||
|
Waarschijnlijk te wijzigen:
|
||||||
|
|
||||||
|
- [main.py](/workspace/webmanager-mvp/webui/backend/app/main.py)
|
||||||
|
- [dependencies.py](/workspace/webmanager-mvp/webui/backend/app/dependencies.py)
|
||||||
|
|
||||||
|
Waarschijnlijk nieuw:
|
||||||
|
|
||||||
|
- `webui/backend/app/api/routes_clients.py`
|
||||||
|
- `webui/backend/app/services/remote_client_service.py`
|
||||||
|
- `webui/backend/app/db/remote_client_repository.py`
|
||||||
|
|
||||||
|
### Agentwerk
|
||||||
|
|
||||||
|
- vaste config inlezen
|
||||||
|
- `client_id` beheren
|
||||||
|
- registratie naar WebManager
|
||||||
|
- periodieke heartbeat
|
||||||
|
- agent-access-token config toevoegen
|
||||||
|
|
||||||
|
### UI-werk
|
||||||
|
|
||||||
|
Minimaal:
|
||||||
|
|
||||||
|
- geen browse-integratie nodig
|
||||||
|
- een eenvoudige clientlijst of debug-status is voldoende
|
||||||
|
|
||||||
|
### Acceptatiecriteria
|
||||||
|
|
||||||
|
- agent kan zich registreren
|
||||||
|
- client verschijnt in `GET /api/clients`
|
||||||
|
- `last_seen` wordt bijgewerkt
|
||||||
|
- `status` wordt afgeleid zonder te flappen
|
||||||
|
- `last_error` en `reachable_at` bestaan als apart concept
|
||||||
|
- server blijft normaal werken als er geen agents bestaan
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2: Browse via `/Clients`
|
||||||
|
|
||||||
|
### Doel
|
||||||
|
|
||||||
|
Remote clients en hun shares moeten zichtbaar worden in dezelfde browse-ervaring als server storage, zonder lokale services te vervormen.
|
||||||
|
|
||||||
|
### Resultaat
|
||||||
|
|
||||||
|
De gebruiker kan navigeren naar:
|
||||||
|
|
||||||
|
- `/Clients`
|
||||||
|
- `/Clients/<client>`
|
||||||
|
- `/Clients/<client>/<share>`
|
||||||
|
|
||||||
|
### In scope
|
||||||
|
|
||||||
|
- virtuele root `/Clients`
|
||||||
|
- clientlijst als directories
|
||||||
|
- sharelijst per client als directories
|
||||||
|
- browse binnen share
|
||||||
|
- offline foutafhandeling
|
||||||
|
- agent-auth op browsecalls
|
||||||
|
|
||||||
|
### Niet in scope
|
||||||
|
|
||||||
|
- view/download
|
||||||
|
- edit
|
||||||
|
- rename/delete/mkdir
|
||||||
|
- bookmarks/startup paths
|
||||||
|
|
||||||
|
### Beslissingen
|
||||||
|
|
||||||
|
- `/Clients` wordt vroeg in de backend-route afgehandeld
|
||||||
|
- remote paden mogen niet in gewone lokale `PathGuard` resolution terechtkomen
|
||||||
|
- lokale browse-services blijven verantwoordelijk voor alleen lokale server sources
|
||||||
|
|
||||||
|
### Backendwerk
|
||||||
|
|
||||||
|
Waarschijnlijk te wijzigen:
|
||||||
|
|
||||||
|
- [routes_browse.py](/workspace/webmanager-mvp/webui/backend/app/api/routes_browse.py)
|
||||||
|
|
||||||
|
Liever niet verbreden:
|
||||||
|
|
||||||
|
- [path_guard.py](/workspace/webmanager-mvp/webui/backend/app/security/path_guard.py)
|
||||||
|
|
||||||
|
Nieuwe onderdelen:
|
||||||
|
|
||||||
|
- browse-facade voor remote client paden
|
||||||
|
- agent HTTP client met korte timeouts en auth
|
||||||
|
|
||||||
|
### UI-werk
|
||||||
|
|
||||||
|
Waarschijnlijk te wijzigen:
|
||||||
|
|
||||||
|
- [app.js](/workspace/webmanager-mvp/webui/html/app.js)
|
||||||
|
|
||||||
|
Benodigd:
|
||||||
|
|
||||||
|
- rootnavigatie voor `/Clients`
|
||||||
|
- breadcrumbs voor client/share-paden
|
||||||
|
- render van client/status/share directories
|
||||||
|
- nette foutmelding bij offline client
|
||||||
|
|
||||||
|
### Agentwerk
|
||||||
|
|
||||||
|
Nieuwe browse endpoint(s):
|
||||||
|
|
||||||
|
- `GET /health`
|
||||||
|
- `GET /api/list?share=...&path=...`
|
||||||
|
|
||||||
|
### Acceptatiecriteria
|
||||||
|
|
||||||
|
- `/Clients` toont bekende clients
|
||||||
|
- `/Clients/<client>` toont alleen toegestane shares
|
||||||
|
- `/Clients/<client>/<share>` toont directory-inhoud
|
||||||
|
- offline client geeft een snelle fout, geen hang
|
||||||
|
- `/Volumes` gedrag blijft intact
|
||||||
|
- lokale browse-code blijft logisch gescheiden van remote browse-code
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 3: Info, Preview, Download
|
||||||
|
|
||||||
|
### Doel
|
||||||
|
|
||||||
|
Remote shares moeten read-only bruikbaar worden voor dagelijkse taken.
|
||||||
|
|
||||||
|
### Resultaat
|
||||||
|
|
||||||
|
Gebruiker kan bestanden in remote shares inspecteren, bekijken en downloaden.
|
||||||
|
|
||||||
|
### In scope
|
||||||
|
|
||||||
|
- file info
|
||||||
|
- tekstpreview
|
||||||
|
- eenvoudige image preview
|
||||||
|
- download van remote bestanden
|
||||||
|
- expliciete resource-limieten
|
||||||
|
|
||||||
|
### Niet in scope
|
||||||
|
|
||||||
|
- edit
|
||||||
|
- rename/delete/mkdir
|
||||||
|
- upload
|
||||||
|
- cross-source copy/move
|
||||||
|
|
||||||
|
### Beslissingen
|
||||||
|
|
||||||
|
- tekstpreview krijgt een harde limiet
|
||||||
|
- text/binary-detectie moet expliciet zijn
|
||||||
|
- downloads worden gestreamd
|
||||||
|
- geen grote in-memory buffering voor download
|
||||||
|
|
||||||
|
### Backendwerk
|
||||||
|
|
||||||
|
Nieuwe facade of routes voor remote file actions:
|
||||||
|
|
||||||
|
- info
|
||||||
|
- read/view
|
||||||
|
- download
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
|
||||||
|
- backend vertaalt WebManager-pad naar agent-call
|
||||||
|
- timeouts en foutmapping blijven streng
|
||||||
|
- source-aware afhandeling blijft gescheiden van lokale file ops
|
||||||
|
|
||||||
|
Waarschijnlijk geraakt:
|
||||||
|
|
||||||
|
- `routes_files.py` of parallelle remote-fileroute
|
||||||
|
- aparte service-laag voor remote file proxying
|
||||||
|
|
||||||
|
### UI-werk
|
||||||
|
|
||||||
|
Waarschijnlijk te wijzigen:
|
||||||
|
|
||||||
|
- [app.js](/workspace/webmanager-mvp/webui/html/app.js)
|
||||||
|
|
||||||
|
Benodigd:
|
||||||
|
|
||||||
|
- source-aware afhandeling voor `View`
|
||||||
|
- downloadknop moet remote paths ondersteunen
|
||||||
|
- properties/info moet ook werken voor remote paden
|
||||||
|
|
||||||
|
### Agentwerk
|
||||||
|
|
||||||
|
Nieuwe endpoints:
|
||||||
|
|
||||||
|
- `GET /api/info`
|
||||||
|
- `GET /api/read`
|
||||||
|
- `GET /api/download`
|
||||||
|
|
||||||
|
### Acceptatiecriteria
|
||||||
|
|
||||||
|
- file info werkt voor remote paden
|
||||||
|
- tekstbestand kan bekeken worden binnen limieten
|
||||||
|
- afbeelding kan bekeken worden als ondersteund
|
||||||
|
- download van remote bestand werkt via streaming
|
||||||
|
- foutafhandeling blijft lokaal tot betreffende pane/actie
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Later
|
||||||
|
|
||||||
|
Deze onderdelen horen niet in V1.1.
|
||||||
|
|
||||||
|
### Write-acties
|
||||||
|
|
||||||
|
- mkdir
|
||||||
|
- rename
|
||||||
|
- delete
|
||||||
|
- upload
|
||||||
|
|
||||||
|
### UI-integraties
|
||||||
|
|
||||||
|
- bookmarks voor `/Clients/...`
|
||||||
|
- startup paths voor `/Clients/...`
|
||||||
|
|
||||||
|
### Cross-source flows
|
||||||
|
|
||||||
|
- `/Volumes/...` naar `/Clients/...`
|
||||||
|
- `/Clients/...` naar `/Volumes/...`
|
||||||
|
|
||||||
|
Dit vereist expliciete transfersemantiek en hoort niet in de eerste read-mostly release.
|
||||||
|
|
||||||
|
### Zwaardere netwerkmodellen
|
||||||
|
|
||||||
|
- reverse-connect
|
||||||
|
- tunnelmodel
|
||||||
|
- relay-infrastructuur
|
||||||
|
|
||||||
|
### Sterkere pairing
|
||||||
|
|
||||||
|
- pair codes
|
||||||
|
- per-agent secret rotation
|
||||||
|
- signed registration
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Aanbevolen volgorde
|
||||||
|
|
||||||
|
1. Phase 1 volledig afronden.
|
||||||
|
2. Daarna Phase 2 volledig afronden.
|
||||||
|
3. Daarna Phase 3 read-only afronden.
|
||||||
|
4. Alles daarna alleen oppakken als een concrete productbehoefte dat rechtvaardigt.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Beslisadvies
|
||||||
|
|
||||||
|
Als er snel waarde geleverd moet worden, is de beste minimale keten:
|
||||||
|
|
||||||
|
1. registry
|
||||||
|
2. browse
|
||||||
|
3. info/preview/download
|
||||||
|
|
||||||
|
Daarmee ontstaat een bruikbare remote client bron zonder write-complexiteit, contractbreuk in lokale services of half-afgewerkte transferlogica.
|
||||||
@@ -0,0 +1,697 @@
|
|||||||
|
# Remote Client Shares V1.1 Design
|
||||||
|
|
||||||
|
## Doel
|
||||||
|
|
||||||
|
Een gebruiker van WebManager moet naast de bestaande server-side storage-roots ook een beperkte set lokale mappen van zijn eigen client-Mac kunnen benaderen, zonder de hele homefolder bloot te geven.
|
||||||
|
|
||||||
|
Voorbeelden van toegestane client-shares:
|
||||||
|
|
||||||
|
- `Downloads`
|
||||||
|
- `Movies`
|
||||||
|
- `Pictures`
|
||||||
|
|
||||||
|
De oplossing moet:
|
||||||
|
|
||||||
|
- simpel blijven
|
||||||
|
- veilig blijven
|
||||||
|
- de bestaande storage-workflow niet breken
|
||||||
|
- WebManager niet laten vastlopen als een remote helper-agent offline is
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Kernbeslissingen voor V1.1
|
||||||
|
|
||||||
|
Deze beslissingen liggen in V1.1 vast.
|
||||||
|
|
||||||
|
- Remote client shares worden niet opgenomen in `root_aliases`.
|
||||||
|
- `/Clients` wordt een aparte virtuele bron naast `/Volumes`.
|
||||||
|
- Remote paden lopen niet door de bestaande lokale filesystem-resolutie.
|
||||||
|
- `client_id` is intern de enige leidende identiteit.
|
||||||
|
- `display_name` is alleen voor UI-weergave.
|
||||||
|
- De agent werkt alleen met `share key + relatief pad`.
|
||||||
|
- Alle agent-calls vereisen authenticatie, niet alleen registratie.
|
||||||
|
- Offline agents mogen alleen hun eigen subtree beïnvloeden, nooit de rest van de app.
|
||||||
|
- V1 blijft read-mostly: registry, browse, info, preview, download.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Waarom niet als gewone root alias
|
||||||
|
|
||||||
|
De huidige backend gaat uit van server-side whitelisted filesystem roots.
|
||||||
|
|
||||||
|
Dat model werkt voor:
|
||||||
|
|
||||||
|
- `/Volumes/...`
|
||||||
|
- gemounte server storage
|
||||||
|
- container-side toegankelijke paden
|
||||||
|
|
||||||
|
Dat model werkt niet goed voor:
|
||||||
|
|
||||||
|
- de lokale schijf van de browsergebruiker
|
||||||
|
- een remote Mac die buiten de server draait
|
||||||
|
- clients die offline kunnen zijn
|
||||||
|
- clients die dynamische IP-adressen hebben
|
||||||
|
|
||||||
|
Daarom mogen remote client shares niet in hetzelfde model worden gestopt als `root_aliases`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Scope V1.1
|
||||||
|
|
||||||
|
### In scope
|
||||||
|
|
||||||
|
- beperkte client-shares: `Downloads`, `Movies`, `Pictures`
|
||||||
|
- lokale helper-agent op macOS
|
||||||
|
- agent registratie in WebManager
|
||||||
|
- heartbeat/status tracking
|
||||||
|
- virtuele `Clients` bron in de WebUI
|
||||||
|
- browse van remote shares
|
||||||
|
- bestand-info
|
||||||
|
- tekstpreview
|
||||||
|
- image preview waar triviaal
|
||||||
|
- download van bestanden
|
||||||
|
- nette offline-afhandeling
|
||||||
|
|
||||||
|
### Expliciet niet in V1.1
|
||||||
|
|
||||||
|
- hele homefolder
|
||||||
|
- willekeurige custom paths buiten de toegestane sharelijst
|
||||||
|
- shell/subprocess execution
|
||||||
|
- rename
|
||||||
|
- mkdir
|
||||||
|
- delete
|
||||||
|
- upload naar remote share
|
||||||
|
- bookmarks voor `/Clients/...`
|
||||||
|
- startup paths voor `/Clients/...`
|
||||||
|
- cross-source copy of move
|
||||||
|
- complete taakrunner-integratie zoals server copy/move tasks
|
||||||
|
- automatische LAN discovery
|
||||||
|
- multi-user auth met OS user mapping
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Gewenste gebruikerservaring
|
||||||
|
|
||||||
|
In de WebUI komt naast server-storage een extra virtuele bron:
|
||||||
|
|
||||||
|
- `/Volumes`
|
||||||
|
- `/Clients`
|
||||||
|
|
||||||
|
Onder `/Clients` ziet de gebruiker geregistreerde clients, bijvoorbeeld:
|
||||||
|
|
||||||
|
- `MacBook Pro van Jan`
|
||||||
|
- `iMac Woonkamer`
|
||||||
|
|
||||||
|
Onder een client ziet de gebruiker alleen de toegestane shares:
|
||||||
|
|
||||||
|
- `Downloads`
|
||||||
|
- `Movies`
|
||||||
|
- `Pictures`
|
||||||
|
|
||||||
|
Voor de gebruiker kan dat eruitzien als:
|
||||||
|
|
||||||
|
- `/Clients/MacBook-Pro-van-Jan/Downloads`
|
||||||
|
- `/Clients/MacBook-Pro-van-Jan/Movies`
|
||||||
|
- `/Clients/MacBook-Pro-van-Jan/Pictures`
|
||||||
|
|
||||||
|
Maar intern mag routing niet op `display_name` leunen.
|
||||||
|
|
||||||
|
Intern moet WebManager werken met een stabiele client-identiteit en een mappinglaag:
|
||||||
|
|
||||||
|
- `client_id` voor routing en opslag
|
||||||
|
- `display_name` voor weergave
|
||||||
|
- optioneel een afgeleide slug voor browse-url-presentatie
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architectuuroverzicht
|
||||||
|
|
||||||
|
Er zijn drie componenten.
|
||||||
|
|
||||||
|
### 1. WebManager backend
|
||||||
|
|
||||||
|
Verantwoordelijk voor:
|
||||||
|
|
||||||
|
- registry van bekende remote clients
|
||||||
|
- status- en heartbeat-tracking
|
||||||
|
- virtuele browse-root `Clients`
|
||||||
|
- proxying van requests naar agents
|
||||||
|
- timeouts en foutafhandeling
|
||||||
|
- scheiding tussen local-source en remote-source afhandeling
|
||||||
|
|
||||||
|
### 2. WebUI frontend
|
||||||
|
|
||||||
|
Verantwoordelijk voor:
|
||||||
|
|
||||||
|
- tonen van `Clients` als extra bron
|
||||||
|
- navigeren binnen client/share paden
|
||||||
|
- offline status tonen
|
||||||
|
- requests afvuren naar gewone WebManager backend-routes
|
||||||
|
|
||||||
|
### 3. Remote helper-agent op macOS
|
||||||
|
|
||||||
|
Verantwoordelijk voor:
|
||||||
|
|
||||||
|
- toegang tot vaste lokale shares
|
||||||
|
- strikte padvalidatie binnen die shares
|
||||||
|
- simpele browse/info/read/download endpoints
|
||||||
|
- zichzelf registreren bij WebManager
|
||||||
|
- heartbeat sturen
|
||||||
|
- auth afdwingen op alle agent-endpoints
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Bereikbaarheidsmodel
|
||||||
|
|
||||||
|
Dit is de eerste harde productbeslissing.
|
||||||
|
|
||||||
|
### V1.1-keuze
|
||||||
|
|
||||||
|
V1.1 gaat uit van een omgeving waarin WebManager de agent rechtstreeks kan bereiken.
|
||||||
|
|
||||||
|
Dat betekent praktisch:
|
||||||
|
|
||||||
|
- dezelfde LAN
|
||||||
|
- of een expliciet configureerbaar agent-endpoint
|
||||||
|
- of een deployment waar server en client netwerkmatig direct verbonden zijn
|
||||||
|
|
||||||
|
### Waarom deze keuze
|
||||||
|
|
||||||
|
Dit is het simpelste model dat functioneel klopt zonder reverse tunnels, websockets als transportlaag, of extra relay-infrastructuur.
|
||||||
|
|
||||||
|
### Wat V1.1 niet probeert op te lossen
|
||||||
|
|
||||||
|
Deze versie garandeert niet dat een agent achter willekeurige NAT/firewall altijd bereikbaar is.
|
||||||
|
|
||||||
|
Dus:
|
||||||
|
|
||||||
|
- self-registration blijft het discoverymodel
|
||||||
|
- direct bereikbare agent-endpoint blijft het V1-transportmodel
|
||||||
|
- reverse-connect of tunnelmodellen zijn uitgesteld
|
||||||
|
|
||||||
|
### Fallback
|
||||||
|
|
||||||
|
Een handmatige endpoint override blijft toegestaan als operationele fallback, bijvoorbeeld:
|
||||||
|
|
||||||
|
- `http://192.168.1.25:8765`
|
||||||
|
|
||||||
|
Maar dat is geen hoofdmodel en geen productbelofte.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hoe de remote agent bekend wordt in WebManager
|
||||||
|
|
||||||
|
### Gekozen model: agent registreert zichzelf
|
||||||
|
|
||||||
|
De agent meldt zichzelf actief aan bij WebManager. Niet andersom.
|
||||||
|
|
||||||
|
Dat betekent:
|
||||||
|
|
||||||
|
- geen handmatig client-IP nodig als hoofdmodel
|
||||||
|
- geen server-naar-client discovery nodig
|
||||||
|
- geen afhankelijkheid van LAN-broadcasting
|
||||||
|
- geen probleem als het client-IP wisselt, zolang het geregistreerde endpoint actueel is
|
||||||
|
|
||||||
|
### Registratiestroom
|
||||||
|
|
||||||
|
Bij starten van de agent:
|
||||||
|
|
||||||
|
1. de agent leest lokale config
|
||||||
|
2. de agent bepaalt:
|
||||||
|
- `client_id`
|
||||||
|
- `display_name`
|
||||||
|
- `shares`
|
||||||
|
- `endpoint`
|
||||||
|
3. de agent registreert zich bij WebManager
|
||||||
|
4. WebManager slaat client-record op of werkt het bij
|
||||||
|
5. de agent stuurt periodieke heartbeats
|
||||||
|
|
||||||
|
### Benodigde velden bij registratie
|
||||||
|
|
||||||
|
Voorstel:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"client_id": "f4b2c8f8-2b1b-4d89-9ed2-8d6d7b1f3abc",
|
||||||
|
"display_name": "MacBook Pro van Jan",
|
||||||
|
"platform": "macos",
|
||||||
|
"agent_version": "1.1.0",
|
||||||
|
"endpoint": "http://192.168.1.25:8765",
|
||||||
|
"shares": [
|
||||||
|
{ "key": "downloads", "label": "Downloads" },
|
||||||
|
{ "key": "movies", "label": "Movies" },
|
||||||
|
{ "key": "pictures", "label": "Pictures" }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Backend bewaart per client
|
||||||
|
|
||||||
|
- `client_id`
|
||||||
|
- `display_name`
|
||||||
|
- `platform`
|
||||||
|
- `agent_version`
|
||||||
|
- `endpoint`
|
||||||
|
- `shares`
|
||||||
|
- `last_seen`
|
||||||
|
- `status`
|
||||||
|
- `last_error`
|
||||||
|
- `reachable_at`
|
||||||
|
- eventueel `registration_token_id`
|
||||||
|
|
||||||
|
### Heartbeat
|
||||||
|
|
||||||
|
De agent stuurt elke 15-30 seconden een heartbeat.
|
||||||
|
|
||||||
|
Bijvoorbeeld:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"client_id": "f4b2c8f8-2b1b-4d89-9ed2-8d6d7b1f3abc",
|
||||||
|
"agent_version": "1.1.0"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Statusmodel
|
||||||
|
|
||||||
|
Deze velden moeten logisch gescheiden blijven:
|
||||||
|
|
||||||
|
- `last_seen`
|
||||||
|
Laatste succesvolle heartbeat van de agent.
|
||||||
|
- `status`
|
||||||
|
Afgeleide UI-status, bijvoorbeeld `online` of `offline`.
|
||||||
|
- `last_error`
|
||||||
|
Laatste connect- of browsefout richting agent.
|
||||||
|
- `reachable_at`
|
||||||
|
Laatste moment waarop een directe agent-call echt succesvol was.
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
|
||||||
|
- een heartbeat bepaalt niet automatisch dat elke browse-call werkt
|
||||||
|
- een enkele browse-timeout mag niet blind `last_seen` overschrijven
|
||||||
|
- status mag niet gaan flappen op basis van één los incident
|
||||||
|
|
||||||
|
### Aanbevolen statusregels
|
||||||
|
|
||||||
|
- `online` als `last_seen` recent is
|
||||||
|
- `offline` als heartbeat-timeout overschreden is
|
||||||
|
- extra foutdetails via `last_error`
|
||||||
|
- optioneel UI-label zoals `online with recent errors` later, maar niet nodig in V1.1
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Authenticatie en beveiliging
|
||||||
|
|
||||||
|
### Backend registratie-auth
|
||||||
|
|
||||||
|
Registratie vereist een bearer token.
|
||||||
|
|
||||||
|
Bijvoorbeeld:
|
||||||
|
|
||||||
|
- `Authorization: Bearer <registration-token>`
|
||||||
|
|
||||||
|
### Agent endpoint-auth
|
||||||
|
|
||||||
|
Alle agent-calls vereisen authenticatie. Niet alleen registratie.
|
||||||
|
|
||||||
|
Dus ook:
|
||||||
|
|
||||||
|
- `/health`
|
||||||
|
- `/api/list`
|
||||||
|
- `/api/info`
|
||||||
|
- `/api/read`
|
||||||
|
- `/api/download`
|
||||||
|
|
||||||
|
moeten beschermd zijn.
|
||||||
|
|
||||||
|
### V1.1 minimum
|
||||||
|
|
||||||
|
Voor V1.1 volstaat een eenvoudige gedeelde agent-token, bijvoorbeeld:
|
||||||
|
|
||||||
|
- WebManager bewaart een secret per client of per installatie
|
||||||
|
- backend stuurt dat token mee op elke agent-call
|
||||||
|
- agent weigert requests zonder geldig token
|
||||||
|
|
||||||
|
Voorbeeld:
|
||||||
|
|
||||||
|
- `Authorization: Bearer <agent-access-token>`
|
||||||
|
|
||||||
|
### Niet doen in V1.1
|
||||||
|
|
||||||
|
- open agent-HTTP API zonder auth
|
||||||
|
- browse/download endpoints publiek bereikbaar maken op het LAN
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Virtueel padmodel
|
||||||
|
|
||||||
|
Remote client shares krijgen een aparte namespace.
|
||||||
|
|
||||||
|
Voorstel voor de gebruikersweergave:
|
||||||
|
|
||||||
|
- `/Clients`
|
||||||
|
- `/Clients/<client-display>`
|
||||||
|
- `/Clients/<client-display>/<share-label>`
|
||||||
|
- `/Clients/<client-display>/<share-label>/subdir/file.ext`
|
||||||
|
|
||||||
|
Intern moet de backend dit mappen naar:
|
||||||
|
|
||||||
|
- `client_id`
|
||||||
|
- `share_key`
|
||||||
|
- relatief share-pad
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
|
||||||
|
- dit zijn logische WebManager-paden
|
||||||
|
- het zijn geen echte lokale backend filesystem-paden
|
||||||
|
- ze mogen niet door de bestaande lokale `PathGuard` resolved worden
|
||||||
|
|
||||||
|
### Consequentie voor de codebasis
|
||||||
|
|
||||||
|
`/Clients/...` moet vroeg in routing worden onderschept door een aparte browse- of source-facade.
|
||||||
|
|
||||||
|
Dus:
|
||||||
|
|
||||||
|
- niet de lokale `PathGuard` uitbreiden tot remote sources
|
||||||
|
- niet overal `if remote` in bestaande lokale services strooien
|
||||||
|
- wel een duidelijke scheiding tussen local source en remote source
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Share-validatie in de agent
|
||||||
|
|
||||||
|
De agent werkt niet met vrije absolute paden.
|
||||||
|
|
||||||
|
De agent heeft een vaste share-map, bijvoorbeeld:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"downloads": "/Users/jan/Downloads",
|
||||||
|
"movies": "/Users/jan/Movies",
|
||||||
|
"pictures": "/Users/jan/Pictures"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Een request bevat dan:
|
||||||
|
|
||||||
|
- `share = downloads`
|
||||||
|
- `path = Some/Subdir/file.txt`
|
||||||
|
|
||||||
|
Niet:
|
||||||
|
|
||||||
|
- `/Users/jan/...`
|
||||||
|
|
||||||
|
### Validatieregels
|
||||||
|
|
||||||
|
- onbekende `share` weigeren
|
||||||
|
- `..` weigeren
|
||||||
|
- pad resolven binnen de gekozen share-root
|
||||||
|
- symlink escape blokkeren
|
||||||
|
- alleen toegestane bestandshandelingen toestaan
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Read, preview en download limieten
|
||||||
|
|
||||||
|
V1.1 moet resource-grenzen expliciet vastleggen.
|
||||||
|
|
||||||
|
### Tekstpreview
|
||||||
|
|
||||||
|
- maximum grootte voor tekstpreview vastleggen
|
||||||
|
- voorstel: zelfde orde als huidige server-side preview/edit-limieten, of kleiner
|
||||||
|
- grote tekstbestanden niet volledig in memory laden voor preview
|
||||||
|
|
||||||
|
### Binary versus text
|
||||||
|
|
||||||
|
- agent moet tekstpreview alleen teruggeven voor ondersteunde teksttypes
|
||||||
|
- binaire content mag niet per ongeluk als tekst in JSON-responses worden gepusht
|
||||||
|
|
||||||
|
### Download
|
||||||
|
|
||||||
|
- downloads moeten gestreamd worden
|
||||||
|
- geen volledige bestand-buffering in memory
|
||||||
|
|
||||||
|
### Image preview
|
||||||
|
|
||||||
|
- alleen triviale image preview in V1.1
|
||||||
|
- geen zware thumbnail-pipeline in deze fase
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Offline gedrag
|
||||||
|
|
||||||
|
Dit is een harde eis.
|
||||||
|
|
||||||
|
WebManager mag niet vastlopen als de agent niet draait.
|
||||||
|
|
||||||
|
### Backendregels
|
||||||
|
|
||||||
|
- alle agent-calls krijgen korte timeouts, bijvoorbeeld 1-3 seconden
|
||||||
|
- connect- of timeoutfouten worden vertaald naar nette app-fouten
|
||||||
|
- offline agent blokkeert nooit globale pagina-initialisatie
|
||||||
|
- browse- en file-fouten blijven lokaal tot betreffende request
|
||||||
|
|
||||||
|
### Frontendregels
|
||||||
|
|
||||||
|
- `/Clients` mag laden, ook als sommige clients offline zijn
|
||||||
|
- offline clients mogen zichtbaar blijven in de lijst
|
||||||
|
- browsen in offline subtree toont foutmelding
|
||||||
|
- andere panes blijven bruikbaar
|
||||||
|
- geen endless spinner
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API-ontwerp
|
||||||
|
|
||||||
|
## 1. Backend registry endpoints
|
||||||
|
|
||||||
|
### `POST /api/clients/register`
|
||||||
|
|
||||||
|
Registreert of update een remote agent.
|
||||||
|
|
||||||
|
### `POST /api/clients/heartbeat`
|
||||||
|
|
||||||
|
Werkt `last_seen` bij.
|
||||||
|
|
||||||
|
### `GET /api/clients`
|
||||||
|
|
||||||
|
Geeft bekende clients terug met:
|
||||||
|
|
||||||
|
- `client_id`
|
||||||
|
- `display_name`
|
||||||
|
- `status`
|
||||||
|
- `last_seen`
|
||||||
|
- `last_error`
|
||||||
|
- `shares`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Backend browse facade voor UI
|
||||||
|
|
||||||
|
De frontend blijft praten met gewone WebManager-routes.
|
||||||
|
|
||||||
|
### `GET /api/browse?path=/Clients`
|
||||||
|
|
||||||
|
Geeft alle bekende clients terug als directories.
|
||||||
|
|
||||||
|
### `GET /api/browse?path=/Clients/<client>/`
|
||||||
|
|
||||||
|
Geeft shares van die client terug als directories.
|
||||||
|
|
||||||
|
### `GET /api/browse?path=/Clients/<client>/<share>/...`
|
||||||
|
|
||||||
|
Backend vertaalt dit naar een agent-call.
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
|
||||||
|
- browse facade bepaalt eerst of pad onder `/Clients` valt
|
||||||
|
- alleen niet-remote paden mogen daarna naar bestaande lokale browse-paths
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Agent endpoints
|
||||||
|
|
||||||
|
Eenvoudig houden. Geen shell.
|
||||||
|
|
||||||
|
### `GET /health`
|
||||||
|
|
||||||
|
Gezondheidscheck met auth.
|
||||||
|
|
||||||
|
### `GET /api/list?share=downloads&path=subdir`
|
||||||
|
|
||||||
|
Directory-inhoud binnen een share.
|
||||||
|
|
||||||
|
### `GET /api/info?share=downloads&path=file.txt`
|
||||||
|
|
||||||
|
Metadata.
|
||||||
|
|
||||||
|
### `GET /api/read?share=downloads&path=file.txt`
|
||||||
|
|
||||||
|
Tekstpreview.
|
||||||
|
|
||||||
|
### `GET /api/download?share=downloads&path=file.txt`
|
||||||
|
|
||||||
|
Gestreamde download.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Haalbaarheid
|
||||||
|
|
||||||
|
## Goed haalbaar in V1.1
|
||||||
|
|
||||||
|
- client registry
|
||||||
|
- heartbeat online/offline
|
||||||
|
- virtuele `Clients` root
|
||||||
|
- browse
|
||||||
|
- file info
|
||||||
|
- tekstpreview
|
||||||
|
- eenvoudige image preview
|
||||||
|
- gestreamde download
|
||||||
|
|
||||||
|
## Bewust uitgesteld
|
||||||
|
|
||||||
|
- rename
|
||||||
|
- mkdir
|
||||||
|
- delete
|
||||||
|
- upload
|
||||||
|
- bookmarks/startup paths
|
||||||
|
- cross-source copy
|
||||||
|
- cross-source move
|
||||||
|
- unified history
|
||||||
|
- task-runner integratie
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Veranderingen per gebied
|
||||||
|
|
||||||
|
## Backend
|
||||||
|
|
||||||
|
Nieuwe onderdelen:
|
||||||
|
|
||||||
|
- client registry repository
|
||||||
|
- client registry service
|
||||||
|
- routes voor register/heartbeat/list
|
||||||
|
- browse/source facade voor `Clients/...`
|
||||||
|
- agent HTTP client met harde timeouts en auth
|
||||||
|
|
||||||
|
Bestaande onderdelen die waarschijnlijk geraakt worden:
|
||||||
|
|
||||||
|
- [routes_browse.py](/workspace/webmanager-mvp/webui/backend/app/api/routes_browse.py)
|
||||||
|
Om `/Clients` vroeg te routeren.
|
||||||
|
- [dependencies.py](/workspace/webmanager-mvp/webui/backend/app/dependencies.py)
|
||||||
|
Voor nieuwe registry- en agent-services.
|
||||||
|
- [app/main.py](/workspace/webmanager-mvp/webui/backend/app/main.py)
|
||||||
|
Voor nieuwe routers.
|
||||||
|
|
||||||
|
Liever niet verbreden:
|
||||||
|
|
||||||
|
- [path_guard.py](/workspace/webmanager-mvp/webui/backend/app/security/path_guard.py)
|
||||||
|
Deze hoort lokaal filesystemgericht te blijven.
|
||||||
|
- [file_ops_service.py](/workspace/webmanager-mvp/webui/backend/app/services/file_ops_service.py)
|
||||||
|
Deze service is nu server-filesystemgericht en moet niet vervuild raken met remote transportlogica.
|
||||||
|
|
||||||
|
## Frontend
|
||||||
|
|
||||||
|
Waarschijnlijk aanpassen:
|
||||||
|
|
||||||
|
- [app.js](/workspace/webmanager-mvp/webui/html/app.js)
|
||||||
|
Voor:
|
||||||
|
- extra virtuele root
|
||||||
|
- render van clients en shares
|
||||||
|
- offline status
|
||||||
|
- source-aware browse/view/download/info flows
|
||||||
|
- [index.html](/workspace/webmanager-mvp/webui/html/index.html)
|
||||||
|
Alleen als extra statuslabels of clientindicatoren nodig zijn
|
||||||
|
|
||||||
|
## Remote agent
|
||||||
|
|
||||||
|
Te baseren op:
|
||||||
|
|
||||||
|
- [finder_commander/app/main.py](/workspace/webmanager-mvp/finder_commander/app/main.py)
|
||||||
|
- [finder_commander/run-local.sh](/workspace/webmanager-mvp/finder_commander/run-local.sh)
|
||||||
|
- [finder_commander/requirements.txt](/workspace/webmanager-mvp/finder_commander/requirements.txt)
|
||||||
|
|
||||||
|
Maar vereenvoudigd:
|
||||||
|
|
||||||
|
- geen shell command endpoint
|
||||||
|
- geen hele home-root
|
||||||
|
- alleen `share key + relatief pad`
|
||||||
|
- registratie en heartbeat toevoegen
|
||||||
|
- auth afdwingen op alle endpoints
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Minimale agent-config
|
||||||
|
|
||||||
|
Voorstel lokaal configbestand:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"webmanager_base_url": "https://webmanager.example.com",
|
||||||
|
"registration_token": "registration-secret",
|
||||||
|
"agent_access_token": "agent-secret",
|
||||||
|
"client_id": "f4b2c8f8-2b1b-4d89-9ed2-8d6d7b1f3abc",
|
||||||
|
"display_name": "MacBook Pro van Jan",
|
||||||
|
"shares": {
|
||||||
|
"downloads": "/Users/jan/Downloads",
|
||||||
|
"movies": "/Users/jan/Movies",
|
||||||
|
"pictures": "/Users/jan/Pictures"
|
||||||
|
},
|
||||||
|
"listen_host": "0.0.0.0",
|
||||||
|
"listen_port": 8765,
|
||||||
|
"public_endpoint": "http://192.168.1.25:8765"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Opmerking:
|
||||||
|
|
||||||
|
- `public_endpoint` is het endpoint dat WebManager gebruikt
|
||||||
|
- `listen_host` en `public_endpoint` hoeven niet identiek te zijn
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open keuzes die bewust zijn uitgesteld
|
||||||
|
|
||||||
|
Deze keuzes zijn echt later werk, niet meer V1.1:
|
||||||
|
|
||||||
|
- reverse-connect of tunnelmodel
|
||||||
|
- cross-source copy
|
||||||
|
- cross-source move
|
||||||
|
- bookmarks/startup paths voor `/Clients/...`
|
||||||
|
- write-acties op remote shares
|
||||||
|
- sterkere pairing of key rotation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Beslisadvies
|
||||||
|
|
||||||
|
Aanbevolen implementatievolgorde voor V1.1:
|
||||||
|
|
||||||
|
1. agent registry + heartbeat
|
||||||
|
2. virtuele `Clients` root in browse
|
||||||
|
3. online/offline status met gescheiden statusvelden
|
||||||
|
4. browse/info/preview/download voor remote shares
|
||||||
|
|
||||||
|
Niet in V1.1:
|
||||||
|
|
||||||
|
5. write-acties
|
||||||
|
6. bookmarks/startup paths
|
||||||
|
7. cross-source flows
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Samenvatting
|
||||||
|
|
||||||
|
De juiste V1.1-richting is:
|
||||||
|
|
||||||
|
- geen hele homefolder
|
||||||
|
- wel beperkte shares zoals `Downloads`, `Movies`, `Pictures`
|
||||||
|
- remote helper-agent op macOS
|
||||||
|
- agent registreert zichzelf bij WebManager
|
||||||
|
- WebManager bewaart `client_id`-geleide registry en status
|
||||||
|
- `/Clients` wordt een aparte virtuele bron
|
||||||
|
- remote paden blijven buiten lokale filesystem services
|
||||||
|
- alle agent-calls vereisen auth
|
||||||
|
- offline agents mogen nooit de rest van WebManager verstoren
|
||||||
|
|
||||||
|
Dit model is haalbaar, beperkt in scope, en houdt de bestaande lokale storage-architectuur schoon.
|
||||||
@@ -0,0 +1,177 @@
|
|||||||
|
# Theme Selection v1
|
||||||
|
|
||||||
|
## 1. Doel
|
||||||
|
Theme-selectie voegt nu waarde toe omdat de UI al een nette light/dark basis heeft, maar nog geen expliciet onderscheid maakt tussen:
|
||||||
|
- `theme`: de stijlset
|
||||||
|
- `mode`: light of dark binnen die stijlset
|
||||||
|
|
||||||
|
Dat onderscheid maakt de UI uitbreidbaar zonder de dagelijkse snelle UX kwijt te raken. Dit past logisch in de bestaande Settings-structuur:
|
||||||
|
- `General` voor functionele voorkeuren
|
||||||
|
- `Interface` voor theme-keuze
|
||||||
|
- `Logs` voor recente acties
|
||||||
|
|
||||||
|
## 2. Scope
|
||||||
|
Theme Selection v1 omvat:
|
||||||
|
- nieuw Settings-tabblad: `Interface`
|
||||||
|
- daarin alleen een pulldown/select: `Theme`
|
||||||
|
- bestaande snelle dark/light toggle blijft in de hoofdinterface bestaan
|
||||||
|
- beide keuzes worden opgeslagen in bestaande SQLite settings-opslag
|
||||||
|
- app leest beide waarden bij startup via backend en past die direct toe
|
||||||
|
|
||||||
|
Niet in scope:
|
||||||
|
- vrije CSS-bestandskeuze
|
||||||
|
- padinvoer
|
||||||
|
- upload van themes
|
||||||
|
- custom theme editor
|
||||||
|
- theme packs van externe bron
|
||||||
|
|
||||||
|
## 3. Theme-model
|
||||||
|
Aanbevolen model voor v1:
|
||||||
|
- werk met een whitelist van toegestane theme keys
|
||||||
|
- werk daarnaast met een aparte whitelist van toegestane color modes
|
||||||
|
- sla beide als strings op in settings
|
||||||
|
|
||||||
|
Aanbevolen settings voor v1:
|
||||||
|
- `selected_theme: string | null`
|
||||||
|
- `selected_color_mode: string | null`
|
||||||
|
|
||||||
|
Whitelist v1:
|
||||||
|
- `selected_theme`
|
||||||
|
- `default`
|
||||||
|
- `selected_color_mode`
|
||||||
|
- `dark`
|
||||||
|
- `light`
|
||||||
|
|
||||||
|
Waarom dit veiliger en eenvoudiger is dan bestandsselectie:
|
||||||
|
- geen vrije filesystemtoegang nodig
|
||||||
|
- geen risico op ongeldige of kwaadaardige CSS-inhoud
|
||||||
|
- geen extra upload- of assetbeheer
|
||||||
|
- duidelijke validatie in backend mogelijk
|
||||||
|
- stabiel contract tussen backend setting en frontend rendering
|
||||||
|
|
||||||
|
## 4. Settings-opslag
|
||||||
|
Nieuwe settings in bestaande settings-opslag:
|
||||||
|
- `selected_theme`
|
||||||
|
- `selected_color_mode`
|
||||||
|
|
||||||
|
Semantiek:
|
||||||
|
- `selected_theme = null` betekent: fallback naar veilige default `default`
|
||||||
|
- `selected_color_mode = null` betekent: fallback naar veilige default `dark`
|
||||||
|
- onbekende opgeslagen waarden betekenen: negeren en fallback toepassen
|
||||||
|
|
||||||
|
Aanbevolen effectieve defaults:
|
||||||
|
- theme -> `default`
|
||||||
|
- mode -> `dark`
|
||||||
|
|
||||||
|
## 5. Settings UI
|
||||||
|
Tabs in Settings worden:
|
||||||
|
- `General`
|
||||||
|
- `Interface`
|
||||||
|
- `Logs`
|
||||||
|
|
||||||
|
`Interface` bevat in v1 alleen:
|
||||||
|
- label: `Theme`
|
||||||
|
- een select/pulldown met toegestane themes
|
||||||
|
|
||||||
|
Belangrijk:
|
||||||
|
- geen dark/light selector in `Settings > Interface`
|
||||||
|
- dark/light blijft een snelle hoofdinterface-actie
|
||||||
|
|
||||||
|
Aanbevolen v1-UX:
|
||||||
|
- select toont huidige theme-keuze
|
||||||
|
- gebruiker kiest andere waarde
|
||||||
|
- opslaan gebeurt via bestaande settings-saveflow
|
||||||
|
- keuze wordt direct toegepast in de UI na succesvolle backend-save
|
||||||
|
|
||||||
|
## 6. Frontend-impact
|
||||||
|
De frontend moet bij startup vroeg de settings laden en daaruit beide waarden ophalen:
|
||||||
|
- `selected_theme`
|
||||||
|
- `selected_color_mode`
|
||||||
|
|
||||||
|
Daarna bepaalt de frontend het effectieve UI-theme. Aanbevolen intern model:
|
||||||
|
- `data-theme="default-dark"`
|
||||||
|
- `data-theme="default-light"`
|
||||||
|
|
||||||
|
Aanbevolen volgorde:
|
||||||
|
1. `GET /api/settings`
|
||||||
|
2. bepaal effectief theme + mode
|
||||||
|
3. zet `document.documentElement.dataset.theme`
|
||||||
|
4. initialiseer de rest van de UI
|
||||||
|
|
||||||
|
Relatie met bestaande light/dark toggle:
|
||||||
|
- toggle blijft bestaan in de hoofdinterface
|
||||||
|
- toggle wijzigt alleen `selected_color_mode`
|
||||||
|
- toggle schrijft dus naar backend, niet naar localStorage
|
||||||
|
|
||||||
|
Reden:
|
||||||
|
- snelle dagelijkse UX blijft behouden
|
||||||
|
- `Settings > Interface` blijft schoon en beperkt tot theme-keuze
|
||||||
|
- theme en mode blijven conceptueel gescheiden
|
||||||
|
|
||||||
|
## 7. Backend-impact
|
||||||
|
Bestaande settings-API wordt uitgebreid met:
|
||||||
|
- `selected_theme`
|
||||||
|
- `selected_color_mode`
|
||||||
|
|
||||||
|
Benodigd:
|
||||||
|
- whitelistvalidatie op backend
|
||||||
|
- onbekende waarden blokkeren bij write
|
||||||
|
- bestaande settings repository/service/API uitbreiden
|
||||||
|
|
||||||
|
Niet nodig:
|
||||||
|
- nieuwe dependency
|
||||||
|
- vrije filesystemtoegang
|
||||||
|
- nieuwe asset-uploadroute
|
||||||
|
|
||||||
|
## 8. Regressierisico
|
||||||
|
Belangrijkste risico's:
|
||||||
|
- startup-volgorde: theme moet vroeg genoeg worden toegepast om flicker te beperken
|
||||||
|
- bestaande theme-toggle logica conflicteert nu nog met localStorage
|
||||||
|
- onbekende opgeslagen theme/mode-waarden moeten veilig terugvallen
|
||||||
|
- Settings-tabcomplexiteit mag niet onnodig toenemen
|
||||||
|
|
||||||
|
Belangrijkste mitigaties:
|
||||||
|
- één centrale frontendfunctie die theme en mode uit backend toepast
|
||||||
|
- localStorage volledig verwijderen als leidende theme-bron
|
||||||
|
- backend whitelistvalidatie
|
||||||
|
- fallback naar `default-dark`
|
||||||
|
|
||||||
|
## 9. Teststrategie
|
||||||
|
Backend golden tests:
|
||||||
|
- default `selected_theme`
|
||||||
|
- default `selected_color_mode`
|
||||||
|
- geldige theme save (`default`)
|
||||||
|
- geldige color mode save (`dark`, `light`)
|
||||||
|
- ongeldige theme key wordt geblokkeerd
|
||||||
|
- ongeldige color mode wordt geblokkeerd
|
||||||
|
- settings response bevat beide velden
|
||||||
|
|
||||||
|
UI smoke/regressietests:
|
||||||
|
- `Settings` bevat tabs `General`, `Interface`, `Logs`
|
||||||
|
- `Interface` tab bevat alleen theme select
|
||||||
|
- hoofdinterface bevat nog steeds dark/light toggle
|
||||||
|
- app leest beide settings via backend
|
||||||
|
- fallback bij ontbrekende/ongeldige waarde breekt startup niet
|
||||||
|
|
||||||
|
Handmatige validatie:
|
||||||
|
- theme wijzigen in `Settings > Interface`
|
||||||
|
- mode wisselen via toggle in de hoofdinterface
|
||||||
|
- app herladen en controleren dat beide keuzes behouden blijven
|
||||||
|
- controle dat light/dark correct doorwerken in modals, panelen en editor/viewers
|
||||||
|
|
||||||
|
## 10. Aanbeveling
|
||||||
|
Aanbevolen v1-richting met laag regressierisico:
|
||||||
|
- voeg `Interface` tab toe
|
||||||
|
- voeg `selected_theme` en `selected_color_mode` toe aan bestaande settings-opslag
|
||||||
|
- werk alleen met veilige whitelists
|
||||||
|
- houd v1 beperkt tot:
|
||||||
|
- theme: `default`
|
||||||
|
- mode: `dark|light`
|
||||||
|
- laat startup en toggle beide backendpersistente settings gebruiken
|
||||||
|
- fallback altijd veilig naar `default-dark`
|
||||||
|
|
||||||
|
Deze richting is:
|
||||||
|
- simpel
|
||||||
|
- veilig
|
||||||
|
- onderhoudbaar
|
||||||
|
- duidelijk uitbreidbaar naar extra themes, zonder de dagelijkse dark/light UX opnieuw te moeten ontwerpen
|
||||||
@@ -0,0 +1,252 @@
|
|||||||
|
1 analyse
|
||||||
|
|
||||||
|
De repo heeft al een bruikbaar taskmodel voor copy, move, download en duplicate, maar de main WebUI gebruikt dat model voor copy/move nog nauwelijks. In de hoofd-UI ziet de gebruiker na start nu vooral een korte statusregel of summary; live voortgang staat feitelijk alleen in `F1 > Settings > Logs`. Daardoor ontbreekt directe, persistente feedback in de hoofd-UI en is er geen zichtbare rem op dubbel starten.
|
||||||
|
|
||||||
|
Belangrijkste conclusie:
|
||||||
|
|
||||||
|
- Copy en move hebben al echte backend-tasks met progressvelden.
|
||||||
|
- De bron van truth voor lopende copy/move-taken is al `/api/tasks`.
|
||||||
|
- Er bestaat nu geen cancel/abort voor copy of move.
|
||||||
|
- Een eerlijke abortknop voor copy/move kan dus nu niet frontend-only worden toegevoegd.
|
||||||
|
- De kleinste veilige stap is een compacte live task-indicator in de bestaande header/toolbar-zone, gevoed door de bestaande task-feed.
|
||||||
|
|
||||||
|
2 bestaande functionaliteit
|
||||||
|
|
||||||
|
A. Taskmodel / backend
|
||||||
|
|
||||||
|
- `copy` en `move` gebruiken hetzelfde taskmechanisme via [tasks_runner.py](/workspace/webmanager-mvp/webui/backend/app/tasks_runner.py), [task_repository.py](/workspace/webmanager-mvp/webui/backend/app/db/task_repository.py), [copy_task_service.py](/workspace/webmanager-mvp/webui/backend/app/services/copy_task_service.py) en [move_task_service.py](/workspace/webmanager-mvp/webui/backend/app/services/move_task_service.py).
|
||||||
|
- Taskstatussen die al bestaan in [task_repository.py](/workspace/webmanager-mvp/webui/backend/app/db/task_repository.py):
|
||||||
|
- `queued`
|
||||||
|
- `running`
|
||||||
|
- `completed`
|
||||||
|
- `failed`
|
||||||
|
- daarnaast voor download ook `requested`, `preparing`, `ready`, `cancelled`
|
||||||
|
- Progressinformatie bestaat al:
|
||||||
|
- files: `done_bytes`, `total_bytes`, `current_item`
|
||||||
|
- batch/directory: `done_items`, `total_items`, `current_item`
|
||||||
|
- Copy:
|
||||||
|
- file copy gebruikt byte-progress callback
|
||||||
|
- directory copy is grof: `0/1` naar `1/1`
|
||||||
|
- batch copy gebruikt item-progress
|
||||||
|
- Move:
|
||||||
|
- same-root file move heeft praktisch geen tussentijdse progress, alleen start/einde
|
||||||
|
- cross-root file move gebruikt copy-progress en delete na afloop
|
||||||
|
- directory move is grof `0/1` naar `1/1`
|
||||||
|
- batch move gebruikt item-progress
|
||||||
|
- Er is al read-API voor tasks:
|
||||||
|
- `GET /api/tasks`
|
||||||
|
- `GET /api/tasks/{task_id}`
|
||||||
|
- Er is geen cancel-API voor copy/move.
|
||||||
|
- De enige echte cancel in de repo zit nu bij archive-downloads in [archive_download_task_service.py](/workspace/webmanager-mvp/webui/backend/app/services/archive_download_task_service.py) en `POST /api/files/download/archive/{task_id}/cancel`.
|
||||||
|
- Copy/move workers in [tasks_runner.py](/workspace/webmanager-mvp/webui/backend/app/tasks_runner.py) hebben geen cooperative cancel checks.
|
||||||
|
- Copy/move history bestaat al via [history_repository.py](/workspace/webmanager-mvp/webui/backend/app/db/history_repository.py): `queued`, `completed`, `failed`.
|
||||||
|
|
||||||
|
B. Bestaande frontend feedback
|
||||||
|
|
||||||
|
- In de hoofd-UI starten copy en move vanuit [app.js](/workspace/webmanager-mvp/webui/html/app.js):
|
||||||
|
- `startCopySelected()`
|
||||||
|
- `executeMoveSelection()`
|
||||||
|
- Huidige feedback voor copy/move:
|
||||||
|
- `setStatus(...)` onderin/headerstatus
|
||||||
|
- `showActionSummary(...)`
|
||||||
|
- `openFeedbackModal(...)` via `actions-error`
|
||||||
|
- Die feedback is niet persistent als live taskweergave.
|
||||||
|
- Er is nu geen compacte taskindicator in de hoofd-UI.
|
||||||
|
- `state.selectedTaskId` en `refreshTasksSnapshot()` bestaan al in [app.js](/workspace/webmanager-mvp/webui/html/app.js), maar worden voor copy/move alleen gebruikt om een snapshotcount op te halen; er is geen zichtbare hoofd-UI-component die dit toont.
|
||||||
|
- Buiten download is er geen modal of popover voor actieve taken in de hoofd-UI.
|
||||||
|
|
||||||
|
C. Logs / history / settings
|
||||||
|
|
||||||
|
- `F1 > Settings > Logs` toont al twee side-by-side secties:
|
||||||
|
- `Tasks`
|
||||||
|
- `History`
|
||||||
|
- Deze UI gebruikt al de bestaande feeds:
|
||||||
|
- `/api/tasks`
|
||||||
|
- `/api/history`
|
||||||
|
- Polling bestaat al in [app.js](/workspace/webmanager-mvp/webui/html/app.js):
|
||||||
|
- `loadTasksForSettings()`
|
||||||
|
- `loadHistoryForSettings()`
|
||||||
|
- `loadLogsAndTasksForSettings()`
|
||||||
|
- `scheduleSettingsLogsPolling()`
|
||||||
|
- De UI rendert taskdetails al compact via `formatTaskLine(task)`:
|
||||||
|
- status
|
||||||
|
- source/destination
|
||||||
|
- `done_items/total_items`
|
||||||
|
- `current_item`
|
||||||
|
- Dat betekent dat de repo al een bruikbare frontend formatteringslaag heeft die ook buiten Settings herbruikbaar is.
|
||||||
|
|
||||||
|
D. Abort/cancel haalbaarheid
|
||||||
|
|
||||||
|
- Copy/move kunnen nu technisch niet veilig worden afgebroken via bestaande code.
|
||||||
|
- Er is geen taskstatus-overgang of API-contract voor copy/move-cancel.
|
||||||
|
- Er is geen cooperative worker-check in copy/move loops.
|
||||||
|
- Er is geen rollback.
|
||||||
|
- Eerlijke cancelsemantiek voor copy/move zou dus moeten zijn:
|
||||||
|
- stop resterende verwerking zo snel mogelijk op een checkpunt
|
||||||
|
- reeds verwerkte bestanden blijven zoals ze zijn
|
||||||
|
- geen rollback
|
||||||
|
- Maar die semantiek is nog niet geïmplementeerd.
|
||||||
|
- Conclusie: een abortknop voor copy/move is nu buiten scope zonder backendwerk.
|
||||||
|
|
||||||
|
3 scope
|
||||||
|
|
||||||
|
Minimale veilige volgende stap, op basis van wat al bestaat:
|
||||||
|
|
||||||
|
- frontend-only hoofd-UI verbetering
|
||||||
|
- geen layoutwijziging van de dual-pane browse-UI
|
||||||
|
- geen nieuw vast paneel
|
||||||
|
- wel een compacte task/status chip in bestaande headerbar of function-bar zone
|
||||||
|
- alleen zichtbaar als er actieve taken zijn (`queued`, `running`, en eventueel download `requested/preparing`)
|
||||||
|
- klik opent een kleine popover/dropdown met actieve taken
|
||||||
|
- popover hergebruikt bestaande taskdata en formattering uit `/api/tasks`
|
||||||
|
- popover bevat link/actie naar `F1 > Settings > Logs`
|
||||||
|
- geen abortknop voor copy/move in deze fase
|
||||||
|
|
||||||
|
Waarom dit binnen scope past:
|
||||||
|
|
||||||
|
- gebruikt bestaande task-feed
|
||||||
|
- gebruikt bestaande taaksemantiek
|
||||||
|
- verandert de hoofd-layout niet
|
||||||
|
- geeft persistente feedback zonder modal-first patroon
|
||||||
|
- is compatibel met de OneDrive-achtige richting: compacte indicator, detail op aanvraag
|
||||||
|
|
||||||
|
4 impact
|
||||||
|
|
||||||
|
Positief:
|
||||||
|
|
||||||
|
- gebruiker ziet direct in de hoofd-UI dat copy/move loopt
|
||||||
|
- feedback blijft zichtbaar zolang taak actief is
|
||||||
|
- minder kans op dubbel starten
|
||||||
|
- geen extra structureel paneel
|
||||||
|
- F1 Logs blijft intact als detailbron
|
||||||
|
|
||||||
|
Beperkingen:
|
||||||
|
|
||||||
|
- zonder backendwerk is er nog geen eerlijke cancel voor copy/move
|
||||||
|
- progress blijft zo nauwkeurig als bestaande taskdata toelaat
|
||||||
|
- same-root move en directory move blijven qua progress relatief grof
|
||||||
|
|
||||||
|
5 risico
|
||||||
|
|
||||||
|
Laag tot middel als alleen de voorgestelde frontendstap wordt gebouwd.
|
||||||
|
|
||||||
|
Belangrijkste risico’s:
|
||||||
|
|
||||||
|
- polling in de hoofd-UI kan onrustig worden als hij niet net zo stabiel wordt gebouwd als de bestaande Settings-polling
|
||||||
|
- een te opvallende indicator kan visueel concurreren met de bestaande headerstatus
|
||||||
|
- als een abortknop zonder backendsteun zou worden toegevoegd, zou dat misleidend zijn; dat moet expliciet niet gebeuren
|
||||||
|
|
||||||
|
Expliciet risico buiten scope:
|
||||||
|
|
||||||
|
- copy/move-cancel vereist backend-aanpassing aan taskmodel, runner en waarschijnlijk history
|
||||||
|
|
||||||
|
6 testplan
|
||||||
|
|
||||||
|
Voor de minimale frontendstap:
|
||||||
|
|
||||||
|
- gerichte UI smoke/golden checks voor:
|
||||||
|
- indicator aanwezig in header/toolbar markup
|
||||||
|
- indicator alleen bedoeld voor actieve taken
|
||||||
|
- popover/dropdown markup aanwezig
|
||||||
|
- link naar bestaande logs-entrypoint aanwezig
|
||||||
|
- gerichte JS-checks voor:
|
||||||
|
- actieve taken worden uit `/api/tasks` gefilterd
|
||||||
|
- `queued`/`running` tonen indicator
|
||||||
|
- `completed`/`failed` verdwijnen uit de actieve indicator
|
||||||
|
- polling start/stop logisch zonder extra layoutreset
|
||||||
|
- geen backend golden updates nodig zolang `/api/tasks` contract ongewijzigd blijft
|
||||||
|
|
||||||
|
Niet nu testen:
|
||||||
|
|
||||||
|
- abort voor copy/move, want die functionaliteit bestaat nog niet
|
||||||
|
|
||||||
|
7 acceptatiecriteria
|
||||||
|
|
||||||
|
Voor de voorgestelde minimale stap:
|
||||||
|
|
||||||
|
- Een gestart copy- of move-proces is zichtbaar in de hoofd-UI zonder navigatie naar `F1 > Settings > Logs`.
|
||||||
|
- De oplossing verandert de dual-panel layout niet structureel.
|
||||||
|
- De feedback blijft zichtbaar zolang de taak actief is.
|
||||||
|
- De oplossing gebruikt bestaande taskdata als bron van truth.
|
||||||
|
- Er wordt geen fake progress getoond.
|
||||||
|
- Er wordt geen fake cancelknop getoond voor copy/move.
|
||||||
|
- Bestaande task/log/history-functionaliteit blijft intact.
|
||||||
|
- API-contract blijft ongewijzigd.
|
||||||
|
|
||||||
|
Voor abort/cancel:
|
||||||
|
|
||||||
|
- Niet acceptabel in deze fase zonder backendsteun.
|
||||||
|
- Eerst aparte backendfase nodig.
|
||||||
|
|
||||||
|
8 codex-uitvoering / voorstel
|
||||||
|
|
||||||
|
Huidige stap:
|
||||||
|
|
||||||
|
- Alleen analyse uitgevoerd.
|
||||||
|
- Geen functionele implementatie gedaan.
|
||||||
|
|
||||||
|
Waarom:
|
||||||
|
|
||||||
|
- `CHANGE_POLICY.md` zegt dat frontend flow aanpassen eerst een voorstel nodig heeft.
|
||||||
|
- De opdracht vroeg expliciet om eerst grondige repo-inspectie en pas daarna een minimaal voorstel.
|
||||||
|
- Cancel/abort voor copy/move is niet eerlijk implementeerbaar zonder backendwerk.
|
||||||
|
|
||||||
|
Minimaal wijzigingsvoorstel dat ik hierna zou uitvoeren als vervolgstap:
|
||||||
|
|
||||||
|
1. Frontend-only compacte task chip
|
||||||
|
- plaats in `#title-zone-actions` of direct naast `#status`
|
||||||
|
- toont bijvoorbeeld:
|
||||||
|
- `1 task running`
|
||||||
|
- `3 active tasks`
|
||||||
|
|
||||||
|
2. Kleine popover/dropdown
|
||||||
|
- opent op klik op de chip
|
||||||
|
- toont alleen actieve taken uit `/api/tasks`
|
||||||
|
- hergebruikt bestaande `formatTaskLine(task)` of een kleine variant daarop
|
||||||
|
- toont eerlijke status:
|
||||||
|
- `queued`
|
||||||
|
- `running`
|
||||||
|
- eventueel later download `requested/preparing`
|
||||||
|
|
||||||
|
3. Polling hergebruik
|
||||||
|
- hergebruik bestaande `/api/tasks`
|
||||||
|
- implementeer lichte polling alleen als er actieve taken zijn of als de popover open is
|
||||||
|
- gebruik stabiele rerender-aanpak zoals in Settings > Logs
|
||||||
|
|
||||||
|
4. Doorgang naar detail
|
||||||
|
- knop of link `View in Logs`
|
||||||
|
- opent bestaande `F1 > Settings > Logs`
|
||||||
|
|
||||||
|
5. Expliciet nog niet doen
|
||||||
|
- geen cancelknop voor copy/move
|
||||||
|
- geen extra paneel
|
||||||
|
- geen fake progressbar
|
||||||
|
|
||||||
|
Vervolgvoorstel voor latere backendfase als abort gewenst is:
|
||||||
|
|
||||||
|
- copy/move taskstatus uitbreiden met `cancelled`
|
||||||
|
- cancel-endpoint voor copy/move
|
||||||
|
- cooperative checks in `TaskRunner` tussen items/chunks
|
||||||
|
- eerlijke semantiek:
|
||||||
|
- stop resterende verwerking
|
||||||
|
- reeds verwerkte bestanden blijven bestaan
|
||||||
|
- geen rollback
|
||||||
|
|
||||||
|
9 gewijzigde bestanden
|
||||||
|
|
||||||
|
- [project_docs/UI_FEEDACK.md](/workspace/webmanager-mvp/project_docs/UI_FEEDACK.md)
|
||||||
|
|
||||||
|
10 uitgevoerde tests
|
||||||
|
|
||||||
|
Wel gedaan:
|
||||||
|
|
||||||
|
- code-inspectie van backend taskmodel, runners, services, routes en frontend task/log UI
|
||||||
|
|
||||||
|
Niet gedaan:
|
||||||
|
|
||||||
|
- geen functionele tests
|
||||||
|
- geen implementatiechecks
|
||||||
|
|
||||||
|
Reden:
|
||||||
|
|
||||||
|
- deze stap is bewust alleen analyse + voorstel, geen implementatie
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -14,4 +14,9 @@ async def copy_file(
|
|||||||
request: CopyRequest,
|
request: CopyRequest,
|
||||||
service: CopyTaskService = Depends(get_copy_task_service),
|
service: CopyTaskService = Depends(get_copy_task_service),
|
||||||
) -> TaskCreateResponse:
|
) -> TaskCreateResponse:
|
||||||
|
if request.sources is not None:
|
||||||
|
return service.create_batch_copy_task(
|
||||||
|
sources=request.sources,
|
||||||
|
destination_base=request.destination_base,
|
||||||
|
)
|
||||||
return service.create_copy_task(source=request.source, destination=request.destination)
|
return service.create_copy_task(source=request.source, destination=request.destination)
|
||||||
|
|||||||
@@ -0,0 +1,17 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
|
||||||
|
from backend.app.api.schemas import DuplicateRequest, TaskCreateResponse
|
||||||
|
from backend.app.dependencies import get_duplicate_task_service
|
||||||
|
from backend.app.services.duplicate_task_service import DuplicateTaskService
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/files")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/duplicate", response_model=TaskCreateResponse, status_code=202)
|
||||||
|
async def duplicate_paths(
|
||||||
|
request: DuplicateRequest,
|
||||||
|
service: DuplicateTaskService = Depends(get_duplicate_task_service),
|
||||||
|
) -> TaskCreateResponse:
|
||||||
|
return service.create_duplicate_task(paths=request.paths)
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, Request
|
from fastapi import APIRouter, Depends, File, Form, Query, Request, UploadFile
|
||||||
from fastapi.responses import StreamingResponse
|
from fastapi.responses import StreamingResponse
|
||||||
|
from starlette.background import BackgroundTask
|
||||||
|
|
||||||
from backend.app.api.schemas import DeleteRequest, DeleteResponse, FileInfoResponse, MkdirRequest, MkdirResponse, RenameRequest, RenameResponse, SaveRequest, SaveResponse, ViewResponse
|
from backend.app.api.schemas import ArchivePrepareRequest, DeleteRequest, FileInfoResponse, MkdirRequest, MkdirResponse, RenameRequest, RenameResponse, SaveRequest, SaveResponse, TaskCreateResponse, TaskDetailResponse, UploadResponse, ViewResponse
|
||||||
from backend.app.dependencies import get_file_ops_service
|
from backend.app.dependencies import get_archive_download_task_service, get_delete_task_service, get_file_ops_service
|
||||||
|
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
|
||||||
|
from backend.app.services.delete_task_service import DeleteTaskService
|
||||||
from backend.app.services.file_ops_service import FileOpsService
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
|
||||||
router = APIRouter(prefix="/files")
|
router = APIRouter(prefix="/files")
|
||||||
@@ -26,12 +29,24 @@ async def rename(
|
|||||||
return service.rename(path=request.path, new_name=request.new_name)
|
return service.rename(path=request.path, new_name=request.new_name)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/delete", response_model=DeleteResponse)
|
@router.post("/delete", response_model=TaskCreateResponse, status_code=202)
|
||||||
async def delete(
|
async def delete(
|
||||||
request: DeleteRequest,
|
request: DeleteRequest,
|
||||||
|
service: DeleteTaskService = Depends(get_delete_task_service),
|
||||||
|
) -> TaskCreateResponse:
|
||||||
|
if request.paths is not None:
|
||||||
|
return service.create_batch_delete_task(paths=request.paths, recursive_paths=request.recursive_paths or [])
|
||||||
|
return service.create_delete_task(path=request.path, recursive=request.recursive)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/upload", response_model=UploadResponse)
|
||||||
|
async def upload(
|
||||||
|
target_path: str = Form(...),
|
||||||
|
overwrite: bool = Form(False),
|
||||||
|
file: UploadFile = File(...),
|
||||||
service: FileOpsService = Depends(get_file_ops_service),
|
service: FileOpsService = Depends(get_file_ops_service),
|
||||||
) -> DeleteResponse:
|
) -> UploadResponse:
|
||||||
return service.delete(path=request.path)
|
return service.upload(target_path=target_path, upload_file=file, overwrite=overwrite)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/view", response_model=ViewResponse)
|
@router.get("/view", response_model=ViewResponse)
|
||||||
@@ -51,6 +66,51 @@ async def info(
|
|||||||
return service.info(path=path)
|
return service.info(path=path)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/download")
|
||||||
|
async def download(
|
||||||
|
path: list[str] = Query(...),
|
||||||
|
service: FileOpsService = Depends(get_file_ops_service),
|
||||||
|
) -> StreamingResponse:
|
||||||
|
prepared = service.prepare_download(paths=path)
|
||||||
|
response = StreamingResponse(
|
||||||
|
prepared["content"],
|
||||||
|
headers=prepared["headers"],
|
||||||
|
media_type=prepared["content_type"],
|
||||||
|
)
|
||||||
|
if prepared.get("cleanup"):
|
||||||
|
response.background = BackgroundTask(prepared["cleanup"])
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/download/archive-prepare", response_model=TaskCreateResponse, status_code=202)
|
||||||
|
async def archive_prepare(
|
||||||
|
request: ArchivePrepareRequest,
|
||||||
|
service: ArchiveDownloadTaskService = Depends(get_archive_download_task_service),
|
||||||
|
) -> TaskCreateResponse:
|
||||||
|
return service.create_archive_prepare_task(paths=request.paths)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/download/archive/{task_id}")
|
||||||
|
async def archive_download(
|
||||||
|
task_id: str,
|
||||||
|
service: ArchiveDownloadTaskService = Depends(get_archive_download_task_service),
|
||||||
|
) -> StreamingResponse:
|
||||||
|
prepared = service.prepare_ready_archive_download(task_id=task_id)
|
||||||
|
return StreamingResponse(
|
||||||
|
prepared["content"],
|
||||||
|
headers=prepared["headers"],
|
||||||
|
media_type=prepared["content_type"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/download/archive/{task_id}/cancel", response_model=TaskDetailResponse)
|
||||||
|
async def archive_cancel(
|
||||||
|
task_id: str,
|
||||||
|
service: ArchiveDownloadTaskService = Depends(get_archive_download_task_service),
|
||||||
|
) -> TaskDetailResponse:
|
||||||
|
return TaskDetailResponse(**service.cancel_archive_prepare_task(task_id=task_id))
|
||||||
|
|
||||||
|
|
||||||
@router.get("/video")
|
@router.get("/video")
|
||||||
async def video(
|
async def video(
|
||||||
path: str,
|
path: str,
|
||||||
@@ -79,6 +139,19 @@ async def pdf(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/image")
|
||||||
|
async def image(
|
||||||
|
path: str,
|
||||||
|
service: FileOpsService = Depends(get_file_ops_service),
|
||||||
|
) -> StreamingResponse:
|
||||||
|
prepared = service.prepare_image_stream(path=path)
|
||||||
|
return StreamingResponse(
|
||||||
|
prepared["content"],
|
||||||
|
headers=prepared["headers"],
|
||||||
|
media_type=prepared["content_type"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/thumbnail")
|
@router.get("/thumbnail")
|
||||||
async def thumbnail(
|
async def thumbnail(
|
||||||
path: str,
|
path: str,
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
from fastapi import APIRouter, Depends, status
|
||||||
|
|
||||||
from backend.app.api.schemas import TaskDetailResponse, TaskListResponse
|
from backend.app.api.schemas import TaskDetailResponse, TaskListResponse
|
||||||
from backend.app.dependencies import get_task_service
|
from backend.app.dependencies import get_task_service
|
||||||
@@ -17,3 +17,8 @@ async def list_tasks(service: TaskService = Depends(get_task_service)) -> TaskLi
|
|||||||
@router.get("/{task_id}", response_model=TaskDetailResponse)
|
@router.get("/{task_id}", response_model=TaskDetailResponse)
|
||||||
async def get_task(task_id: str, service: TaskService = Depends(get_task_service)) -> TaskDetailResponse:
|
async def get_task(task_id: str, service: TaskService = Depends(get_task_service)) -> TaskDetailResponse:
|
||||||
return service.get_task(task_id)
|
return service.get_task(task_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{task_id}/cancel", response_model=TaskDetailResponse, status_code=status.HTTP_200_OK)
|
||||||
|
async def cancel_task(task_id: str, service: TaskService = Depends(get_task_service)) -> TaskDetailResponse:
|
||||||
|
return service.cancel_task(task_id)
|
||||||
|
|||||||
@@ -51,13 +51,22 @@ class RenameResponse(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class DeleteRequest(BaseModel):
|
class DeleteRequest(BaseModel):
|
||||||
path: str
|
path: str | None = None
|
||||||
|
recursive: bool = False
|
||||||
|
paths: list[str] | None = None
|
||||||
|
recursive_paths: list[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
class DeleteResponse(BaseModel):
|
class DeleteResponse(BaseModel):
|
||||||
path: str
|
path: str
|
||||||
|
|
||||||
|
|
||||||
|
class UploadResponse(BaseModel):
|
||||||
|
path: str
|
||||||
|
size: int
|
||||||
|
modified: str
|
||||||
|
|
||||||
|
|
||||||
class ViewResponse(BaseModel):
|
class ViewResponse(BaseModel):
|
||||||
path: str
|
path: str
|
||||||
name: str
|
name: str
|
||||||
@@ -81,6 +90,10 @@ class SaveResponse(BaseModel):
|
|||||||
modified: str
|
modified: str
|
||||||
|
|
||||||
|
|
||||||
|
class ArchivePrepareRequest(BaseModel):
|
||||||
|
paths: list[str]
|
||||||
|
|
||||||
|
|
||||||
class FileInfoResponse(BaseModel):
|
class FileInfoResponse(BaseModel):
|
||||||
name: str
|
name: str
|
||||||
path: str
|
path: str
|
||||||
@@ -92,18 +105,33 @@ class FileInfoResponse(BaseModel):
|
|||||||
content_type: str | None = None
|
content_type: str | None = None
|
||||||
owner: str | None = None
|
owner: str | None = None
|
||||||
group: str | None = None
|
group: str | None = None
|
||||||
|
width: int | None = None
|
||||||
|
height: int | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class ZipDownloadLimitsResponse(BaseModel):
|
||||||
|
max_items: int
|
||||||
|
max_total_input_bytes: int
|
||||||
|
max_individual_file_bytes: int
|
||||||
|
scan_timeout_seconds: float
|
||||||
|
symlink_policy: str
|
||||||
|
|
||||||
|
|
||||||
class SettingsResponse(BaseModel):
|
class SettingsResponse(BaseModel):
|
||||||
show_thumbnails: bool
|
show_thumbnails: bool
|
||||||
preferred_startup_path_left: str | None = None
|
preferred_startup_path_left: str | None = None
|
||||||
preferred_startup_path_right: str | None = None
|
preferred_startup_path_right: str | None = None
|
||||||
|
selected_theme: str
|
||||||
|
selected_color_mode: str
|
||||||
|
zip_download_limits: ZipDownloadLimitsResponse
|
||||||
|
|
||||||
|
|
||||||
class SettingsUpdateRequest(BaseModel):
|
class SettingsUpdateRequest(BaseModel):
|
||||||
show_thumbnails: bool | None = None
|
show_thumbnails: bool | None = None
|
||||||
preferred_startup_path_left: str | None = None
|
preferred_startup_path_left: str | None = None
|
||||||
preferred_startup_path_right: str | None = None
|
preferred_startup_path_right: str | None = None
|
||||||
|
selected_theme: str | None = None
|
||||||
|
selected_color_mode: str | None = None
|
||||||
|
|
||||||
|
|
||||||
class TaskListItem(BaseModel):
|
class TaskListItem(BaseModel):
|
||||||
@@ -140,8 +168,14 @@ class TaskDetailResponse(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class CopyRequest(BaseModel):
|
class CopyRequest(BaseModel):
|
||||||
source: str
|
source: str | None = None
|
||||||
destination: str
|
destination: str | None = None
|
||||||
|
sources: list[str] | None = None
|
||||||
|
destination_base: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicateRequest(BaseModel):
|
||||||
|
paths: list[str]
|
||||||
|
|
||||||
|
|
||||||
class TaskCreateResponse(BaseModel):
|
class TaskCreateResponse(BaseModel):
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
@@ -6,8 +6,8 @@ from contextlib import contextmanager
|
|||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
VALID_HISTORY_STATUSES = {"queued", "completed", "failed"}
|
VALID_HISTORY_STATUSES = {"queued", "completed", "failed", "requested", "ready", "preflight_failed", "cancelled"}
|
||||||
VALID_HISTORY_OPERATIONS = {"mkdir", "rename", "delete", "copy", "move"}
|
VALID_HISTORY_OPERATIONS = {"mkdir", "rename", "delete", "copy", "move", "upload", "download", "duplicate"}
|
||||||
|
|
||||||
|
|
||||||
class HistoryRepository:
|
class HistoryRepository:
|
||||||
@@ -119,6 +119,27 @@ class HistoryRepository:
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def reconcile_entries_failed(
|
||||||
|
self,
|
||||||
|
entry_ids: list[str],
|
||||||
|
*,
|
||||||
|
error_code: str = "task_interrupted",
|
||||||
|
error_message: str = "Task was interrupted before completion",
|
||||||
|
) -> None:
|
||||||
|
if not entry_ids:
|
||||||
|
return
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
placeholders = ", ".join("?" for _ in entry_ids)
|
||||||
|
with self._connection() as conn:
|
||||||
|
conn.execute(
|
||||||
|
f"""
|
||||||
|
UPDATE history
|
||||||
|
SET status = ?, error_code = ?, error_message = ?, finished_at = ?
|
||||||
|
WHERE id IN ({placeholders})
|
||||||
|
""",
|
||||||
|
("failed", error_code, error_message, finished_at, *entry_ids),
|
||||||
|
)
|
||||||
|
|
||||||
def _ensure_schema(self) -> None:
|
def _ensure_schema(self) -> None:
|
||||||
db_path = Path(self._db_path)
|
db_path = Path(self._db_path)
|
||||||
if db_path.parent and str(db_path.parent) not in {"", "."}:
|
if db_path.parent and str(db_path.parent) not in {"", "."}:
|
||||||
|
|||||||
@@ -6,8 +6,9 @@ from contextlib import contextmanager
|
|||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
VALID_STATUSES = {"queued", "running", "completed", "failed"}
|
VALID_STATUSES = {"queued", "running", "cancelling", "completed", "failed", "requested", "preparing", "ready", "cancelled"}
|
||||||
VALID_OPERATIONS = {"copy", "move"}
|
VALID_OPERATIONS = {"copy", "move", "download", "duplicate", "delete"}
|
||||||
|
NON_TERMINAL_STATUSES = ("queued", "running", "cancelling", "requested", "preparing")
|
||||||
TASK_MIGRATION_COLUMNS: dict[str, str] = {
|
TASK_MIGRATION_COLUMNS: dict[str, str] = {
|
||||||
"operation": "TEXT NOT NULL DEFAULT 'copy'",
|
"operation": "TEXT NOT NULL DEFAULT 'copy'",
|
||||||
"status": "TEXT NOT NULL DEFAULT 'queued'",
|
"status": "TEXT NOT NULL DEFAULT 'queued'",
|
||||||
@@ -32,9 +33,18 @@ class TaskRepository:
|
|||||||
self._db_path = db_path
|
self._db_path = db_path
|
||||||
self._ensure_schema()
|
self._ensure_schema()
|
||||||
|
|
||||||
def create_task(self, operation: str, source: str, destination: str, task_id: str | None = None) -> dict:
|
def create_task(
|
||||||
|
self,
|
||||||
|
operation: str,
|
||||||
|
source: str,
|
||||||
|
destination: str,
|
||||||
|
task_id: str | None = None,
|
||||||
|
status: str = "queued",
|
||||||
|
) -> dict:
|
||||||
if operation not in VALID_OPERATIONS:
|
if operation not in VALID_OPERATIONS:
|
||||||
raise ValueError("invalid operation")
|
raise ValueError("invalid operation")
|
||||||
|
if status not in VALID_STATUSES:
|
||||||
|
raise ValueError("invalid status")
|
||||||
|
|
||||||
task_id = task_id or str(uuid.uuid4())
|
task_id = task_id or str(uuid.uuid4())
|
||||||
created_at = self._now_iso()
|
created_at = self._now_iso()
|
||||||
@@ -52,7 +62,7 @@ class TaskRepository:
|
|||||||
(
|
(
|
||||||
task_id,
|
task_id,
|
||||||
operation,
|
operation,
|
||||||
"queued",
|
status,
|
||||||
source,
|
source,
|
||||||
destination,
|
destination,
|
||||||
None,
|
None,
|
||||||
@@ -133,17 +143,37 @@ class TaskRepository:
|
|||||||
done_items: int | None = None,
|
done_items: int | None = None,
|
||||||
total_items: int | None = None,
|
total_items: int | None = None,
|
||||||
current_item: str | None = None,
|
current_item: str | None = None,
|
||||||
) -> None:
|
) -> bool:
|
||||||
started_at = self._now_iso()
|
started_at = self._now_iso()
|
||||||
with self._connection() as conn:
|
with self._connection() as conn:
|
||||||
conn.execute(
|
cursor = conn.execute(
|
||||||
"""
|
"""
|
||||||
UPDATE tasks
|
UPDATE tasks
|
||||||
SET status = ?, started_at = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?, current_item = ?
|
SET status = ?, started_at = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?, current_item = ?
|
||||||
WHERE id = ?
|
WHERE id = ? AND status = ?
|
||||||
""",
|
""",
|
||||||
("running", started_at, done_bytes, total_bytes, done_items, total_items, current_item, task_id),
|
("running", started_at, done_bytes, total_bytes, done_items, total_items, current_item, task_id, "queued"),
|
||||||
)
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
def mark_preparing(
|
||||||
|
self,
|
||||||
|
task_id: str,
|
||||||
|
done_items: int | None = None,
|
||||||
|
total_items: int | None = None,
|
||||||
|
current_item: str | None = None,
|
||||||
|
) -> bool:
|
||||||
|
started_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
cursor = conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, started_at = COALESCE(started_at, ?), done_items = ?, total_items = ?, current_item = ?
|
||||||
|
WHERE id = ? AND status = ?
|
||||||
|
""",
|
||||||
|
("preparing", started_at, done_items, total_items, current_item, task_id, "requested"),
|
||||||
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
def update_progress(
|
def update_progress(
|
||||||
self,
|
self,
|
||||||
@@ -171,17 +201,36 @@ class TaskRepository:
|
|||||||
total_bytes: int | None = None,
|
total_bytes: int | None = None,
|
||||||
done_items: int | None = None,
|
done_items: int | None = None,
|
||||||
total_items: int | None = None,
|
total_items: int | None = None,
|
||||||
) -> None:
|
) -> bool:
|
||||||
finished_at = self._now_iso()
|
finished_at = self._now_iso()
|
||||||
with self._connection() as conn:
|
with self._connection() as conn:
|
||||||
conn.execute(
|
cursor = conn.execute(
|
||||||
"""
|
"""
|
||||||
UPDATE tasks
|
UPDATE tasks
|
||||||
SET status = ?, finished_at = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?
|
SET status = ?, finished_at = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?, current_item = NULL
|
||||||
WHERE id = ?
|
WHERE id = ? AND status = ?
|
||||||
""",
|
""",
|
||||||
("completed", finished_at, done_bytes, total_bytes, done_items, total_items, task_id),
|
("completed", finished_at, done_bytes, total_bytes, done_items, total_items, task_id, "running"),
|
||||||
)
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
def mark_ready(
|
||||||
|
self,
|
||||||
|
task_id: str,
|
||||||
|
done_items: int | None = None,
|
||||||
|
total_items: int | None = None,
|
||||||
|
) -> bool:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
cursor = conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, done_items = ?, total_items = ?, current_item = NULL
|
||||||
|
WHERE id = ? AND status = ?
|
||||||
|
""",
|
||||||
|
("ready", finished_at, done_items, total_items, task_id, "preparing"),
|
||||||
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
def mark_failed(
|
def mark_failed(
|
||||||
self,
|
self,
|
||||||
@@ -216,6 +265,97 @@ class TaskRepository:
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def mark_failed_if_not_cancelled(
|
||||||
|
self,
|
||||||
|
task_id: str,
|
||||||
|
error_code: str,
|
||||||
|
error_message: str,
|
||||||
|
failed_item: str | None,
|
||||||
|
done_bytes: int | None,
|
||||||
|
total_bytes: int | None,
|
||||||
|
done_items: int | None = None,
|
||||||
|
total_items: int | None = None,
|
||||||
|
) -> bool:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
cursor = conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, error_code = ?, error_message = ?, failed_item = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?, current_item = NULL
|
||||||
|
WHERE id = ? AND status != ?
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
"failed",
|
||||||
|
finished_at,
|
||||||
|
error_code,
|
||||||
|
error_message,
|
||||||
|
failed_item,
|
||||||
|
done_bytes,
|
||||||
|
total_bytes,
|
||||||
|
done_items,
|
||||||
|
total_items,
|
||||||
|
task_id,
|
||||||
|
"cancelled",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
def mark_cancelled(self, task_id: str) -> bool:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
cursor = conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, current_item = NULL
|
||||||
|
WHERE id = ? AND status IN (?, ?)
|
||||||
|
""",
|
||||||
|
("cancelled", finished_at, task_id, "requested", "preparing"),
|
||||||
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
|
def request_cancellation(self, task_id: str) -> dict | None:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, current_item = NULL
|
||||||
|
WHERE id = ? AND status = ?
|
||||||
|
""",
|
||||||
|
("cancelled", finished_at, task_id, "queued"),
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?
|
||||||
|
WHERE id = ? AND status = ?
|
||||||
|
""",
|
||||||
|
("cancelling", task_id, "running"),
|
||||||
|
)
|
||||||
|
row = conn.execute("SELECT * FROM tasks WHERE id = ?", (task_id,)).fetchone()
|
||||||
|
return self._to_dict(row) if row else None
|
||||||
|
|
||||||
|
def finalize_cancelled(
|
||||||
|
self,
|
||||||
|
task_id: str,
|
||||||
|
*,
|
||||||
|
done_bytes: int | None = None,
|
||||||
|
total_bytes: int | None = None,
|
||||||
|
done_items: int | None = None,
|
||||||
|
total_items: int | None = None,
|
||||||
|
) -> bool:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
cursor = conn.execute(
|
||||||
|
"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, done_bytes = ?, total_bytes = ?, done_items = ?, total_items = ?, current_item = NULL
|
||||||
|
WHERE id = ? AND status IN (?, ?)
|
||||||
|
""",
|
||||||
|
("cancelled", finished_at, done_bytes, total_bytes, done_items, total_items, task_id, "cancelling", "queued"),
|
||||||
|
)
|
||||||
|
return cursor.rowcount > 0
|
||||||
|
|
||||||
def _ensure_schema(self) -> None:
|
def _ensure_schema(self) -> None:
|
||||||
db_path = Path(self._db_path)
|
db_path = Path(self._db_path)
|
||||||
if db_path.parent and str(db_path.parent) not in {"", "."}:
|
if db_path.parent and str(db_path.parent) not in {"", "."}:
|
||||||
@@ -244,14 +384,100 @@ class TaskRepository:
|
|||||||
)
|
)
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS task_artifacts (
|
||||||
|
task_id TEXT PRIMARY KEY,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
file_name TEXT NOT NULL,
|
||||||
|
expires_at TEXT NOT NULL,
|
||||||
|
created_at TEXT NOT NULL
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"""
|
"""
|
||||||
CREATE INDEX IF NOT EXISTS idx_tasks_created_at_desc
|
CREATE INDEX IF NOT EXISTS idx_tasks_created_at_desc
|
||||||
ON tasks(created_at DESC)
|
ON tasks(created_at DESC)
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_task_artifacts_expires_at
|
||||||
|
ON task_artifacts(expires_at ASC)
|
||||||
|
"""
|
||||||
|
)
|
||||||
self._migrate_tasks_columns(conn)
|
self._migrate_tasks_columns(conn)
|
||||||
|
|
||||||
|
def upsert_artifact(self, *, task_id: str, file_path: str, file_name: str, expires_at: str) -> dict:
|
||||||
|
created_at = self._now_iso()
|
||||||
|
with self._connection() as conn:
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO task_artifacts (task_id, file_path, file_name, expires_at, created_at)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
ON CONFLICT(task_id) DO UPDATE SET
|
||||||
|
file_path = excluded.file_path,
|
||||||
|
file_name = excluded.file_name,
|
||||||
|
expires_at = excluded.expires_at
|
||||||
|
""",
|
||||||
|
(task_id, file_path, file_name, expires_at, created_at),
|
||||||
|
)
|
||||||
|
row = conn.execute("SELECT * FROM task_artifacts WHERE task_id = ?", (task_id,)).fetchone()
|
||||||
|
return self._artifact_to_dict(row)
|
||||||
|
|
||||||
|
def get_artifact(self, task_id: str) -> dict | None:
|
||||||
|
with self._connection() as conn:
|
||||||
|
row = conn.execute("SELECT * FROM task_artifacts WHERE task_id = ?", (task_id,)).fetchone()
|
||||||
|
return self._artifact_to_dict(row) if row else None
|
||||||
|
|
||||||
|
def list_artifacts(self) -> list[dict]:
|
||||||
|
with self._connection() as conn:
|
||||||
|
rows = conn.execute("SELECT * FROM task_artifacts ORDER BY created_at ASC").fetchall()
|
||||||
|
return [self._artifact_to_dict(row) for row in rows]
|
||||||
|
|
||||||
|
def delete_artifact(self, task_id: str) -> None:
|
||||||
|
with self._connection() as conn:
|
||||||
|
conn.execute("DELETE FROM task_artifacts WHERE task_id = ?", (task_id,))
|
||||||
|
|
||||||
|
def reconcile_incomplete_tasks(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
error_code: str = "task_interrupted",
|
||||||
|
error_message: str = "Task was interrupted before completion",
|
||||||
|
) -> list[str]:
|
||||||
|
finished_at = self._now_iso()
|
||||||
|
placeholders = ", ".join("?" for _ in NON_TERMINAL_STATUSES)
|
||||||
|
with self._connection() as conn:
|
||||||
|
rows = conn.execute(
|
||||||
|
f"""
|
||||||
|
SELECT id
|
||||||
|
FROM tasks
|
||||||
|
WHERE status IN ({placeholders})
|
||||||
|
""",
|
||||||
|
NON_TERMINAL_STATUSES,
|
||||||
|
).fetchall()
|
||||||
|
task_ids = [row["id"] for row in rows]
|
||||||
|
if not task_ids:
|
||||||
|
return []
|
||||||
|
task_placeholders = ", ".join("?" for _ in task_ids)
|
||||||
|
conn.execute(
|
||||||
|
f"""
|
||||||
|
UPDATE tasks
|
||||||
|
SET status = ?, finished_at = ?, error_code = ?, error_message = ?, current_item = NULL
|
||||||
|
WHERE id IN ({task_placeholders})
|
||||||
|
""",
|
||||||
|
("failed", finished_at, error_code, error_message, *task_ids),
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
f"""
|
||||||
|
DELETE FROM task_artifacts
|
||||||
|
WHERE task_id IN ({task_placeholders})
|
||||||
|
""",
|
||||||
|
task_ids,
|
||||||
|
)
|
||||||
|
return task_ids
|
||||||
|
|
||||||
def _migrate_tasks_columns(self, conn: sqlite3.Connection) -> None:
|
def _migrate_tasks_columns(self, conn: sqlite3.Connection) -> None:
|
||||||
rows = conn.execute("PRAGMA table_info(tasks)").fetchall()
|
rows = conn.execute("PRAGMA table_info(tasks)").fetchall()
|
||||||
existing_columns = {row["name"] for row in rows}
|
existing_columns = {row["name"] for row in rows}
|
||||||
@@ -298,6 +524,16 @@ class TaskRepository:
|
|||||||
"finished_at": row["finished_at"],
|
"finished_at": row["finished_at"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _artifact_to_dict(row: sqlite3.Row) -> dict:
|
||||||
|
return {
|
||||||
|
"task_id": row["task_id"],
|
||||||
|
"file_path": row["file_path"],
|
||||||
|
"file_name": row["file_name"],
|
||||||
|
"expires_at": row["expires_at"],
|
||||||
|
"created_at": row["created_at"],
|
||||||
|
}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _now_iso() -> str:
|
def _now_iso() -> str:
|
||||||
return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from backend.app.config import Settings, get_settings
|
from backend.app.config import Settings, get_settings
|
||||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||||
@@ -12,6 +13,9 @@ from backend.app.security.path_guard import PathGuard
|
|||||||
from backend.app.services.bookmark_service import BookmarkService
|
from backend.app.services.bookmark_service import BookmarkService
|
||||||
from backend.app.services.browse_service import BrowseService
|
from backend.app.services.browse_service import BrowseService
|
||||||
from backend.app.services.copy_task_service import CopyTaskService
|
from backend.app.services.copy_task_service import CopyTaskService
|
||||||
|
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
|
||||||
|
from backend.app.services.delete_task_service import DeleteTaskService
|
||||||
|
from backend.app.services.duplicate_task_service import DuplicateTaskService
|
||||||
from backend.app.services.file_ops_service import FileOpsService
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
from backend.app.services.history_service import HistoryService
|
from backend.app.services.history_service import HistoryService
|
||||||
from backend.app.services.move_task_service import MoveTaskService
|
from backend.app.services.move_task_service import MoveTaskService
|
||||||
@@ -64,6 +68,12 @@ def get_task_runner() -> TaskRunner:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def get_archive_artifact_root() -> str:
|
||||||
|
settings: Settings = get_settings()
|
||||||
|
return str(Path(settings.task_db_path).resolve().parent / "archive_tmp")
|
||||||
|
|
||||||
|
|
||||||
async def get_browse_service() -> BrowseService:
|
async def get_browse_service() -> BrowseService:
|
||||||
return BrowseService(path_guard=get_path_guard(), filesystem=get_filesystem_adapter())
|
return BrowseService(path_guard=get_path_guard(), filesystem=get_filesystem_adapter())
|
||||||
|
|
||||||
@@ -76,8 +86,23 @@ async def get_file_ops_service() -> FileOpsService:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_archive_download_task_service() -> ArchiveDownloadTaskService:
|
||||||
|
return ArchiveDownloadTaskService(
|
||||||
|
path_guard=get_path_guard(),
|
||||||
|
repository=get_task_repository(),
|
||||||
|
runner=get_task_runner(),
|
||||||
|
history_repository=get_history_repository(),
|
||||||
|
file_ops_service=FileOpsService(
|
||||||
|
path_guard=get_path_guard(),
|
||||||
|
filesystem=get_filesystem_adapter(),
|
||||||
|
history_repository=get_history_repository(),
|
||||||
|
),
|
||||||
|
artifact_root=Path(get_archive_artifact_root()),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
async def get_task_service() -> TaskService:
|
async def get_task_service() -> TaskService:
|
||||||
return TaskService(repository=get_task_repository())
|
return TaskService(repository=get_task_repository(), history_repository=get_history_repository())
|
||||||
|
|
||||||
|
|
||||||
async def get_copy_task_service() -> CopyTaskService:
|
async def get_copy_task_service() -> CopyTaskService:
|
||||||
@@ -89,6 +114,24 @@ async def get_copy_task_service() -> CopyTaskService:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_delete_task_service() -> DeleteTaskService:
|
||||||
|
return DeleteTaskService(
|
||||||
|
path_guard=get_path_guard(),
|
||||||
|
repository=get_task_repository(),
|
||||||
|
runner=get_task_runner(),
|
||||||
|
history_repository=get_history_repository(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_duplicate_task_service() -> DuplicateTaskService:
|
||||||
|
return DuplicateTaskService(
|
||||||
|
path_guard=get_path_guard(),
|
||||||
|
repository=get_task_repository(),
|
||||||
|
runner=get_task_runner(),
|
||||||
|
history_repository=get_history_repository(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
async def get_move_task_service() -> MoveTaskService:
|
async def get_move_task_service() -> MoveTaskService:
|
||||||
return MoveTaskService(
|
return MoveTaskService(
|
||||||
path_guard=get_path_guard(),
|
path_guard=get_path_guard(),
|
||||||
|
|||||||
Binary file not shown.
@@ -2,6 +2,7 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import shutil
|
import shutil
|
||||||
import mimetypes
|
import mimetypes
|
||||||
|
import struct
|
||||||
import grp
|
import grp
|
||||||
import pwd
|
import pwd
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
@@ -23,6 +24,7 @@ class FilesystemAdapter:
|
|||||||
group = None
|
group = None
|
||||||
|
|
||||||
content_type, _ = mimetypes.guess_type(path.name)
|
content_type, _ = mimetypes.guess_type(path.name)
|
||||||
|
width, height = self._image_dimensions(path) if path.is_file() else (None, None)
|
||||||
return {
|
return {
|
||||||
"name": path.name,
|
"name": path.name,
|
||||||
"size": int(stat.st_size) if path.is_file() else None,
|
"size": int(stat.st_size) if path.is_file() else None,
|
||||||
@@ -31,6 +33,8 @@ class FilesystemAdapter:
|
|||||||
"group": group,
|
"group": group,
|
||||||
"content_type": content_type,
|
"content_type": content_type,
|
||||||
"extension": path.suffix.lower() or None,
|
"extension": path.suffix.lower() or None,
|
||||||
|
"width": width,
|
||||||
|
"height": height,
|
||||||
}
|
}
|
||||||
|
|
||||||
def list_directory(self, directory: Path, show_hidden: bool) -> tuple[list[dict], list[dict]]:
|
def list_directory(self, directory: Path, show_hidden: bool) -> tuple[list[dict], list[dict]]:
|
||||||
@@ -100,6 +104,9 @@ class FilesystemAdapter:
|
|||||||
def delete_empty_directory(self, path: Path) -> None:
|
def delete_empty_directory(self, path: Path) -> None:
|
||||||
path.rmdir()
|
path.rmdir()
|
||||||
|
|
||||||
|
def delete_directory_recursive(self, path: Path) -> None:
|
||||||
|
shutil.rmtree(path)
|
||||||
|
|
||||||
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
||||||
src = Path(source)
|
src = Path(source)
|
||||||
dst = Path(destination)
|
dst = Path(destination)
|
||||||
@@ -113,6 +120,9 @@ class FilesystemAdapter:
|
|||||||
on_progress(out_f.tell())
|
on_progress(out_f.tell())
|
||||||
shutil.copystat(src, dst, follow_symlinks=False)
|
shutil.copystat(src, dst, follow_symlinks=False)
|
||||||
|
|
||||||
|
def copy_directory(self, source: str, destination: str) -> None:
|
||||||
|
shutil.copytree(source, destination, symlinks=True, copy_function=shutil.copy2)
|
||||||
|
|
||||||
def read_text_preview(self, path: Path, max_bytes: int, encoding: str = "utf-8") -> dict:
|
def read_text_preview(self, path: Path, max_bytes: int, encoding: str = "utf-8") -> dict:
|
||||||
size = int(path.stat().st_size)
|
size = int(path.stat().st_size)
|
||||||
limit = max_bytes + 1
|
limit = max_bytes + 1
|
||||||
@@ -136,6 +146,19 @@ class FilesystemAdapter:
|
|||||||
"modified": self.modified_iso(path),
|
"modified": self.modified_iso(path),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def write_uploaded_file(self, path: Path, file_stream, chunk_size: int = 1024 * 1024, overwrite: bool = False) -> dict:
|
||||||
|
mode = "wb" if overwrite else "xb"
|
||||||
|
with path.open(mode) as handle:
|
||||||
|
while True:
|
||||||
|
chunk = file_stream.read(chunk_size)
|
||||||
|
if not chunk:
|
||||||
|
break
|
||||||
|
handle.write(chunk)
|
||||||
|
return {
|
||||||
|
"size": int(path.stat().st_size),
|
||||||
|
"modified": self.modified_iso(path),
|
||||||
|
}
|
||||||
|
|
||||||
async def stream_file_range(self, path: Path, start: int, end: int, chunk_size: int = 1024 * 1024):
|
async def stream_file_range(self, path: Path, start: int, end: int, chunk_size: int = 1024 * 1024):
|
||||||
with path.open("rb") as handle:
|
with path.open("rb") as handle:
|
||||||
handle.seek(start)
|
handle.seek(start)
|
||||||
@@ -159,3 +182,114 @@ class FilesystemAdapter:
|
|||||||
def modified_iso(path: Path) -> str:
|
def modified_iso(path: Path) -> str:
|
||||||
stat = path.stat()
|
stat = path.stat()
|
||||||
return datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
return datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||||
|
|
||||||
|
def _image_dimensions(self, path: Path) -> tuple[int | None, int | None]:
|
||||||
|
suffix = path.suffix.lower()
|
||||||
|
try:
|
||||||
|
if suffix == ".png":
|
||||||
|
return self._png_dimensions(path)
|
||||||
|
if suffix in {".jpg", ".jpeg"}:
|
||||||
|
return self._jpeg_dimensions(path)
|
||||||
|
if suffix == ".gif":
|
||||||
|
return self._gif_dimensions(path)
|
||||||
|
if suffix == ".bmp":
|
||||||
|
return self._bmp_dimensions(path)
|
||||||
|
if suffix == ".webp":
|
||||||
|
return self._webp_dimensions(path)
|
||||||
|
if suffix == ".avif":
|
||||||
|
return self._avif_dimensions(path)
|
||||||
|
except (OSError, ValueError, struct.error):
|
||||||
|
return None, None
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _png_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
header = handle.read(24)
|
||||||
|
if len(header) < 24 or header[:8] != b"\x89PNG\r\n\x1a\n":
|
||||||
|
return None, None
|
||||||
|
return struct.unpack(">II", header[16:24])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _jpeg_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
if handle.read(2) != b"\xff\xd8":
|
||||||
|
return None, None
|
||||||
|
while True:
|
||||||
|
marker_prefix = handle.read(1)
|
||||||
|
if not marker_prefix:
|
||||||
|
return None, None
|
||||||
|
if marker_prefix != b"\xff":
|
||||||
|
continue
|
||||||
|
marker = handle.read(1)
|
||||||
|
while marker == b"\xff":
|
||||||
|
marker = handle.read(1)
|
||||||
|
if not marker or marker in {b"\xd8", b"\xd9"}:
|
||||||
|
return None, None
|
||||||
|
segment_length_bytes = handle.read(2)
|
||||||
|
if len(segment_length_bytes) != 2:
|
||||||
|
return None, None
|
||||||
|
segment_length = struct.unpack(">H", segment_length_bytes)[0]
|
||||||
|
if segment_length < 2:
|
||||||
|
return None, None
|
||||||
|
if marker in {b"\xc0", b"\xc1", b"\xc2", b"\xc3", b"\xc5", b"\xc6", b"\xc7", b"\xc9", b"\xca", b"\xcb", b"\xcd", b"\xce", b"\xcf"}:
|
||||||
|
payload = handle.read(5)
|
||||||
|
if len(payload) != 5:
|
||||||
|
return None, None
|
||||||
|
height, width = struct.unpack(">HH", payload[1:5])
|
||||||
|
return width, height
|
||||||
|
handle.seek(segment_length - 2, 1)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _gif_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
header = handle.read(10)
|
||||||
|
if len(header) < 10 or header[:6] not in {b"GIF87a", b"GIF89a"}:
|
||||||
|
return None, None
|
||||||
|
width, height = struct.unpack("<HH", header[6:10])
|
||||||
|
return width, height
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _bmp_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
header = handle.read(26)
|
||||||
|
if len(header) < 26 or header[:2] != b"BM":
|
||||||
|
return None, None
|
||||||
|
width, height = struct.unpack("<ii", header[18:26])
|
||||||
|
return abs(width), abs(height)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _webp_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
header = handle.read(64)
|
||||||
|
if len(header) < 30 or header[:4] != b"RIFF" or header[8:12] != b"WEBP":
|
||||||
|
return None, None
|
||||||
|
chunk = header[12:16]
|
||||||
|
if chunk == b"VP8 " and len(header) >= 30:
|
||||||
|
width, height = struct.unpack("<HH", header[26:30])
|
||||||
|
return width & 0x3FFF, height & 0x3FFF
|
||||||
|
if chunk == b"VP8L" and len(header) >= 25:
|
||||||
|
bits = struct.unpack("<I", header[21:25])[0]
|
||||||
|
width = (bits & 0x3FFF) + 1
|
||||||
|
height = ((bits >> 14) & 0x3FFF) + 1
|
||||||
|
return width, height
|
||||||
|
if chunk == b"VP8X" and len(header) >= 30:
|
||||||
|
width = 1 + int.from_bytes(header[24:27], "little")
|
||||||
|
height = 1 + int.from_bytes(header[27:30], "little")
|
||||||
|
return width, height
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _avif_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||||
|
with path.open("rb") as handle:
|
||||||
|
data = handle.read(256 * 1024)
|
||||||
|
if b"ftypavif" not in data and b"ftypavis" not in data:
|
||||||
|
return None, None
|
||||||
|
index = data.find(b"ispe")
|
||||||
|
if index == -1 or index + 20 > len(data):
|
||||||
|
return None, None
|
||||||
|
width = int.from_bytes(data[index + 12:index + 16], "big")
|
||||||
|
height = int.from_bytes(data[index + 16:index + 20], "big")
|
||||||
|
if width <= 0 or height <= 0:
|
||||||
|
return None, None
|
||||||
|
return width, height
|
||||||
|
|||||||
@@ -10,13 +10,16 @@ from backend.app.api.errors import AppError
|
|||||||
from backend.app.api.routes_bookmarks import router as bookmarks_router
|
from backend.app.api.routes_bookmarks import router as bookmarks_router
|
||||||
from backend.app.api.routes_browse import router as browse_router
|
from backend.app.api.routes_browse import router as browse_router
|
||||||
from backend.app.api.routes_copy import router as copy_router
|
from backend.app.api.routes_copy import router as copy_router
|
||||||
|
from backend.app.api.routes_duplicate import router as duplicate_router
|
||||||
from backend.app.api.routes_files import router as files_router
|
from backend.app.api.routes_files import router as files_router
|
||||||
from backend.app.api.routes_history import router as history_router
|
from backend.app.api.routes_history import router as history_router
|
||||||
from backend.app.api.routes_move import router as move_router
|
from backend.app.api.routes_move import router as move_router
|
||||||
from backend.app.api.routes_search import router as search_router
|
from backend.app.api.routes_search import router as search_router
|
||||||
from backend.app.api.routes_settings import router as settings_router
|
from backend.app.api.routes_settings import router as settings_router
|
||||||
from backend.app.api.routes_tasks import router as tasks_router
|
from backend.app.api.routes_tasks import router as tasks_router
|
||||||
|
from backend.app.dependencies import get_history_repository, get_task_repository
|
||||||
from backend.app.logging import configure_logging
|
from backend.app.logging import configure_logging
|
||||||
|
from backend.app.services.task_recovery_service import reconcile_persisted_incomplete_tasks
|
||||||
|
|
||||||
configure_logging()
|
configure_logging()
|
||||||
|
|
||||||
@@ -30,6 +33,7 @@ app.mount("/ui", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
|
|||||||
app.include_router(browse_router, prefix="/api")
|
app.include_router(browse_router, prefix="/api")
|
||||||
app.include_router(files_router, prefix="/api")
|
app.include_router(files_router, prefix="/api")
|
||||||
app.include_router(copy_router, prefix="/api")
|
app.include_router(copy_router, prefix="/api")
|
||||||
|
app.include_router(duplicate_router, prefix="/api")
|
||||||
app.include_router(move_router, prefix="/api")
|
app.include_router(move_router, prefix="/api")
|
||||||
app.include_router(search_router, prefix="/api")
|
app.include_router(search_router, prefix="/api")
|
||||||
app.include_router(settings_router, prefix="/api")
|
app.include_router(settings_router, prefix="/api")
|
||||||
@@ -38,6 +42,14 @@ app.include_router(history_router, prefix="/api")
|
|||||||
app.include_router(tasks_router, prefix="/api")
|
app.include_router(tasks_router, prefix="/api")
|
||||||
|
|
||||||
|
|
||||||
|
@app.on_event("startup")
|
||||||
|
async def reconcile_incomplete_tasks_on_startup() -> None:
|
||||||
|
reconcile_persisted_incomplete_tasks(
|
||||||
|
task_repository=get_task_repository(),
|
||||||
|
history_repository=get_history_repository(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.exception_handler(AppError)
|
@app.exception_handler(AppError)
|
||||||
async def handle_app_error(_: Request, exc: AppError) -> JSONResponse:
|
async def handle_app_error(_: Request, exc: AppError) -> JSONResponse:
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,363 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
import zipfile
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from backend.app.api.errors import AppError
|
||||||
|
from backend.app.api.schemas import TaskCreateResponse
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
ARCHIVE_DOWNLOAD_TTL_SECONDS = 30 * 60
|
||||||
|
|
||||||
|
|
||||||
|
class ArchivePrepareCancelled(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ArchiveDownloadTaskService:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
path_guard: PathGuard,
|
||||||
|
repository: TaskRepository,
|
||||||
|
runner: TaskRunner,
|
||||||
|
history_repository: HistoryRepository | None,
|
||||||
|
file_ops_service: FileOpsService,
|
||||||
|
artifact_root: Path,
|
||||||
|
artifact_ttl_seconds: int = ARCHIVE_DOWNLOAD_TTL_SECONDS,
|
||||||
|
):
|
||||||
|
self._path_guard = path_guard
|
||||||
|
self._repository = repository
|
||||||
|
self._runner = runner
|
||||||
|
self._history_repository = history_repository
|
||||||
|
self._file_ops_service = file_ops_service
|
||||||
|
self._artifact_root = artifact_root
|
||||||
|
self._artifact_ttl_seconds = artifact_ttl_seconds
|
||||||
|
self._artifact_root.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.sweep_artifacts()
|
||||||
|
|
||||||
|
def create_archive_prepare_task(self, paths: list[str]) -> TaskCreateResponse:
|
||||||
|
if not paths:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="At least one path is required",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.sweep_artifacts()
|
||||||
|
resolved_targets = [self._path_guard.resolve_existing_path(path) for path in paths]
|
||||||
|
mode = self._file_ops_service._download_mode_from_resolved_targets(resolved_targets)
|
||||||
|
if mode == "single_file":
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Single file downloads must use direct download",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
summary = self._file_ops_service._summarize_download_targets([target.relative for target in resolved_targets])
|
||||||
|
archive_name = self._file_ops_service._download_name_for_targets(resolved_targets)
|
||||||
|
task_id = str(uuid.uuid4())
|
||||||
|
task = self._repository.create_task(
|
||||||
|
operation="download",
|
||||||
|
source=summary,
|
||||||
|
destination=archive_name,
|
||||||
|
task_id=task_id,
|
||||||
|
status="requested",
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
entry_id=task_id,
|
||||||
|
operation="download",
|
||||||
|
status="requested",
|
||||||
|
source=mode,
|
||||||
|
destination=archive_name,
|
||||||
|
path=summary,
|
||||||
|
)
|
||||||
|
target_paths = [target.relative for target in resolved_targets]
|
||||||
|
self._runner.enqueue_archive_prepare(
|
||||||
|
lambda: self._run_archive_prepare_task(
|
||||||
|
task_id=task_id,
|
||||||
|
target_paths=target_paths,
|
||||||
|
archive_name=archive_name,
|
||||||
|
history_mode=mode,
|
||||||
|
history_path=summary,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
|
|
||||||
|
def prepare_ready_archive_download(self, task_id: str) -> dict:
|
||||||
|
self.sweep_artifacts()
|
||||||
|
task = self._repository.get_task(task_id)
|
||||||
|
if not task:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_found",
|
||||||
|
message="Task was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["operation"] != "download":
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Task is not an archive download",
|
||||||
|
status_code=400,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["status"] == "cancelled":
|
||||||
|
raise AppError(
|
||||||
|
code="download_cancelled",
|
||||||
|
message="Archive download was cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["status"] != "ready":
|
||||||
|
raise AppError(
|
||||||
|
code="download_not_ready",
|
||||||
|
message="Archive download is not ready",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
artifact = self._repository.get_artifact(task_id)
|
||||||
|
if not artifact:
|
||||||
|
raise AppError(
|
||||||
|
code="archive_not_found",
|
||||||
|
message="Prepared archive was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if self._is_expired(artifact["expires_at"]):
|
||||||
|
self._delete_artifact_record_and_file(task_id, artifact["file_path"])
|
||||||
|
raise AppError(
|
||||||
|
code="archive_expired",
|
||||||
|
message="Prepared archive expired",
|
||||||
|
status_code=410,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
|
||||||
|
artifact_path = Path(artifact["file_path"])
|
||||||
|
if not artifact_path.exists():
|
||||||
|
self._repository.delete_artifact(task_id)
|
||||||
|
raise AppError(
|
||||||
|
code="archive_not_found",
|
||||||
|
message="Prepared archive was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"content": self._file_ops_service._filesystem.stream_file(artifact_path),
|
||||||
|
"headers": {
|
||||||
|
"Content-Disposition": f'attachment; filename="{artifact["file_name"]}"',
|
||||||
|
"Content-Length": str(int(artifact_path.stat().st_size)),
|
||||||
|
},
|
||||||
|
"content_type": "application/zip",
|
||||||
|
}
|
||||||
|
|
||||||
|
def cancel_archive_prepare_task(self, task_id: str) -> dict:
|
||||||
|
self.sweep_artifacts()
|
||||||
|
task = self._repository.get_task(task_id)
|
||||||
|
if not task:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_found",
|
||||||
|
message="Task was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["operation"] != "download":
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Task is not an archive download",
|
||||||
|
status_code=400,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["status"] == "ready":
|
||||||
|
raise AppError(
|
||||||
|
code="download_not_cancellable",
|
||||||
|
message="Archive download is already ready",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
if task["status"] in {"failed", "cancelled"}:
|
||||||
|
raise AppError(
|
||||||
|
code="download_not_cancellable",
|
||||||
|
message="Archive download cannot be cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
if not self._repository.mark_cancelled(task_id):
|
||||||
|
current = self._repository.get_task(task_id)
|
||||||
|
current_status = current["status"] if current else task["status"]
|
||||||
|
raise AppError(
|
||||||
|
code="download_not_cancellable",
|
||||||
|
message="Archive download cannot be cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": current_status},
|
||||||
|
)
|
||||||
|
self._cleanup_task_artifacts(task_id)
|
||||||
|
self._update_history_cancelled(task_id)
|
||||||
|
cancelled_task = self._repository.get_task(task_id)
|
||||||
|
if not cancelled_task:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_found",
|
||||||
|
message="Task was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
return cancelled_task
|
||||||
|
|
||||||
|
def sweep_artifacts(self) -> None:
|
||||||
|
self._artifact_root.mkdir(parents=True, exist_ok=True)
|
||||||
|
referenced_paths: set[Path] = set()
|
||||||
|
for artifact in self._repository.list_artifacts():
|
||||||
|
artifact_path = Path(artifact["file_path"])
|
||||||
|
referenced_paths.add(artifact_path)
|
||||||
|
if self._is_expired(artifact["expires_at"]) or not artifact_path.exists():
|
||||||
|
self._delete_artifact_record_and_file(artifact["task_id"], artifact["file_path"])
|
||||||
|
|
||||||
|
for candidate in self._artifact_root.iterdir():
|
||||||
|
if candidate.is_file() and candidate not in referenced_paths:
|
||||||
|
try:
|
||||||
|
candidate.unlink()
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _run_archive_prepare_task(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
target_paths: list[str],
|
||||||
|
archive_name: str,
|
||||||
|
history_mode: str,
|
||||||
|
history_path: str,
|
||||||
|
) -> None:
|
||||||
|
partial_path = self._artifact_root / f"{task_id}.partial.zip"
|
||||||
|
final_path = self._artifact_root / f"{task_id}.zip"
|
||||||
|
total_items = len(target_paths)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
if not self._repository.mark_preparing(
|
||||||
|
task_id=task_id,
|
||||||
|
done_items=0,
|
||||||
|
total_items=total_items,
|
||||||
|
current_item=target_paths[0] if target_paths else None,
|
||||||
|
):
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
return
|
||||||
|
resolved_targets = [self._path_guard.resolve_existing_path(path) for path in target_paths]
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
self._file_ops_service._validate_zip_download_archive_names(resolved_targets)
|
||||||
|
self._file_ops_service._run_zip_download_preflight(resolved_targets)
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
|
||||||
|
with zipfile.ZipFile(partial_path, "w", compression=zipfile.ZIP_DEFLATED) as archive:
|
||||||
|
for index, resolved_target in enumerate(resolved_targets):
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
self._repository.update_progress(
|
||||||
|
task_id=task_id,
|
||||||
|
done_items=index,
|
||||||
|
total_items=total_items,
|
||||||
|
current_item=resolved_target.relative,
|
||||||
|
)
|
||||||
|
self._file_ops_service._write_download_target_to_zip(
|
||||||
|
archive,
|
||||||
|
resolved_target,
|
||||||
|
on_each_item=lambda: self._raise_if_cancelled(task_id),
|
||||||
|
)
|
||||||
|
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
os.replace(partial_path, final_path)
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
self._repository.upsert_artifact(
|
||||||
|
task_id=task_id,
|
||||||
|
file_path=str(final_path),
|
||||||
|
file_name=archive_name,
|
||||||
|
expires_at=self._expires_at_iso(),
|
||||||
|
)
|
||||||
|
if not self._repository.mark_ready(
|
||||||
|
task_id=task_id,
|
||||||
|
done_items=total_items,
|
||||||
|
total_items=total_items,
|
||||||
|
):
|
||||||
|
self._cleanup_task_artifacts(task_id)
|
||||||
|
self._raise_if_cancelled(task_id)
|
||||||
|
return
|
||||||
|
self._update_history_ready(task_id)
|
||||||
|
except ArchivePrepareCancelled:
|
||||||
|
self._cleanup_task_artifacts(task_id)
|
||||||
|
except AppError as exc:
|
||||||
|
self._cleanup_task_artifacts(task_id)
|
||||||
|
if self._repository.mark_failed_if_not_cancelled(
|
||||||
|
task_id=task_id,
|
||||||
|
error_code=exc.code,
|
||||||
|
error_message=exc.message,
|
||||||
|
failed_item=history_path,
|
||||||
|
done_bytes=None,
|
||||||
|
total_bytes=None,
|
||||||
|
done_items=0,
|
||||||
|
total_items=total_items,
|
||||||
|
):
|
||||||
|
self._update_history_failed(task_id, exc.code, exc.message)
|
||||||
|
except OSError as exc:
|
||||||
|
self._cleanup_task_artifacts(task_id)
|
||||||
|
if self._repository.mark_failed_if_not_cancelled(
|
||||||
|
task_id=task_id,
|
||||||
|
error_code="io_error",
|
||||||
|
error_message=str(exc),
|
||||||
|
failed_item=history_path,
|
||||||
|
done_bytes=None,
|
||||||
|
total_bytes=None,
|
||||||
|
done_items=0,
|
||||||
|
total_items=total_items,
|
||||||
|
):
|
||||||
|
self._update_history_failed(task_id, "io_error", str(exc))
|
||||||
|
|
||||||
|
def _cleanup_task_artifacts(self, task_id: str) -> None:
|
||||||
|
self._delete_artifact_record_and_file(task_id, str(self._artifact_root / f"{task_id}.partial.zip"))
|
||||||
|
self._delete_artifact_record_and_file(task_id, str(self._artifact_root / f"{task_id}.zip"))
|
||||||
|
|
||||||
|
def _delete_artifact_record_and_file(self, task_id: str, file_path: str) -> None:
|
||||||
|
self._repository.delete_artifact(task_id)
|
||||||
|
path = Path(file_path)
|
||||||
|
try:
|
||||||
|
path.unlink()
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _update_history_ready(self, task_id: str) -> None:
|
||||||
|
if self._history_repository:
|
||||||
|
self._history_repository.update_entry(entry_id=task_id, status="ready")
|
||||||
|
|
||||||
|
def _update_history_failed(self, task_id: str, error_code: str, error_message: str) -> None:
|
||||||
|
if self._history_repository:
|
||||||
|
self._history_repository.update_entry(
|
||||||
|
entry_id=task_id,
|
||||||
|
status="failed",
|
||||||
|
error_code=error_code,
|
||||||
|
error_message=error_message,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _update_history_cancelled(self, task_id: str) -> None:
|
||||||
|
if self._history_repository:
|
||||||
|
self._history_repository.update_entry(entry_id=task_id, status="cancelled")
|
||||||
|
|
||||||
|
def _record_history(self, **kwargs) -> None:
|
||||||
|
if self._history_repository:
|
||||||
|
self._history_repository.create_entry(**kwargs)
|
||||||
|
|
||||||
|
def _expires_at_iso(self) -> str:
|
||||||
|
return (datetime.now(timezone.utc) + timedelta(seconds=self._artifact_ttl_seconds)).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_expired(expires_at: str) -> bool:
|
||||||
|
return datetime.now(timezone.utc) >= datetime.fromisoformat(expires_at.replace("Z", "+00:00"))
|
||||||
|
|
||||||
|
def _raise_if_cancelled(self, task_id: str) -> None:
|
||||||
|
task = self._repository.get_task(task_id)
|
||||||
|
if task and task["status"] == "cancelled":
|
||||||
|
raise ArchivePrepareCancelled()
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
@@ -8,7 +9,7 @@ from backend.app.api.errors import AppError
|
|||||||
from backend.app.api.schemas import TaskCreateResponse
|
from backend.app.api.schemas import TaskCreateResponse
|
||||||
from backend.app.db.history_repository import HistoryRepository
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
from backend.app.db.task_repository import TaskRepository
|
from backend.app.db.task_repository import TaskRepository
|
||||||
from backend.app.security.path_guard import PathGuard
|
from backend.app.security.path_guard import PathGuard, ResolvedPath
|
||||||
from backend.app.tasks_runner import TaskRunner
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
@@ -20,62 +21,38 @@ class CopyTaskService:
|
|||||||
self._history_repository = history_repository
|
self._history_repository = history_repository
|
||||||
|
|
||||||
def create_copy_task(self, source: str, destination: str) -> TaskCreateResponse:
|
def create_copy_task(self, source: str, destination: str) -> TaskCreateResponse:
|
||||||
|
if not source or not destination:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Source and destination are required",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
resolved_source = self._path_guard.resolve_existing_path(source)
|
item = self._build_copy_item(source=source, destination=destination)
|
||||||
_, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
|
|
||||||
if lexical_source.is_symlink():
|
|
||||||
raise AppError(
|
|
||||||
code="type_conflict",
|
|
||||||
message="Source must be a regular file",
|
|
||||||
status_code=409,
|
|
||||||
details={"path": source},
|
|
||||||
)
|
|
||||||
if not resolved_source.absolute.is_file():
|
|
||||||
raise AppError(
|
|
||||||
code="type_conflict",
|
|
||||||
message="Source must be a file",
|
|
||||||
status_code=409,
|
|
||||||
details={"path": source},
|
|
||||||
)
|
|
||||||
|
|
||||||
resolved_destination = self._path_guard.resolve_path(destination)
|
|
||||||
destination_parent = resolved_destination.absolute.parent
|
|
||||||
parent_relative = self._path_guard.entry_relative_path(
|
|
||||||
resolved_destination.alias,
|
|
||||||
destination_parent,
|
|
||||||
display_style=resolved_destination.display_style,
|
|
||||||
)
|
|
||||||
self._map_directory_validation(parent_relative)
|
|
||||||
|
|
||||||
if resolved_destination.absolute.exists():
|
|
||||||
raise AppError(
|
|
||||||
code="already_exists",
|
|
||||||
message="Target path already exists",
|
|
||||||
status_code=409,
|
|
||||||
details={"path": resolved_destination.relative},
|
|
||||||
)
|
|
||||||
|
|
||||||
total_bytes = int(resolved_source.absolute.stat().st_size)
|
|
||||||
task_id = str(uuid.uuid4())
|
task_id = str(uuid.uuid4())
|
||||||
task = self._repository.create_task(
|
task = self._repository.create_task(
|
||||||
operation="copy",
|
operation="copy",
|
||||||
source=resolved_source.relative,
|
source=item["source_relative"],
|
||||||
destination=resolved_destination.relative,
|
destination=item["destination_relative"],
|
||||||
task_id=task_id,
|
task_id=task_id,
|
||||||
)
|
)
|
||||||
self._record_history(
|
self._record_history(
|
||||||
entry_id=task_id,
|
entry_id=task_id,
|
||||||
operation="copy",
|
operation="copy",
|
||||||
status="queued",
|
status="queued",
|
||||||
source=resolved_source.relative,
|
source=item["source_relative"],
|
||||||
destination=resolved_destination.relative,
|
destination=item["destination_relative"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if item["kind"] == "directory":
|
||||||
|
self._runner.enqueue_copy_directory(task_id=task["id"], item=item)
|
||||||
|
else:
|
||||||
self._runner.enqueue_copy_file(
|
self._runner.enqueue_copy_file(
|
||||||
task_id=task["id"],
|
task_id=task["id"],
|
||||||
source=str(resolved_source.absolute),
|
source=item["source_absolute"],
|
||||||
destination=str(resolved_destination.absolute),
|
destination=item["destination_absolute"],
|
||||||
total_bytes=total_bytes,
|
total_bytes=item["total_bytes"],
|
||||||
|
current_item=item["files"][0]["label"],
|
||||||
)
|
)
|
||||||
|
|
||||||
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
@@ -91,6 +68,152 @@ class CopyTaskService:
|
|||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
def create_batch_copy_task(self, sources: list[str] | None, destination_base: str | None) -> TaskCreateResponse:
|
||||||
|
if not sources or len(sources) < 2:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Batch copy requires at least 2 sources",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
if not destination_base:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Destination base is required",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
resolved_destination_base = self._path_guard.resolve_directory_path(destination_base)
|
||||||
|
items: list[dict] = []
|
||||||
|
for source in sources:
|
||||||
|
destination = self._join_destination_base(destination_base, self._path_guard.resolve_existing_path(source).absolute.name)
|
||||||
|
item = self._build_copy_item(
|
||||||
|
source=source,
|
||||||
|
destination=destination,
|
||||||
|
resolved_destination=resolved_destination_base,
|
||||||
|
destination_base=destination_base,
|
||||||
|
include_root_prefix=True,
|
||||||
|
)
|
||||||
|
items.append(item)
|
||||||
|
|
||||||
|
task_id = str(uuid.uuid4())
|
||||||
|
task = self._repository.create_task(
|
||||||
|
operation="copy",
|
||||||
|
source=f"{len(items)} items",
|
||||||
|
destination=resolved_destination_base.relative,
|
||||||
|
task_id=task_id,
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
entry_id=task_id,
|
||||||
|
operation="copy",
|
||||||
|
status="queued",
|
||||||
|
source=f"{len(items)} items",
|
||||||
|
destination=resolved_destination_base.relative,
|
||||||
|
)
|
||||||
|
self._runner.enqueue_copy_batch(
|
||||||
|
task_id=task["id"],
|
||||||
|
items=[
|
||||||
|
{
|
||||||
|
"source": item["source_absolute"],
|
||||||
|
"destination": item["destination_absolute"],
|
||||||
|
"kind": item["kind"],
|
||||||
|
"files": item["files"],
|
||||||
|
"directories": item["directories"],
|
||||||
|
}
|
||||||
|
for item in items
|
||||||
|
],
|
||||||
|
)
|
||||||
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
|
|
||||||
|
def _build_copy_item(
|
||||||
|
self,
|
||||||
|
source: str,
|
||||||
|
destination: str,
|
||||||
|
resolved_destination: ResolvedPath | None = None,
|
||||||
|
destination_base: str | None = None,
|
||||||
|
include_root_prefix: bool = False,
|
||||||
|
) -> dict:
|
||||||
|
resolved_source = self._path_guard.resolve_existing_path(source)
|
||||||
|
_, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
|
||||||
|
if lexical_source.is_symlink():
|
||||||
|
raise AppError(
|
||||||
|
code="type_conflict",
|
||||||
|
message="Source must not be a symlink",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": source},
|
||||||
|
)
|
||||||
|
|
||||||
|
source_is_file = resolved_source.absolute.is_file()
|
||||||
|
source_is_directory = resolved_source.absolute.is_dir()
|
||||||
|
if not source_is_file and not source_is_directory:
|
||||||
|
raise AppError(
|
||||||
|
code="type_conflict",
|
||||||
|
message="Unsupported source path type",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": source},
|
||||||
|
)
|
||||||
|
|
||||||
|
resolved_destination = resolved_destination or self._path_guard.resolve_path(destination)
|
||||||
|
destination_absolute = (
|
||||||
|
resolved_destination.absolute / resolved_source.absolute.name
|
||||||
|
if destination_base is not None
|
||||||
|
else resolved_destination.absolute
|
||||||
|
)
|
||||||
|
destination_relative = self._path_guard.entry_relative_path(
|
||||||
|
resolved_destination.alias,
|
||||||
|
destination_absolute,
|
||||||
|
display_style=resolved_destination.display_style,
|
||||||
|
)
|
||||||
|
destination_parent = destination_absolute.parent
|
||||||
|
parent_relative = self._path_guard.entry_relative_path(
|
||||||
|
resolved_destination.alias,
|
||||||
|
destination_parent,
|
||||||
|
display_style=resolved_destination.display_style,
|
||||||
|
)
|
||||||
|
self._map_directory_validation(parent_relative)
|
||||||
|
|
||||||
|
if destination_absolute.exists():
|
||||||
|
raise AppError(
|
||||||
|
code="already_exists",
|
||||||
|
message="Target path already exists",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": destination_relative},
|
||||||
|
)
|
||||||
|
|
||||||
|
if source_is_directory and self._is_nested_destination(resolved_source.absolute, destination_absolute):
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Destination cannot be inside source",
|
||||||
|
status_code=400,
|
||||||
|
details={"path": source, "destination": destination_relative},
|
||||||
|
)
|
||||||
|
|
||||||
|
if source_is_directory:
|
||||||
|
directories, files = self._build_directory_plan(
|
||||||
|
resolved_source=resolved_source,
|
||||||
|
destination_root=destination_absolute,
|
||||||
|
include_root_prefix=include_root_prefix,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
files = [
|
||||||
|
{
|
||||||
|
"source": str(resolved_source.absolute),
|
||||||
|
"destination": str(destination_absolute),
|
||||||
|
"label": resolved_source.absolute.name,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
directories = []
|
||||||
|
|
||||||
|
return {
|
||||||
|
"source_relative": resolved_source.relative,
|
||||||
|
"destination_relative": destination_relative,
|
||||||
|
"source_absolute": str(resolved_source.absolute),
|
||||||
|
"destination_absolute": str(destination_absolute),
|
||||||
|
"kind": "directory" if source_is_directory else "file",
|
||||||
|
"total_bytes": int(resolved_source.absolute.stat().st_size) if source_is_file else None,
|
||||||
|
"files": files,
|
||||||
|
"directories": directories,
|
||||||
|
}
|
||||||
|
|
||||||
def _map_directory_validation(self, relative_path: str) -> None:
|
def _map_directory_validation(self, relative_path: str) -> None:
|
||||||
try:
|
try:
|
||||||
self._path_guard.resolve_directory_path(relative_path)
|
self._path_guard.resolve_directory_path(relative_path)
|
||||||
@@ -104,6 +227,82 @@ class CopyTaskService:
|
|||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
def _build_directory_plan(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
resolved_source: ResolvedPath,
|
||||||
|
destination_root: Path,
|
||||||
|
include_root_prefix: bool,
|
||||||
|
) -> tuple[list[dict[str, str]], list[dict[str, str]]]:
|
||||||
|
directories: list[dict[str, str]] = [
|
||||||
|
{
|
||||||
|
"source": str(resolved_source.absolute),
|
||||||
|
"destination": str(destination_root),
|
||||||
|
}
|
||||||
|
]
|
||||||
|
files: list[dict[str, str]] = []
|
||||||
|
for root, dirnames, filenames in os.walk(resolved_source.absolute, followlinks=False):
|
||||||
|
root_path = Path(root)
|
||||||
|
dirnames.sort(key=str.lower)
|
||||||
|
filenames.sort(key=str.lower)
|
||||||
|
for name in dirnames:
|
||||||
|
entry = root_path / name
|
||||||
|
if entry.is_symlink():
|
||||||
|
raise AppError(
|
||||||
|
code="type_conflict",
|
||||||
|
message="Source directory must not contain symlinks",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": resolved_source.relative},
|
||||||
|
)
|
||||||
|
relative = entry.relative_to(resolved_source.absolute)
|
||||||
|
directories.append(
|
||||||
|
{
|
||||||
|
"source": str(entry),
|
||||||
|
"destination": str(destination_root / relative),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
for name in filenames:
|
||||||
|
entry = root_path / name
|
||||||
|
if entry.is_symlink():
|
||||||
|
raise AppError(
|
||||||
|
code="type_conflict",
|
||||||
|
message="Source directory must not contain symlinks",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": resolved_source.relative},
|
||||||
|
)
|
||||||
|
relative = entry.relative_to(resolved_source.absolute)
|
||||||
|
files.append(
|
||||||
|
{
|
||||||
|
"source": str(entry),
|
||||||
|
"destination": str(destination_root / relative),
|
||||||
|
"label": self._progress_label(
|
||||||
|
top_level_name=resolved_source.absolute.name,
|
||||||
|
relative_path=relative,
|
||||||
|
include_root_prefix=include_root_prefix,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return directories, files
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _progress_label(*, top_level_name: str, relative_path: Path, include_root_prefix: bool) -> str:
|
||||||
|
relative_value = relative_path.as_posix()
|
||||||
|
if not relative_value:
|
||||||
|
return top_level_name
|
||||||
|
return f"{top_level_name}/{relative_value}" if include_root_prefix else relative_value
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _join_destination_base(destination_base: str, name: str) -> str:
|
||||||
|
return f"{destination_base.rstrip('/')}/{name}" if destination_base.rstrip("/") else f"/{name}"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_nested_destination(source: Path, destination: Path) -> bool:
|
||||||
|
try:
|
||||||
|
destination.relative_to(source)
|
||||||
|
return True
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
|
||||||
def _record_history(self, **kwargs) -> None:
|
def _record_history(self, **kwargs) -> None:
|
||||||
if self._history_repository:
|
if self._history_repository:
|
||||||
self._history_repository.create_entry(**kwargs)
|
self._history_repository.create_entry(**kwargs)
|
||||||
|
|||||||
@@ -0,0 +1,221 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from backend.app.api.errors import AppError
|
||||||
|
from backend.app.api.schemas import TaskCreateResponse
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
|
class DeleteTaskService:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
path_guard: PathGuard,
|
||||||
|
repository: TaskRepository,
|
||||||
|
runner: TaskRunner,
|
||||||
|
history_repository: HistoryRepository | None = None,
|
||||||
|
):
|
||||||
|
self._path_guard = path_guard
|
||||||
|
self._repository = repository
|
||||||
|
self._runner = runner
|
||||||
|
self._history_repository = history_repository
|
||||||
|
|
||||||
|
def create_delete_task(self, path: str | None, recursive: bool = False) -> TaskCreateResponse:
|
||||||
|
if not path:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Query parameter 'path' is required",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
item = self._build_delete_item(path=path, recursive=recursive)
|
||||||
|
|
||||||
|
task_id = str(uuid.uuid4())
|
||||||
|
task = self._repository.create_task(
|
||||||
|
operation="delete",
|
||||||
|
source=item["relative_path"],
|
||||||
|
destination="",
|
||||||
|
task_id=task_id,
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
entry_id=task_id,
|
||||||
|
operation="delete",
|
||||||
|
status="queued",
|
||||||
|
path=item["relative_path"],
|
||||||
|
)
|
||||||
|
self._runner.enqueue_delete_path(task_id=task["id"], item=item)
|
||||||
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
|
except AppError as exc:
|
||||||
|
self._record_history(
|
||||||
|
operation="delete",
|
||||||
|
status="failed",
|
||||||
|
path=path,
|
||||||
|
error_code=exc.code,
|
||||||
|
error_message=exc.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
except OSError as exc:
|
||||||
|
error = AppError(
|
||||||
|
code="io_error",
|
||||||
|
message="Filesystem operation failed",
|
||||||
|
status_code=500,
|
||||||
|
details={"reason": str(exc)},
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
operation="delete",
|
||||||
|
status="failed",
|
||||||
|
path=path,
|
||||||
|
error_code=error.code,
|
||||||
|
error_message=error.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
raise error
|
||||||
|
|
||||||
|
def create_batch_delete_task(self, paths: list[str] | None, recursive_paths: list[str] | None = None) -> TaskCreateResponse:
|
||||||
|
if not paths or len(paths) < 2:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Batch delete requires at least 2 paths",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
recursive_paths_set = set(recursive_paths or [])
|
||||||
|
invalid_recursive = sorted(path for path in recursive_paths_set if path not in paths)
|
||||||
|
if invalid_recursive:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Recursive delete paths must be included in the batch selection",
|
||||||
|
status_code=400,
|
||||||
|
details={"path": invalid_recursive[0]},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
items = [
|
||||||
|
self._build_delete_item(
|
||||||
|
path=path,
|
||||||
|
recursive=path in recursive_paths_set,
|
||||||
|
include_root_prefix=True,
|
||||||
|
)
|
||||||
|
for path in paths
|
||||||
|
]
|
||||||
|
|
||||||
|
task_id = str(uuid.uuid4())
|
||||||
|
task = self._repository.create_task(
|
||||||
|
operation="delete",
|
||||||
|
source=f"{len(items)} items",
|
||||||
|
destination="",
|
||||||
|
task_id=task_id,
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
entry_id=task_id,
|
||||||
|
operation="delete",
|
||||||
|
status="queued",
|
||||||
|
path=f"{len(items)} items",
|
||||||
|
)
|
||||||
|
self._runner.enqueue_delete_batch(task_id=task["id"], items=items)
|
||||||
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
|
except AppError as exc:
|
||||||
|
self._record_history(
|
||||||
|
operation="delete",
|
||||||
|
status="failed",
|
||||||
|
path=f"{len(paths or [])} items",
|
||||||
|
error_code=exc.code,
|
||||||
|
error_message=exc.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
except OSError as exc:
|
||||||
|
error = AppError(
|
||||||
|
code="io_error",
|
||||||
|
message="Filesystem operation failed",
|
||||||
|
status_code=500,
|
||||||
|
details={"reason": str(exc)},
|
||||||
|
)
|
||||||
|
self._record_history(
|
||||||
|
operation="delete",
|
||||||
|
status="failed",
|
||||||
|
path=f"{len(paths or [])} items",
|
||||||
|
error_code=error.code,
|
||||||
|
error_message=error.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
raise error
|
||||||
|
|
||||||
|
def _build_delete_item(self, path: str, recursive: bool, include_root_prefix: bool = False) -> dict:
|
||||||
|
resolved_target = self._path_guard.resolve_existing_path(path)
|
||||||
|
|
||||||
|
if resolved_target.absolute.is_file():
|
||||||
|
label = resolved_target.absolute.name
|
||||||
|
files = [{"path": str(resolved_target.absolute), "label": label}]
|
||||||
|
directories: list[str] = []
|
||||||
|
kind = "file"
|
||||||
|
elif resolved_target.absolute.is_dir():
|
||||||
|
kind = "directory"
|
||||||
|
if not recursive and any(resolved_target.absolute.iterdir()):
|
||||||
|
raise AppError(
|
||||||
|
code="directory_not_empty",
|
||||||
|
message="Directory is not empty",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": resolved_target.relative},
|
||||||
|
)
|
||||||
|
if recursive:
|
||||||
|
files, directories = self._build_recursive_delete_plan(
|
||||||
|
resolved_target.absolute,
|
||||||
|
include_root_prefix=include_root_prefix,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
files = []
|
||||||
|
directories = [str(resolved_target.absolute)]
|
||||||
|
else:
|
||||||
|
raise AppError(
|
||||||
|
code="type_conflict",
|
||||||
|
message="Unsupported path type for delete",
|
||||||
|
status_code=409,
|
||||||
|
details={"path": resolved_target.relative},
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"target": str(resolved_target.absolute),
|
||||||
|
"relative_path": resolved_target.relative,
|
||||||
|
"kind": kind,
|
||||||
|
"recursive": recursive,
|
||||||
|
"files": files,
|
||||||
|
"directories": directories,
|
||||||
|
"progress_total_items": len(files),
|
||||||
|
"progress_label": files[0]["label"] if files else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _build_recursive_delete_plan(self, root: Path, include_root_prefix: bool = False) -> tuple[list[dict[str, str]], list[str]]:
|
||||||
|
files: list[dict[str, str]] = []
|
||||||
|
directories: list[str] = []
|
||||||
|
start_prefix = Path(root.name) if include_root_prefix else Path()
|
||||||
|
|
||||||
|
def walk(path: Path, relative_prefix: Path) -> None:
|
||||||
|
for entry in sorted(path.iterdir(), key=lambda child: child.name.lower()):
|
||||||
|
relative_path = relative_prefix / entry.name
|
||||||
|
if entry.is_symlink():
|
||||||
|
files.append({"path": str(entry), "label": relative_path.as_posix()})
|
||||||
|
continue
|
||||||
|
if entry.is_dir():
|
||||||
|
walk(entry, relative_path)
|
||||||
|
directories.append(str(entry))
|
||||||
|
continue
|
||||||
|
files.append({"path": str(entry), "label": relative_path.as_posix()})
|
||||||
|
|
||||||
|
walk(root, start_prefix)
|
||||||
|
directories.append(str(root))
|
||||||
|
return files, directories
|
||||||
|
|
||||||
|
def _record_history(self, **kwargs) -> None:
|
||||||
|
if self._history_repository:
|
||||||
|
self._history_repository.create_entry(**kwargs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _now_iso() -> str:
|
||||||
|
return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||||
@@ -0,0 +1,266 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from backend.app.api.errors import AppError
|
||||||
|
from backend.app.api.schemas import TaskCreateResponse
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.security.path_guard import PathGuard, ResolvedPath
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicateTaskService:
    """Plans and enqueues "duplicate" tasks that copy files or directories next to their source.

    Each source gets a Finder-style "<name> copy[ N]" sibling destination; directory
    sources are expanded into an explicit file/directory copy plan up front so the
    runner can report per-file progress.
    """

    def __init__(self, path_guard: PathGuard, repository: TaskRepository, runner: TaskRunner, history_repository: HistoryRepository | None = None):
        self._path_guard = path_guard
        self._repository = repository
        self._runner = runner
        # Optional: when absent, history recording silently becomes a no-op.
        self._history_repository = history_repository

    def create_duplicate_task(self, paths: list[str] | None) -> TaskCreateResponse:
        """Validate *paths*, build a per-entry copy plan, persist the task and enqueue it.

        Raises AppError (recorded as a failed history entry) on validation problems.
        """
        if not paths:
            raise AppError(
                code="invalid_request",
                message="At least 1 path is required",
                status_code=400,
            )
        try:
            items: list[dict[str, str]] = []
            reserved_destinations: set[str] = set()
            for input_path in paths:
                item = self._build_duplicate_item(
                    input_path,
                    reserved_destinations,
                    include_root_prefix=len(paths) > 1,
                )
                if item is None:
                    # Entry skipped (e.g. a "._" AppleDouble sidecar).
                    continue
                # Reserve the destination so later items in this batch cannot collide.
                reserved_destinations.add(item["destination_absolute"])
                items.append(item)

            source_summary = self._source_summary(paths, items)
            destination_summary = self._destination_summary(items)
            task_id = str(uuid.uuid4())
            task = self._repository.create_task(
                operation="duplicate",
                source=source_summary,
                destination=destination_summary,
                task_id=task_id,
            )
            self._record_history(
                entry_id=task_id,
                operation="duplicate",
                status="queued",
                source=source_summary,
                destination=destination_summary,
            )
            self._runner.enqueue_duplicate_batch(
                task_id=task["id"],
                items=[
                    {
                        "source": item["source_absolute"],
                        "destination": item["destination_absolute"],
                        "kind": item["kind"],
                        "files": item["files"],
                        "directories": item["directories"],
                    }
                    for item in items
                ],
            )
            return TaskCreateResponse(task_id=task["id"], status=task["status"])
        except AppError as exc:
            self._record_history(
                operation="duplicate",
                status="failed",
                source=paths[0] if len(paths) == 1 else f"{len(paths)} items",
                destination="same directory",
                error_code=exc.code,
                error_message=exc.message,
                finished_at=self._now_iso(),
            )
            raise

    def _build_duplicate_item(
        self,
        source: str,
        reserved_destinations: set[str],
        *,
        include_root_prefix: bool,
    ) -> dict[str, str] | None:
        """Resolve one source path into a copy plan dict, or None when it should be skipped.

        Raises AppError for symlinked sources and unsupported path types.
        """
        resolved_source = self._path_guard.resolve_existing_path(source)
        _, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
        if self._should_skip_name(lexical_source.name):
            return None
        if lexical_source.is_symlink():
            raise AppError(
                code="type_conflict",
                message="Source must not be a symlink",
                status_code=409,
                details={"path": source},
            )

        source_is_file = resolved_source.absolute.is_file()
        source_is_directory = resolved_source.absolute.is_dir()
        if not source_is_file and not source_is_directory:
            raise AppError(
                code="type_conflict",
                message="Unsupported source path type",
                status_code=409,
                details={"path": source},
            )

        destination_absolute = self._next_duplicate_destination(resolved_source.absolute, reserved_destinations)
        destination_relative = self._path_guard.entry_relative_path(
            resolved_source.alias,
            destination_absolute,
            display_style=resolved_source.display_style,
        )

        if source_is_directory:
            directories, files = self._build_directory_plan(
                resolved_source=resolved_source,
                destination_root=destination_absolute,
                include_root_prefix=include_root_prefix,
            )
        else:
            files = [
                {
                    "source": str(resolved_source.absolute),
                    "destination": str(destination_absolute),
                    "label": resolved_source.absolute.name,
                }
            ]
            directories = []

        return {
            "source_relative": resolved_source.relative,
            "destination_relative": destination_relative,
            "source_absolute": str(resolved_source.absolute),
            "destination_absolute": str(destination_absolute),
            "kind": "directory" if source_is_directory else "file",
            "files": files,
            "directories": directories,
        }

    def _build_directory_plan(
        self,
        *,
        resolved_source: ResolvedPath,
        destination_root: Path,
        include_root_prefix: bool,
    ) -> tuple[list[dict[str, str]], list[dict[str, str]]]:
        """Enumerate (directories, files) to copy for a directory source.

        Rejects any contained symlink, prunes skippable names, and sorts entries
        case-insensitively for deterministic task ordering.
        """
        directories: list[dict[str, str]] = [
            {
                "source": str(resolved_source.absolute),
                "destination": str(destination_root),
            }
        ]
        files: list[dict[str, str]] = []
        for root, dirnames, filenames in os.walk(resolved_source.absolute, followlinks=False):
            # In-place pruning stops os.walk from descending into skipped directories.
            dirnames[:] = [name for name in dirnames if not self._should_skip_name(name)]
            dirnames.sort(key=str.lower)
            filenames = sorted(filenames, key=str.lower)
            root_path = Path(root)
            for name in dirnames:
                entry = root_path / name
                if entry.is_symlink():
                    raise AppError(
                        code="type_conflict",
                        message="Source directory must not contain symlinks",
                        status_code=409,
                        details={"path": resolved_source.relative},
                    )
                relative = entry.relative_to(resolved_source.absolute)
                directories.append(
                    {
                        "source": str(entry),
                        "destination": str(destination_root / relative),
                    }
                )
            for name in filenames:
                if self._should_skip_name(name):
                    continue
                entry = root_path / name
                if entry.is_symlink():
                    raise AppError(
                        code="type_conflict",
                        message="Source directory must not contain symlinks",
                        status_code=409,
                        details={"path": resolved_source.relative},
                    )
                relative = entry.relative_to(resolved_source.absolute)
                files.append(
                    {
                        "source": str(entry),
                        "destination": str(destination_root / relative),
                        "label": self._progress_label(
                            top_level_name=resolved_source.absolute.name,
                            relative_path=relative,
                            include_root_prefix=include_root_prefix,
                        ),
                    }
                )
        return directories, files

    @staticmethod
    def _progress_label(*, top_level_name: str, relative_path: Path, include_root_prefix: bool) -> str:
        """Label shown in progress output; optionally prefixed with the top-level directory name."""
        relative_value = relative_path.as_posix()
        if not relative_value:
            return top_level_name
        return f"{top_level_name}/{relative_value}" if include_root_prefix else relative_value

    @classmethod
    def _next_duplicate_destination(cls, source: Path, reserved_destinations: set[str]) -> Path:
        """Return the first "copy" sibling path that neither exists nor is reserved by this batch."""
        parent = source.parent
        # is_file() is loop-invariant for the source; hoisted so each candidate
        # probe does not re-stat the source (the original called it per iteration).
        source_is_file = source.is_file()
        candidate_index = 1
        while True:
            candidate_name = cls._duplicate_name(source.name, source_is_file, candidate_index)
            candidate = parent / candidate_name
            if not candidate.exists() and str(candidate) not in reserved_destinations:
                return candidate
            candidate_index += 1

    @classmethod
    def _duplicate_name(cls, original_name: str, is_file: bool, index: int) -> str:
        """Build the copy name: files keep their full suffix chain, directories get a bare suffix.

        Examples: ("a.tar.gz", file, 1) -> "a copy.tar.gz"; ("dir", dir, 2) -> "dir copy 2".
        """
        if not is_file:
            suffix = ""
            base_name = original_name
        else:
            suffixes = Path(original_name).suffixes
            suffix = "".join(suffixes)
            base_name = original_name[: -len(suffix)] if suffix else original_name
        copy_suffix = " copy" if index == 1 else f" copy {index}"
        return f"{base_name}{copy_suffix}{suffix}"

    @staticmethod
    def _should_skip_name(name: str) -> bool:
        """True for macOS AppleDouble sidecar names ("._*"), which are never duplicated."""
        return name.startswith("._")

    @staticmethod
    def _source_summary(paths: list[str], items: list[dict[str, str]]) -> str:
        """History source column: the single path, or a count of the planned items."""
        if len(paths) == 1:
            return paths[0]
        if not items:
            return "0 items"
        return DuplicateTaskService._items_label(len(items))

    @staticmethod
    def _destination_summary(items: list[dict[str, str]]) -> str:
        """History destination column: the concrete path for one item, otherwise a placeholder."""
        if len(items) == 1:
            return items[0]["destination_relative"]
        return "same directory"

    @staticmethod
    def _items_label(count: int) -> str:
        """Pluralized item count, e.g. "1 item" / "3 items"."""
        return "1 item" if count == 1 else f"{count} items"

    def _record_history(self, **kwargs) -> None:
        """Persist a history entry when a history repository is configured; no-op otherwise."""
        if self._history_repository:
            self._history_repository.create_entry(**kwargs)

    @staticmethod
    def _now_iso() -> str:
        """Current UTC time at second precision, ISO-8601 formatted with a trailing Z."""
        return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||||
@@ -1,9 +1,16 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import zipfile
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from io import BytesIO
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
from backend.app.api.errors import AppError
|
from backend.app.api.errors import AppError
|
||||||
from backend.app.api.schemas import DeleteResponse, FileInfoResponse, MkdirResponse, RenameResponse, SaveResponse, ViewResponse
|
from backend.app.api.schemas import DeleteResponse, FileInfoResponse, MkdirResponse, RenameResponse, SaveResponse, UploadResponse, ViewResponse
|
||||||
from backend.app.db.history_repository import HistoryRepository
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
from backend.app.security.path_guard import PathGuard
|
from backend.app.security.path_guard import PathGuard
|
||||||
@@ -13,6 +20,7 @@ TEXT_EDIT_MAX_BYTES = 256 * 1024
|
|||||||
TEXT_CONTENT_TYPES = {
|
TEXT_CONTENT_TYPES = {
|
||||||
".txt": "text/plain",
|
".txt": "text/plain",
|
||||||
".log": "text/plain",
|
".log": "text/plain",
|
||||||
|
".conf": "text/plain",
|
||||||
".md": "text/markdown",
|
".md": "text/markdown",
|
||||||
".yml": "text/yaml",
|
".yml": "text/yaml",
|
||||||
".yaml": "text/yaml",
|
".yaml": "text/yaml",
|
||||||
@@ -32,6 +40,15 @@ THUMBNAIL_CONTENT_TYPES = {
|
|||||||
".png": "image/png",
|
".png": "image/png",
|
||||||
".webp": "image/webp",
|
".webp": "image/webp",
|
||||||
}
|
}
|
||||||
|
IMAGE_CONTENT_TYPES = {
|
||||||
|
".jpg": "image/jpeg",
|
||||||
|
".jpeg": "image/jpeg",
|
||||||
|
".png": "image/png",
|
||||||
|
".webp": "image/webp",
|
||||||
|
".gif": "image/gif",
|
||||||
|
".bmp": "image/bmp",
|
||||||
|
".avif": "image/avif",
|
||||||
|
}
|
||||||
VIDEO_CONTENT_TYPES = {
|
VIDEO_CONTENT_TYPES = {
|
||||||
".mp4": "video/mp4",
|
".mp4": "video/mp4",
|
||||||
".mkv": "video/x-matroska",
|
".mkv": "video/x-matroska",
|
||||||
@@ -41,11 +58,37 @@ PDF_CONTENT_TYPES = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ZipDownloadPreflightLimits:
    """Immutable limits enforced before building an in-memory zip download."""

    max_items: int = 1000  # total files + directories across all selected targets
    max_total_input_bytes: int = 2 * 1024 * 1024 * 1024  # 2 GiB of uncompressed input
    max_individual_file_bytes: int = 500 * 1024 * 1024  # 500 MiB per single file
    scan_timeout_seconds: float = 10.0  # budget for the preflight filesystem scan
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ZipDownloadPreflightState:
    """Mutable counters accumulated while scanning zip-download targets."""

    item_count: int = 0  # files + directories seen so far
    total_input_bytes: int = 0  # sum of file sizes seen so far
|
||||||
|
|
||||||
|
|
||||||
|
ZIP_DOWNLOAD_PREFLIGHT_LIMITS = ZipDownloadPreflightLimits()
|
||||||
|
|
||||||
|
|
||||||
class FileOpsService:
|
class FileOpsService:
|
||||||
def __init__(self, path_guard: PathGuard, filesystem: FilesystemAdapter, history_repository: HistoryRepository | None = None):
|
def __init__(
    self,
    path_guard: PathGuard,
    filesystem: FilesystemAdapter,
    history_repository: HistoryRepository | None = None,
    zip_download_preflight_limits: ZipDownloadPreflightLimits = ZIP_DOWNLOAD_PREFLIGHT_LIMITS,
    monotonic: Callable[[], float] | None = None,
):
    """Wire collaborators.

    *monotonic* is injectable so preflight timeout behaviour can be tested
    deterministically; it defaults to time.monotonic.
    """
    self._path_guard = path_guard
    self._filesystem = filesystem
    self._history_repository = history_repository
    self._zip_download_preflight_limits = zip_download_preflight_limits
    self._monotonic = monotonic or time.monotonic
|
||||||
|
|
||||||
def mkdir(self, parent_path: str, name: str) -> MkdirResponse:
|
def mkdir(self, parent_path: str, name: str) -> MkdirResponse:
|
||||||
try:
|
try:
|
||||||
@@ -149,7 +192,7 @@ class FileOpsService:
|
|||||||
self._record_history_error(operation="rename", source=path, destination=new_name, path=path, error=error)
|
self._record_history_error(operation="rename", source=path, destination=new_name, path=path, error=error)
|
||||||
raise error
|
raise error
|
||||||
|
|
||||||
def delete(self, path: str) -> DeleteResponse:
|
def delete(self, path: str, recursive: bool = False) -> DeleteResponse:
|
||||||
try:
|
try:
|
||||||
resolved_target = self._path_guard.resolve_existing_path(path)
|
resolved_target = self._path_guard.resolve_existing_path(path)
|
||||||
|
|
||||||
@@ -157,12 +200,15 @@ class FileOpsService:
|
|||||||
self._filesystem.delete_file(resolved_target.absolute)
|
self._filesystem.delete_file(resolved_target.absolute)
|
||||||
elif resolved_target.absolute.is_dir():
|
elif resolved_target.absolute.is_dir():
|
||||||
if not self._filesystem.is_directory_empty(resolved_target.absolute):
|
if not self._filesystem.is_directory_empty(resolved_target.absolute):
|
||||||
|
if not recursive:
|
||||||
raise AppError(
|
raise AppError(
|
||||||
code="directory_not_empty",
|
code="directory_not_empty",
|
||||||
message="Directory is not empty",
|
message="Directory is not empty",
|
||||||
status_code=409,
|
status_code=409,
|
||||||
details={"path": resolved_target.relative},
|
details={"path": resolved_target.relative},
|
||||||
)
|
)
|
||||||
|
self._filesystem.delete_directory_recursive(resolved_target.absolute)
|
||||||
|
else:
|
||||||
self._filesystem.delete_empty_directory(resolved_target.absolute)
|
self._filesystem.delete_empty_directory(resolved_target.absolute)
|
||||||
else:
|
else:
|
||||||
raise AppError(
|
raise AppError(
|
||||||
@@ -195,6 +241,73 @@ class FileOpsService:
|
|||||||
self._record_history_error(operation="delete", path=path, error=error)
|
self._record_history_error(operation="delete", path=path, error=error)
|
||||||
raise error
|
raise error
|
||||||
|
|
||||||
|
def upload(self, target_path: str, upload_file, overwrite: bool = False) -> UploadResponse:
    """Store *upload_file* inside directory *target_path*, recording history on success or failure.

    Raises AppError for an existing target (unless *overwrite*), for directory
    targets, and wraps OSError into an io_error AppError.
    """
    destination_relative = None
    history_path = target_path
    try:
        parent = self._path_guard.resolve_directory_path(target_path)
        # Strip any client-supplied directory components before validating the name.
        raw_name = Path(upload_file.filename or "").name
        safe_name = self._path_guard.validate_name(raw_name, field="name")
        destination_relative = self._join_relative(parent.relative, safe_name)
        history_path = destination_relative
        destination = self._path_guard.resolve_path(destination_relative)

        destination_exists = destination.absolute.exists()
        if destination_exists and not overwrite:
            raise AppError(
                code="already_exists",
                message="Target path already exists",
                status_code=409,
                details={"path": destination.relative},
            )
        if destination_exists and destination.absolute.is_dir():
            # Even with overwrite, an existing directory is never replaced by a file.
            raise AppError(
                code="type_conflict",
                message="Cannot overwrite an existing directory",
                status_code=409,
                details={"path": destination.relative},
            )

        saved = self._filesystem.write_uploaded_file(
            destination.absolute,
            upload_file.file,
            overwrite=overwrite,
        )
        self._record_history(
            operation="upload",
            status="completed",
            destination=destination.relative,
            path=destination.relative,
            finished_at=self._now_iso(),
        )
        return UploadResponse(
            path=destination.relative,
            size=saved["size"],
            modified=saved["modified"],
        )
    except AppError as exc:
        self._record_history_error(
            operation="upload",
            destination=destination_relative,
            path=history_path,
            error=exc,
        )
        raise
    except OSError as exc:
        error = AppError(
            code="io_error",
            message="Filesystem operation failed",
            status_code=500,
            details={"reason": str(exc)},
        )
        self._record_history_error(
            operation="upload",
            destination=destination_relative,
            path=history_path,
            error=error,
        )
        raise error
|
||||||
|
|
||||||
def view(self, path: str, for_edit: bool = False) -> ViewResponse:
|
def view(self, path: str, for_edit: bool = False) -> ViewResponse:
|
||||||
resolved_target = self._path_guard.resolve_existing_path(path)
|
resolved_target = self._path_guard.resolve_existing_path(path)
|
||||||
|
|
||||||
@@ -270,8 +383,85 @@ class FileOpsService:
|
|||||||
content_type=metadata["content_type"],
|
content_type=metadata["content_type"],
|
||||||
owner=metadata["owner"],
|
owner=metadata["owner"],
|
||||||
group=metadata["group"],
|
group=metadata["group"],
|
||||||
|
width=metadata["width"],
|
||||||
|
height=metadata["height"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def prepare_download(self, paths: list[str]) -> dict:
    """Resolve *paths* and prepare a single-file download response.

    Archive (directory / multi-path) downloads must go through the dedicated
    preflight flow and are rejected here. Every outcome is mirrored into the
    download history.
    """
    entry_id: str | None = None
    mode = self._download_mode_from_request_paths(paths)
    path_summary = self._summarize_download_targets(paths)
    download_name: str | None = None
    if not paths:
        error = AppError(
            code="invalid_request",
            message="At least one path is required",
            status_code=400,
        )
        self._record_download_failure(
            mode=mode,
            path_summary=path_summary,
            download_name=None,
            error=error,
            history_entry_id=None,
        )
        raise error

    try:
        targets = [self._path_guard.resolve_existing_path(candidate) for candidate in paths]
        # Re-derive the history fields now that the real filesystem types are known.
        mode = self._download_mode_from_resolved_targets(targets)
        path_summary = self._summarize_download_targets([target.relative for target in targets])
        download_name = self._download_name_for_targets(targets)

        if mode != "single_file":
            raise AppError(
                code="invalid_request",
                message="Archive downloads must be prepared first",
                status_code=400,
            )

        entry_id = self._record_download_status(
            status="requested",
            mode=mode,
            path_summary=path_summary,
            download_name=download_name,
        )

        prepared = self._prepare_single_file_download(targets[0])

        self._record_download_status(
            status="ready",
            mode=mode,
            path_summary=path_summary,
            download_name=download_name,
            history_entry_id=entry_id,
        )
        return prepared
    except AppError as error:
        self._record_download_failure(
            mode=mode,
            path_summary=path_summary,
            download_name=download_name,
            error=error,
            history_entry_id=entry_id,
        )
        raise
    except OSError as exc:
        error = AppError(
            code="io_error",
            message="Filesystem operation failed",
            status_code=500,
            details={"reason": str(exc)},
        )
        self._record_download_failure(
            mode=mode,
            path_summary=path_summary,
            download_name=download_name,
            error=error,
            history_entry_id=entry_id,
        )
        raise error
|
||||||
|
|
||||||
def save(self, path: str, content: str, expected_modified: str) -> SaveResponse:
|
def save(self, path: str, content: str, expected_modified: str) -> SaveResponse:
|
||||||
resolved_target = self._path_guard.resolve_existing_path(path)
|
resolved_target = self._path_guard.resolve_existing_path(path)
|
||||||
|
|
||||||
@@ -413,6 +603,39 @@ class FileOpsService:
|
|||||||
"content": self._filesystem.stream_file(resolved_target.absolute),
|
"content": self._filesystem.stream_file(resolved_target.absolute),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def prepare_image_stream(self, path: str) -> dict:
    """Build headers, content type and a stream for serving an image file inline.

    Raises AppError for directories, non-regular files and unsupported extensions.
    """
    target = self._path_guard.resolve_existing_path(path)

    if target.absolute.is_dir():
        raise AppError(
            code="type_conflict",
            message="Source must be a file",
            status_code=409,
            details={"path": target.relative},
        )
    if not target.absolute.is_file():
        raise AppError(
            code="type_conflict",
            message="Unsupported path type for image",
            status_code=409,
            details={"path": target.relative},
        )

    content_type = self._image_content_type_for(target.absolute)
    if content_type is None:
        raise AppError(
            code="unsupported_type",
            message="File type is not supported for image viewing",
            status_code=409,
            details={"path": target.relative},
        )

    return {
        "headers": {"Content-Length": str(int(target.absolute.stat().st_size))},
        "content_type": content_type,
        "content": self._filesystem.stream_file(target.absolute),
    }
|
||||||
|
|
||||||
def prepare_pdf_stream(self, path: str) -> dict:
|
def prepare_pdf_stream(self, path: str) -> dict:
|
||||||
resolved_target = self._path_guard.resolve_existing_path(path)
|
resolved_target = self._path_guard.resolve_existing_path(path)
|
||||||
|
|
||||||
@@ -465,6 +688,10 @@ class FileOpsService:
|
|||||||
def _thumbnail_content_type_for(path: Path) -> str | None:
|
def _thumbnail_content_type_for(path: Path) -> str | None:
|
||||||
return THUMBNAIL_CONTENT_TYPES.get(path.suffix.lower())
|
return THUMBNAIL_CONTENT_TYPES.get(path.suffix.lower())
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _image_content_type_for(path: Path) -> str | None:
|
||||||
|
return IMAGE_CONTENT_TYPES.get(path.suffix.lower())
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _pdf_content_type_for(path: Path) -> str | None:
|
def _pdf_content_type_for(path: Path) -> str | None:
|
||||||
return PDF_CONTENT_TYPES.get(path.suffix.lower())
|
return PDF_CONTENT_TYPES.get(path.suffix.lower())
|
||||||
@@ -516,9 +743,286 @@ class FileOpsService:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _now_iso() -> str:
|
def _now_iso() -> str:
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
return datetime.now(timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||||
|
|
||||||
|
def _prepare_single_file_download(self, resolved_target) -> dict:
    """Stream one regular file as an attachment; symlinked sources are rejected."""
    _, _, lexical_source, _ = self._path_guard.resolve_lexical_path(resolved_target.relative)
    if lexical_source.is_symlink():
        raise AppError(
            code="type_conflict",
            message="Source must not be a symlink",
            status_code=409,
            details={"path": resolved_target.relative},
        )
    filename = resolved_target.absolute.name
    return {
        "content": self._filesystem.stream_file(resolved_target.absolute),
        "headers": {
            "Content-Disposition": f'attachment; filename="{filename}"',
        },
        "content_type": self._content_type_for(resolved_target.absolute) or "application/octet-stream",
    }
|
||||||
|
|
||||||
|
def _prepare_zip_download(self, resolved_targets: list, download_name: str) -> dict:
    """Validate targets, run the preflight limits, then build the zip archive in memory."""
    self._validate_zip_download_archive_names(resolved_targets)
    self._run_zip_download_preflight(resolved_targets)

    buffer = BytesIO()
    with zipfile.ZipFile(buffer, "w", compression=zipfile.ZIP_DEFLATED) as archive:
        for resolved_target in resolved_targets:
            self._write_download_target_to_zip(archive, resolved_target)
    payload = buffer.getvalue()

    async def _stream_zip():
        # Single-chunk async generator so the response layer can stream uniformly.
        yield payload

    return {
        "content": _stream_zip(),
        "headers": {
            "Content-Disposition": f'attachment; filename="{download_name}"',
        },
        "content_type": "application/zip",
    }
|
||||||
|
|
||||||
|
def _validate_zip_download_archive_names(self, resolved_targets: list) -> None:
|
||||||
|
archive_names: set[str] = set()
|
||||||
|
for resolved_target in resolved_targets:
|
||||||
|
archive_name = resolved_target.absolute.name
|
||||||
|
if archive_name in archive_names:
|
||||||
|
raise AppError(
|
||||||
|
code="invalid_request",
|
||||||
|
message="Selected items must have distinct top-level names",
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
archive_names.add(archive_name)
|
||||||
|
|
||||||
|
def _download_name_for_targets(self, resolved_targets: list) -> str:
|
||||||
|
if len(resolved_targets) == 1 and resolved_targets[0].absolute.is_file():
|
||||||
|
return resolved_targets[0].absolute.name
|
||||||
|
if len(resolved_targets) == 1 and resolved_targets[0].absolute.is_dir():
|
||||||
|
return f"{resolved_targets[0].absolute.name}.zip"
|
||||||
|
return f"kodidownload-{datetime.now(timezone.utc).strftime('%Y%m%d-%H%M%S')}.zip"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _download_mode_from_request_paths(paths: list[str]) -> str:
|
||||||
|
return "multi_zip" if len(paths) > 1 else "single_file"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _download_mode_from_resolved_targets(resolved_targets: list) -> str:
|
||||||
|
if len(resolved_targets) == 1 and resolved_targets[0].absolute.is_file():
|
||||||
|
return "single_file"
|
||||||
|
if len(resolved_targets) == 1 and resolved_targets[0].absolute.is_dir():
|
||||||
|
return "single_directory_zip"
|
||||||
|
return "multi_zip"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _summarize_download_targets(paths: list[str]) -> str:
|
||||||
|
if not paths:
|
||||||
|
return "-"
|
||||||
|
if len(paths) == 1:
|
||||||
|
return paths[0]
|
||||||
|
if len(paths) == 2:
|
||||||
|
return f"{paths[0]}, {paths[1]}"
|
||||||
|
return f"{paths[0]}, {paths[1]}, +{len(paths) - 2} more"
|
||||||
|
|
||||||
|
def _record_download_status(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
status: str,
|
||||||
|
mode: str,
|
||||||
|
path_summary: str,
|
||||||
|
download_name: str | None,
|
||||||
|
history_entry_id: str | None = None,
|
||||||
|
) -> str | None:
|
||||||
|
if not self._history_repository:
|
||||||
|
return history_entry_id
|
||||||
|
if history_entry_id:
|
||||||
|
self._history_repository.update_entry(
|
||||||
|
entry_id=history_entry_id,
|
||||||
|
status=status,
|
||||||
|
error_code=None,
|
||||||
|
error_message=None,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
return history_entry_id
|
||||||
|
created = self._history_repository.create_entry(
|
||||||
|
operation="download",
|
||||||
|
status=status,
|
||||||
|
source=mode,
|
||||||
|
destination=download_name,
|
||||||
|
path=path_summary,
|
||||||
|
finished_at=self._now_iso() if status != "requested" else None,
|
||||||
|
)
|
||||||
|
return created["id"]
|
||||||
|
|
||||||
|
def _record_download_failure(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
mode: str,
|
||||||
|
path_summary: str,
|
||||||
|
download_name: str | None,
|
||||||
|
error: AppError,
|
||||||
|
history_entry_id: str | None,
|
||||||
|
) -> None:
|
||||||
|
if not self._history_repository:
|
||||||
|
return
|
||||||
|
failure_status = "preflight_failed" if error.code == "download_preflight_failed" else "failed"
|
||||||
|
if history_entry_id:
|
||||||
|
self._history_repository.update_entry(
|
||||||
|
entry_id=history_entry_id,
|
||||||
|
status=failure_status,
|
||||||
|
error_code=error.code,
|
||||||
|
error_message=error.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
self._history_repository.create_entry(
|
||||||
|
operation="download",
|
||||||
|
status=failure_status,
|
||||||
|
source=mode,
|
||||||
|
destination=download_name,
|
||||||
|
path=path_summary,
|
||||||
|
error_code=error.code,
|
||||||
|
error_message=error.message,
|
||||||
|
finished_at=self._now_iso(),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _run_zip_download_preflight(self, resolved_targets: list) -> None:
    """Walk every target before zipping to enforce item, size and timeout limits."""
    started_at = self._monotonic()
    state = ZipDownloadPreflightState()
    for resolved_target in resolved_targets:
        self._ensure_zip_download_preflight_within_timeout(started_at)
        self._validate_zip_download_root_target(resolved_target)
        if resolved_target.absolute.is_file():
            self._record_zip_download_file(
                state=state,
                entry_path=resolved_target.absolute,
                entry_relative=resolved_target.relative,
            )
            continue
        # A directory root counts as one item itself, then its contents are scanned.
        self._increment_zip_download_item_count(
            state=state,
            entry_relative=resolved_target.relative,
        )
        self._scan_zip_download_directory(
            state=state,
            resolved_target=resolved_target,
            started_at=started_at,
        )
|
||||||
|
|
||||||
|
def _validate_zip_download_root_target(self, resolved_target) -> None:
    """Preflight check for one selected root: no symlinks; only files or directories pass."""
    _, _, lexical_source, _ = self._path_guard.resolve_lexical_path(resolved_target.relative)
    if lexical_source.is_symlink():
        self._raise_zip_download_preflight_error(
            reason="symlink_detected",
            details={"path": resolved_target.relative},
        )
    if resolved_target.absolute.is_file() or resolved_target.absolute.is_dir():
        return
    self._raise_zip_download_preflight_error(
        reason="unsupported_path_type",
        details={"path": resolved_target.relative},
    )
|
||||||
|
|
||||||
|
def _scan_zip_download_directory(self, state: ZipDownloadPreflightState, resolved_target, started_at: float) -> None:
|
||||||
|
for root, dirnames, filenames in os.walk(resolved_target.absolute, followlinks=False):
|
||||||
|
root_path = Path(root)
|
||||||
|
dirnames.sort()
|
||||||
|
filenames.sort()
|
||||||
|
for name in [*dirnames, *filenames]:
|
||||||
|
self._ensure_zip_download_preflight_within_timeout(started_at)
|
||||||
|
entry_path = root_path / name
|
||||||
|
relative_suffix = entry_path.relative_to(resolved_target.absolute).as_posix()
|
||||||
|
entry_relative = self._join_relative(resolved_target.relative, relative_suffix)
|
||||||
|
if entry_path.is_symlink():
|
||||||
|
self._raise_zip_download_preflight_error(
|
||||||
|
reason="symlink_detected",
|
||||||
|
details={"path": entry_relative},
|
||||||
|
)
|
||||||
|
if entry_path.is_dir():
|
||||||
|
self._increment_zip_download_item_count(state=state, entry_relative=entry_relative)
|
||||||
|
continue
|
||||||
|
self._record_zip_download_file(
|
||||||
|
state=state,
|
||||||
|
entry_path=entry_path,
|
||||||
|
entry_relative=entry_relative,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _record_zip_download_file(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
state: ZipDownloadPreflightState,
|
||||||
|
entry_path: Path,
|
||||||
|
entry_relative: str,
|
||||||
|
) -> None:
|
||||||
|
self._increment_zip_download_item_count(state=state, entry_relative=entry_relative)
|
||||||
|
file_size = int(entry_path.stat().st_size)
|
||||||
|
if file_size > self._zip_download_preflight_limits.max_individual_file_bytes:
|
||||||
|
self._raise_zip_download_preflight_error(
|
||||||
|
reason="max_individual_file_size_exceeded",
|
||||||
|
details={
|
||||||
|
"path": entry_relative,
|
||||||
|
"limit_bytes": str(self._zip_download_preflight_limits.max_individual_file_bytes),
|
||||||
|
"actual_bytes": str(file_size),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
state.total_input_bytes += file_size
|
||||||
|
if state.total_input_bytes > self._zip_download_preflight_limits.max_total_input_bytes:
|
||||||
|
self._raise_zip_download_preflight_error(
|
||||||
|
reason="max_total_input_bytes_exceeded",
|
||||||
|
details={
|
||||||
|
"limit_bytes": str(self._zip_download_preflight_limits.max_total_input_bytes),
|
||||||
|
"actual_bytes": str(state.total_input_bytes),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def _increment_zip_download_item_count(self, *, state: ZipDownloadPreflightState, entry_relative: str) -> None:
|
||||||
|
state.item_count += 1
|
||||||
|
if state.item_count > self._zip_download_preflight_limits.max_items:
|
||||||
|
self._raise_zip_download_preflight_error(
|
||||||
|
reason="max_items_exceeded",
|
||||||
|
details={
|
||||||
|
"path": entry_relative,
|
||||||
|
"limit": str(self._zip_download_preflight_limits.max_items),
|
||||||
|
"actual": str(state.item_count),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def _ensure_zip_download_preflight_within_timeout(self, started_at: float) -> None:
|
||||||
|
elapsed = self._monotonic() - started_at
|
||||||
|
if elapsed > self._zip_download_preflight_limits.scan_timeout_seconds:
|
||||||
|
self._raise_zip_download_preflight_error(
|
||||||
|
reason="preflight_timeout",
|
||||||
|
details={
|
||||||
|
"timeout_seconds": str(self._zip_download_preflight_limits.scan_timeout_seconds),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _raise_zip_download_preflight_error(reason: str, details: dict[str, str]) -> None:
|
||||||
|
raise AppError(
|
||||||
|
code="download_preflight_failed",
|
||||||
|
message="Zip download preflight failed",
|
||||||
|
status_code=409,
|
||||||
|
details={"reason": reason, **details},
|
||||||
|
)
|
||||||
|
|
||||||
|
def _write_download_target_to_zip(self, archive: zipfile.ZipFile, resolved_target, on_each_item=None) -> None:
|
||||||
|
root_name = resolved_target.absolute.name
|
||||||
|
if resolved_target.absolute.is_file():
|
||||||
|
if on_each_item:
|
||||||
|
on_each_item()
|
||||||
|
archive.write(resolved_target.absolute, arcname=root_name)
|
||||||
|
return
|
||||||
|
|
||||||
|
archive.writestr(f"{root_name}/", b"")
|
||||||
|
for child in sorted(resolved_target.absolute.rglob("*")):
|
||||||
|
if on_each_item:
|
||||||
|
on_each_item()
|
||||||
|
arcname = f"{root_name}/{child.relative_to(resolved_target.absolute).as_posix()}"
|
||||||
|
if child.is_dir():
|
||||||
|
archive.writestr(f"{arcname}/", b"")
|
||||||
|
else:
|
||||||
|
archive.write(child, arcname=arcname)
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_range_header(range_header: str, file_size: int) -> tuple[int, int]:
|
def _parse_range_header(range_header: str, file_size: int) -> tuple[int, int]:
|
||||||
def invalid_range() -> AppError:
|
def invalid_range() -> AppError:
|
||||||
|
|||||||
@@ -45,11 +45,7 @@ class MoveTaskService:
|
|||||||
)
|
)
|
||||||
|
|
||||||
if item["kind"] == "directory":
|
if item["kind"] == "directory":
|
||||||
self._runner.enqueue_move_directory(
|
self._runner.enqueue_move_directory(task_id=task["id"], item=item)
|
||||||
task_id=task["id"],
|
|
||||||
source=item["source_absolute"],
|
|
||||||
destination=item["destination_absolute"],
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
self._runner.enqueue_move_file(
|
self._runner.enqueue_move_file(
|
||||||
task_id=task["id"],
|
task_id=task["id"],
|
||||||
@@ -57,6 +53,7 @@ class MoveTaskService:
|
|||||||
destination=item["destination_absolute"],
|
destination=item["destination_absolute"],
|
||||||
total_bytes=item["total_bytes"],
|
total_bytes=item["total_bytes"],
|
||||||
same_root=item["same_root"],
|
same_root=item["same_root"],
|
||||||
|
current_item=item["files"][0]["label"],
|
||||||
)
|
)
|
||||||
|
|
||||||
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
return TaskCreateResponse(task_id=task["id"], status=task["status"])
|
||||||
@@ -98,10 +95,11 @@ class MoveTaskService:
|
|||||||
)
|
)
|
||||||
|
|
||||||
root_alias = next(iter(source_aliases))
|
root_alias = next(iter(source_aliases))
|
||||||
if root_alias != resolved_destination_base.alias:
|
has_directory = any(resolved_source.absolute.is_dir() for resolved_source in resolved_sources)
|
||||||
|
if root_alias != resolved_destination_base.alias and has_directory:
|
||||||
raise AppError(
|
raise AppError(
|
||||||
code="invalid_request",
|
code="invalid_request",
|
||||||
message="Cross-root batch directory move is not supported in v1",
|
message="Cross-root batch move with directories is not supported in v1",
|
||||||
status_code=400,
|
status_code=400,
|
||||||
details={"destination_base": destination_base},
|
details={"destination_base": destination_base},
|
||||||
)
|
)
|
||||||
@@ -113,6 +111,7 @@ class MoveTaskService:
|
|||||||
destination=destination,
|
destination=destination,
|
||||||
resolved_destination=resolved_destination_base,
|
resolved_destination=resolved_destination_base,
|
||||||
destination_base=destination_base,
|
destination_base=destination_base,
|
||||||
|
include_root_prefix=True,
|
||||||
)
|
)
|
||||||
items.append(item)
|
items.append(item)
|
||||||
|
|
||||||
@@ -137,6 +136,11 @@ class MoveTaskService:
|
|||||||
"source": item["source_absolute"],
|
"source": item["source_absolute"],
|
||||||
"destination": item["destination_absolute"],
|
"destination": item["destination_absolute"],
|
||||||
"kind": item["kind"],
|
"kind": item["kind"],
|
||||||
|
"same_root": item["same_root"],
|
||||||
|
"files": item["files"],
|
||||||
|
"directories": item["directories"],
|
||||||
|
"progress_total_items": item["progress_total_items"],
|
||||||
|
"progress_label": item["progress_label"],
|
||||||
}
|
}
|
||||||
for item in items
|
for item in items
|
||||||
],
|
],
|
||||||
@@ -149,6 +153,7 @@ class MoveTaskService:
|
|||||||
destination: str,
|
destination: str,
|
||||||
resolved_destination: ResolvedPath | None = None,
|
resolved_destination: ResolvedPath | None = None,
|
||||||
destination_base: str | None = None,
|
destination_base: str | None = None,
|
||||||
|
include_root_prefix: bool = False,
|
||||||
) -> dict:
|
) -> dict:
|
||||||
resolved_source = self._path_guard.resolve_existing_path(source)
|
resolved_source = self._path_guard.resolve_existing_path(source)
|
||||||
_, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
|
_, _, lexical_source, _ = self._path_guard.resolve_lexical_path(source)
|
||||||
@@ -224,6 +229,23 @@ class MoveTaskService:
|
|||||||
details={"path": source, "destination": destination_relative},
|
details={"path": source, "destination": destination_relative},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
progress_label = resolved_source.absolute.name
|
||||||
|
if source_is_directory:
|
||||||
|
files = []
|
||||||
|
directories = []
|
||||||
|
if include_root_prefix:
|
||||||
|
progress_label = resolved_source.absolute.name
|
||||||
|
else:
|
||||||
|
files = [
|
||||||
|
{
|
||||||
|
"source": str(resolved_source.absolute),
|
||||||
|
"destination": str(destination_absolute),
|
||||||
|
"label": resolved_source.absolute.name,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
directories = []
|
||||||
|
progress_label = files[0]["label"]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"source_relative": resolved_source.relative,
|
"source_relative": resolved_source.relative,
|
||||||
"destination_relative": destination_relative,
|
"destination_relative": destination_relative,
|
||||||
@@ -232,6 +254,10 @@ class MoveTaskService:
|
|||||||
"kind": "directory" if source_is_directory else "file",
|
"kind": "directory" if source_is_directory else "file",
|
||||||
"same_root": same_root,
|
"same_root": same_root,
|
||||||
"total_bytes": int(resolved_source.absolute.stat().st_size) if source_is_file else None,
|
"total_bytes": int(resolved_source.absolute.stat().st_size) if source_is_file else None,
|
||||||
|
"files": files,
|
||||||
|
"directories": directories,
|
||||||
|
"progress_total_items": 1,
|
||||||
|
"progress_label": progress_label,
|
||||||
}
|
}
|
||||||
|
|
||||||
def _map_directory_validation(self, relative_path: str) -> None:
|
def _map_directory_validation(self, relative_path: str) -> None:
|
||||||
|
|||||||
@@ -1,8 +1,24 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from backend.app.api.schemas import SettingsResponse, SettingsUpdateRequest
|
from backend.app.api.errors import AppError
|
||||||
|
from backend.app.api.schemas import SettingsResponse, SettingsUpdateRequest, ZipDownloadLimitsResponse
|
||||||
from backend.app.db.settings_repository import SettingsRepository
|
from backend.app.db.settings_repository import SettingsRepository
|
||||||
from backend.app.security.path_guard import PathGuard
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.file_ops_service import ZIP_DOWNLOAD_PREFLIGHT_LIMITS
|
||||||
|
|
||||||
|
|
||||||
|
VALID_THEMES = {
|
||||||
|
"default",
|
||||||
|
"macos-soft",
|
||||||
|
"midnight",
|
||||||
|
"graphite",
|
||||||
|
"windows11",
|
||||||
|
"commander-electric",
|
||||||
|
"nord-arctic",
|
||||||
|
"catppuccin-soft",
|
||||||
|
"fluent-neon",
|
||||||
|
}
|
||||||
|
VALID_COLOR_MODES = {"dark", "light"}
|
||||||
|
|
||||||
|
|
||||||
class SettingsService:
|
class SettingsService:
|
||||||
@@ -15,10 +31,21 @@ class SettingsService:
|
|||||||
preferred_left = self._as_optional_str(values.get("preferred_startup_path_left"))
|
preferred_left = self._as_optional_str(values.get("preferred_startup_path_left"))
|
||||||
preferred_right = self._as_optional_str(values.get("preferred_startup_path_right"))
|
preferred_right = self._as_optional_str(values.get("preferred_startup_path_right"))
|
||||||
legacy_preferred = self._as_optional_str(values.get("preferred_startup_path"))
|
legacy_preferred = self._as_optional_str(values.get("preferred_startup_path"))
|
||||||
|
selected_theme = self._normalize_theme(values.get("selected_theme"))
|
||||||
|
selected_color_mode = self._normalize_color_mode(values.get("selected_color_mode"))
|
||||||
return SettingsResponse(
|
return SettingsResponse(
|
||||||
show_thumbnails=self._as_bool(values.get("show_thumbnails"), default=False),
|
show_thumbnails=self._as_bool(values.get("show_thumbnails"), default=False),
|
||||||
preferred_startup_path_left=preferred_left or legacy_preferred,
|
preferred_startup_path_left=preferred_left or legacy_preferred,
|
||||||
preferred_startup_path_right=preferred_right,
|
preferred_startup_path_right=preferred_right,
|
||||||
|
selected_theme=selected_theme,
|
||||||
|
selected_color_mode=selected_color_mode,
|
||||||
|
zip_download_limits=ZipDownloadLimitsResponse(
|
||||||
|
max_items=ZIP_DOWNLOAD_PREFLIGHT_LIMITS.max_items,
|
||||||
|
max_total_input_bytes=ZIP_DOWNLOAD_PREFLIGHT_LIMITS.max_total_input_bytes,
|
||||||
|
max_individual_file_bytes=ZIP_DOWNLOAD_PREFLIGHT_LIMITS.max_individual_file_bytes,
|
||||||
|
scan_timeout_seconds=ZIP_DOWNLOAD_PREFLIGHT_LIMITS.scan_timeout_seconds,
|
||||||
|
symlink_policy="not_allowed",
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
def update_settings(self, request: SettingsUpdateRequest) -> SettingsResponse:
|
def update_settings(self, request: SettingsUpdateRequest) -> SettingsResponse:
|
||||||
@@ -31,6 +58,12 @@ class SettingsService:
|
|||||||
if request.preferred_startup_path_right is not None:
|
if request.preferred_startup_path_right is not None:
|
||||||
self._set_directory_setting("preferred_startup_path_right", request.preferred_startup_path_right)
|
self._set_directory_setting("preferred_startup_path_right", request.preferred_startup_path_right)
|
||||||
|
|
||||||
|
if request.selected_theme is not None:
|
||||||
|
self._repository.set_setting("selected_theme", self._validate_theme(request.selected_theme))
|
||||||
|
|
||||||
|
if request.selected_color_mode is not None:
|
||||||
|
self._repository.set_setting("selected_color_mode", self._validate_color_mode(request.selected_color_mode))
|
||||||
|
|
||||||
return self.get_settings()
|
return self.get_settings()
|
||||||
|
|
||||||
def _set_directory_setting(self, key: str, value: str) -> None:
|
def _set_directory_setting(self, key: str, value: str) -> None:
|
||||||
@@ -53,3 +86,38 @@ class SettingsService:
|
|||||||
return None
|
return None
|
||||||
normalized = value.strip()
|
normalized = value.strip()
|
||||||
return normalized or None
|
return normalized or None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_theme(value: str | None) -> str:
|
||||||
|
normalized = (value or "").strip()
|
||||||
|
return normalized if normalized in VALID_THEMES else "default"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_color_mode(value: str | None) -> str:
|
||||||
|
normalized = (value or "").strip().lower()
|
||||||
|
return normalized if normalized in VALID_COLOR_MODES else "dark"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _validate_theme(value: str) -> str:
|
||||||
|
normalized = value.strip()
|
||||||
|
if normalized not in VALID_THEMES:
|
||||||
|
raise AppError(
|
||||||
|
status_code=400,
|
||||||
|
code="invalid_request",
|
||||||
|
message=(
|
||||||
|
"Theme must be one of: default, macos-soft, midnight, graphite, windows11, "
|
||||||
|
"commander-electric, nord-arctic, catppuccin-soft, fluent-neon"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return normalized
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _validate_color_mode(value: str) -> str:
|
||||||
|
normalized = value.strip().lower()
|
||||||
|
if normalized not in VALID_COLOR_MODES:
|
||||||
|
raise AppError(
|
||||||
|
status_code=400,
|
||||||
|
code="invalid_request",
|
||||||
|
message="Color mode must be one of: dark, light",
|
||||||
|
)
|
||||||
|
return normalized
|
||||||
|
|||||||
@@ -0,0 +1,14 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
|
||||||
|
|
||||||
|
def reconcile_persisted_incomplete_tasks(
|
||||||
|
task_repository: TaskRepository,
|
||||||
|
history_repository: HistoryRepository,
|
||||||
|
) -> list[str]:
|
||||||
|
task_ids = task_repository.reconcile_incomplete_tasks()
|
||||||
|
if task_ids:
|
||||||
|
history_repository.reconcile_entries_failed(task_ids)
|
||||||
|
return task_ids
|
||||||
@@ -2,12 +2,16 @@ from __future__ import annotations
|
|||||||
|
|
||||||
from backend.app.api.errors import AppError
|
from backend.app.api.errors import AppError
|
||||||
from backend.app.api.schemas import TaskDetailResponse, TaskListItem, TaskListResponse
|
from backend.app.api.schemas import TaskDetailResponse, TaskListItem, TaskListResponse
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
from backend.app.db.task_repository import TaskRepository
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
|
||||||
|
FILE_ACTION_CANCELLABLE_OPERATIONS = {"copy", "move", "duplicate", "delete"}
|
||||||
|
|
||||||
|
|
||||||
class TaskService:
|
class TaskService:
|
||||||
def __init__(self, repository: TaskRepository):
|
def __init__(self, repository: TaskRepository, history_repository: HistoryRepository | None = None):
|
||||||
self._repository = repository
|
self._repository = repository
|
||||||
|
self._history_repository = history_repository
|
||||||
|
|
||||||
def create_task(self, operation: str, source: str, destination: str) -> TaskDetailResponse:
|
def create_task(self, operation: str, source: str, destination: str) -> TaskDetailResponse:
|
||||||
task = self._repository.create_task(operation=operation, source=source, destination=destination)
|
task = self._repository.create_task(operation=operation, source=source, destination=destination)
|
||||||
@@ -40,3 +44,41 @@ class TaskService:
|
|||||||
for task in tasks
|
for task in tasks
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def cancel_task(self, task_id: str) -> TaskDetailResponse:
|
||||||
|
task = self._repository.get_task(task_id)
|
||||||
|
if not task:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_found",
|
||||||
|
message="Task was not found",
|
||||||
|
status_code=404,
|
||||||
|
details={"task_id": task_id},
|
||||||
|
)
|
||||||
|
if task["operation"] not in FILE_ACTION_CANCELLABLE_OPERATIONS:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_cancellable",
|
||||||
|
message="Task cannot be cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
if task["status"] not in {"queued", "running", "cancelling"}:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_cancellable",
|
||||||
|
message="Task cannot be cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
updated = self._repository.request_cancellation(task_id)
|
||||||
|
if not updated:
|
||||||
|
raise AppError(
|
||||||
|
code="task_not_cancellable",
|
||||||
|
message="Task cannot be cancelled",
|
||||||
|
status_code=409,
|
||||||
|
details={"task_id": task_id, "status": task["status"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
if updated["status"] == "cancelled" and self._history_repository:
|
||||||
|
self._history_repository.update_entry(entry_id=task_id, status="cancelled")
|
||||||
|
|
||||||
|
return TaskDetailResponse(**updated)
|
||||||
|
|||||||
+800
-122
File diff suppressed because it is too large
Load Diff
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -3,6 +3,7 @@ from __future__ import annotations
|
|||||||
import asyncio
|
import asyncio
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import threading
|
||||||
import time
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -25,6 +26,21 @@ class FailingFilesystemAdapter(FilesystemAdapter):
|
|||||||
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
||||||
raise OSError("forced copy failure")
|
raise OSError("forced copy failure")
|
||||||
|
|
||||||
|
def copy_directory(self, source: str, destination: str) -> None:
|
||||||
|
raise OSError("forced copy failure")
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingCopyFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.entered = threading.Event()
|
||||||
|
self.release = threading.Event()
|
||||||
|
|
||||||
|
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
||||||
|
self.entered.set()
|
||||||
|
self.release.wait(timeout=2.0)
|
||||||
|
return super().copy_file(source=source, destination=destination, on_progress=on_progress)
|
||||||
|
|
||||||
|
|
||||||
class CopyApiGoldenTest(unittest.TestCase):
|
class CopyApiGoldenTest(unittest.TestCase):
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
@@ -69,11 +85,21 @@ class CopyApiGoldenTest(unittest.TestCase):
|
|||||||
while time.time() < deadline:
|
while time.time() < deadline:
|
||||||
response = self._request("GET", f"/api/tasks/{task_id}")
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
body = response.json()
|
body = response.json()
|
||||||
if body["status"] in {"completed", "failed"}:
|
if body["status"] in {"completed", "failed", "cancelled"}:
|
||||||
return body
|
return body
|
||||||
time.sleep(0.02)
|
time.sleep(0.02)
|
||||||
self.fail("task did not reach terminal state in time")
|
self.fail("task did not reach terminal state in time")
|
||||||
|
|
||||||
|
def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail(f"task did not reach one of {sorted(statuses)} in time")
|
||||||
|
|
||||||
def test_copy_success_create_task_shape(self) -> None:
|
def test_copy_success_create_task_shape(self) -> None:
|
||||||
src = self.root / "source.txt"
|
src = self.root / "source.txt"
|
||||||
src.write_text("hello", encoding="utf-8")
|
src.write_text("hello", encoding="utf-8")
|
||||||
@@ -91,11 +117,137 @@ class CopyApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
detail = self._wait_task(body["task_id"])
|
detail = self._wait_task(body["task_id"])
|
||||||
self.assertEqual(detail["status"], "completed")
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
self.assertEqual(detail["total_bytes"], 5)
|
self.assertEqual(detail["total_bytes"], 5)
|
||||||
self.assertEqual(detail["done_bytes"], 5)
|
self.assertEqual(detail["done_bytes"], 5)
|
||||||
self.assertTrue((self.root / "copy.txt").exists())
|
self.assertTrue((self.root / "copy.txt").exists())
|
||||||
self.assertEqual((self.root / "copy.txt").read_text(encoding="utf-8"), "hello")
|
self.assertEqual((self.root / "copy.txt").read_text(encoding="utf-8"), "hello")
|
||||||
|
|
||||||
|
def test_copy_batch_multi_file_success(self) -> None:
|
||||||
|
(self.root / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
(self.root / "dest").mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/a.txt", "storage1/b.txt"],
|
||||||
|
"destination_base": "storage1/dest",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertEqual((self.root / "dest" / "a.txt").read_text(encoding="utf-8"), "A")
|
||||||
|
self.assertEqual((self.root / "dest" / "b.txt").read_text(encoding="utf-8"), "B")
|
||||||
|
|
||||||
|
def test_copy_single_directory_success(self) -> None:
|
||||||
|
src = self.root / "photos"
|
||||||
|
(src / "nested").mkdir(parents=True)
|
||||||
|
(src / "cover.jpg").write_text("img", encoding="utf-8")
|
||||||
|
(src / "nested" / "a.txt").write_text("nested", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{"source": "storage1/photos", "destination": "storage1/photos-copy"},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root / "photos-copy").is_dir())
|
||||||
|
self.assertEqual((self.root / "photos-copy" / "cover.jpg").read_text(encoding="utf-8"), "img")
|
||||||
|
self.assertEqual((self.root / "photos-copy" / "nested" / "a.txt").read_text(encoding="utf-8"), "nested")
|
||||||
|
|
||||||
|
def test_copy_batch_multi_directory_success(self) -> None:
|
||||||
|
(self.root / "dir1" / "sub").mkdir(parents=True)
|
||||||
|
(self.root / "dir2").mkdir()
|
||||||
|
(self.root / "dir1" / "sub" / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "dir2" / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
(self.root / "dest").mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/dir1", "storage1/dir2"],
|
||||||
|
"destination_base": "storage1/dest",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertEqual((self.root / "dest" / "dir1" / "sub" / "a.txt").read_text(encoding="utf-8"), "A")
|
||||||
|
self.assertEqual((self.root / "dest" / "dir2" / "b.txt").read_text(encoding="utf-8"), "B")
|
||||||
|
|
||||||
|
def test_copy_batch_mixed_file_and_directory_success(self) -> None:
|
||||||
|
(self.root / "file.txt").write_text("F", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "nested").mkdir(parents=True)
|
||||||
|
(self.root / "docs" / "nested" / "note.txt").write_text("N", encoding="utf-8")
|
||||||
|
(self.root / "dest").mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/file.txt", "storage1/docs"],
|
||||||
|
"destination_base": "storage1/dest",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertEqual((self.root / "dest" / "file.txt").read_text(encoding="utf-8"), "F")
|
||||||
|
self.assertEqual((self.root / "dest" / "docs" / "nested" / "note.txt").read_text(encoding="utf-8"), "N")
|
||||||
|
|
||||||
|
def test_copy_batch_cancelled_after_current_file_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingCopyFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
(self.root / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
(self.root / "dest").mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/a.txt", "storage1/b.txt"],
|
||||||
|
"destination_base": "storage1/dest",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["current_item"], "a.txt")
|
||||||
|
|
||||||
|
cancel_response = self._request("POST", f"/api/tasks/{task_id}/cancel")
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root / "dest" / "a.txt").exists())
|
||||||
|
self.assertFalse((self.root / "dest" / "b.txt").exists())
|
||||||
|
|
||||||
def test_copy_source_not_found(self) -> None:
|
def test_copy_source_not_found(self) -> None:
|
||||||
response = self._request(
|
response = self._request(
|
||||||
"POST",
|
"POST",
|
||||||
@@ -115,18 +267,6 @@ class CopyApiGoldenTest(unittest.TestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_copy_source_is_directory_type_conflict(self) -> None:
|
|
||||||
(self.root / "dir").mkdir()
|
|
||||||
|
|
||||||
response = self._request(
|
|
||||||
"POST",
|
|
||||||
"/api/files/copy",
|
|
||||||
{"source": "storage1/dir", "destination": "storage1/out.txt"},
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 409)
|
|
||||||
self.assertEqual(response.json()["error"]["code"], "type_conflict")
|
|
||||||
|
|
||||||
def test_copy_destination_exists_already_exists(self) -> None:
|
def test_copy_destination_exists_already_exists(self) -> None:
|
||||||
(self.root / "source.txt").write_text("x", encoding="utf-8")
|
(self.root / "source.txt").write_text("x", encoding="utf-8")
|
||||||
(self.root / "exists.txt").write_text("y", encoding="utf-8")
|
(self.root / "exists.txt").write_text("y", encoding="utf-8")
|
||||||
@@ -149,6 +289,38 @@ class CopyApiGoldenTest(unittest.TestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_copy_directory_destination_exists_already_exists(self) -> None:
|
||||||
|
(self.root / "src").mkdir()
|
||||||
|
(self.root / "src" / "a.txt").write_text("x", encoding="utf-8")
|
||||||
|
(self.root / "exists").mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{"source": "storage1/src", "destination": "storage1/exists"},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "already_exists")
|
||||||
|
|
||||||
|
def test_copy_batch_destination_exists_already_exists(self) -> None:
|
||||||
|
(self.root / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "dest").mkdir()
|
||||||
|
(self.root / "dest" / "a.txt").write_text("exists", encoding="utf-8")
|
||||||
|
(self.root / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/a.txt", "storage1/b.txt"],
|
||||||
|
"destination_base": "storage1/dest",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "already_exists")
|
||||||
|
|
||||||
def test_copy_traversal_source(self) -> None:
|
def test_copy_traversal_source(self) -> None:
|
||||||
response = self._request(
|
response = self._request(
|
||||||
"POST",
|
"POST",
|
||||||
@@ -171,6 +343,31 @@ class CopyApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(response.status_code, 403)
|
self.assertEqual(response.status_code, 403)
|
||||||
self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")
|
self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")
|
||||||
|
|
||||||
|
def test_copy_invalid_root_alias(self) -> None:
|
||||||
|
(self.root / "source.txt").write_text("x", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{"source": "storage1/source.txt", "destination": "unknown/out.txt"},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_root_alias")
|
||||||
|
|
||||||
|
def test_copy_destination_inside_directory_source_blocked(self) -> None:
|
||||||
|
(self.root / "src").mkdir()
|
||||||
|
(self.root / "src" / "a.txt").write_text("x", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/copy",
|
||||||
|
{"source": "storage1/src", "destination": "storage1/src/child"},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
|
||||||
def test_copy_source_symlink_rejected(self) -> None:
|
def test_copy_source_symlink_rejected(self) -> None:
|
||||||
target = self.root / "real.txt"
|
target = self.root / "real.txt"
|
||||||
target.write_text("x", encoding="utf-8")
|
target.write_text("x", encoding="utf-8")
|
||||||
|
|||||||
@@ -0,0 +1,344 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import unittest
|
||||||
|
import zipfile
|
||||||
|
from io import BytesIO
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
|
from backend.app.dependencies import get_archive_download_task_service, get_file_ops_service, get_task_service
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
|
from backend.app.main import app
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
|
||||||
|
from backend.app.services.file_ops_service import FileOpsService, ZipDownloadPreflightLimits
|
||||||
|
from backend.app.services.task_service import TaskService
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingArchiveFileOpsService(FileOpsService):
|
||||||
|
def __init__(self, *args, gate: threading.Event, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._gate = gate
|
||||||
|
|
||||||
|
def _run_zip_download_preflight(self, resolved_targets: list) -> None:
|
||||||
|
super()._run_zip_download_preflight(resolved_targets)
|
||||||
|
self._gate.wait(timeout=2.0)
|
||||||
|
|
||||||
|
|
||||||
|
class FailingArchiveFileOpsService(FileOpsService):
|
||||||
|
def _write_download_target_to_zip(self, archive: zipfile.ZipFile, resolved_target, on_each_item=None) -> None:
|
||||||
|
archive.writestr("partial.txt", b"partial")
|
||||||
|
raise OSError("forced archive failure")
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingArchiveBuildFileOpsService(FileOpsService):
|
||||||
|
def __init__(self, *args, entered: threading.Event, release: threading.Event, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._entered = entered
|
||||||
|
self._release = release
|
||||||
|
|
||||||
|
def _write_download_target_to_zip(self, archive: zipfile.ZipFile, resolved_target, on_each_item=None) -> None:
|
||||||
|
archive.writestr("partial.txt", b"partial")
|
||||||
|
self._entered.set()
|
||||||
|
self._release.wait(timeout=2.0)
|
||||||
|
if on_each_item:
|
||||||
|
on_each_item()
|
||||||
|
super()._write_download_target_to_zip(archive, resolved_target, on_each_item=on_each_item)
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadApiGoldenTest(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
|
self.root = Path(self.temp_dir.name) / "root"
|
||||||
|
self.root.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.db_path = str(Path(self.temp_dir.name) / "tasks.db")
|
||||||
|
self.artifact_root = Path(self.temp_dir.name) / "archive_tmp"
|
||||||
|
self.path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
|
||||||
|
self.filesystem = FilesystemAdapter()
|
||||||
|
self.task_repo = TaskRepository(self.db_path)
|
||||||
|
self.history_repo = HistoryRepository(self.db_path)
|
||||||
|
self._override_services()
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
app.dependency_overrides.clear()
|
||||||
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
|
def _override_services(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
file_ops_service: FileOpsService | None = None,
|
||||||
|
artifact_ttl_seconds: int = 1800,
|
||||||
|
) -> None:
|
||||||
|
file_ops_service = file_ops_service or FileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=self.filesystem,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
zip_download_preflight_limits=ZipDownloadPreflightLimits(),
|
||||||
|
)
|
||||||
|
runner = TaskRunner(repository=self.task_repo, filesystem=self.filesystem, history_repository=self.history_repo)
|
||||||
|
archive_service = ArchiveDownloadTaskService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
repository=self.task_repo,
|
||||||
|
runner=runner,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
file_ops_service=file_ops_service,
|
||||||
|
artifact_root=self.artifact_root,
|
||||||
|
artifact_ttl_seconds=artifact_ttl_seconds,
|
||||||
|
)
|
||||||
|
task_service = TaskService(repository=self.task_repo)
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return file_ops_service
|
||||||
|
|
||||||
|
async def _override_archive_service() -> ArchiveDownloadTaskService:
|
||||||
|
return archive_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_archive_download_task_service] = _override_archive_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
|
def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
if method == "GET":
|
||||||
|
return await client.get(url)
|
||||||
|
return await client.post(url, json=payload)
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _wait_for_task_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail("task did not reach expected status in time")
|
||||||
|
|
||||||
|
def test_download_success_for_allowed_file(self) -> None:
|
||||||
|
src = self.root / "report.txt"
|
||||||
|
src.write_text("hello download", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("GET", "/api/files/download?path=storage1/report.txt")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.content, b"hello download")
|
||||||
|
self.assertIn('attachment; filename="report.txt"', response.headers.get("content-disposition", ""))
|
||||||
|
self.assertEqual(response.headers.get("content-type"), "text/plain; charset=utf-8")
|
||||||
|
|
||||||
|
def test_archive_prepare_single_directory_ends_ready(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
|
||||||
|
self.assertEqual(created.status_code, 202)
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"ready"})
|
||||||
|
self.assertEqual(task["operation"], "download")
|
||||||
|
self.assertEqual(task["status"], "ready")
|
||||||
|
self.assertEqual(task["destination"], "docs.zip")
|
||||||
|
|
||||||
|
def test_archive_prepare_multi_mixed_selection_ends_ready(self) -> None:
|
||||||
|
(self.root / "readme.txt").write_text("R", encoding="utf-8")
|
||||||
|
(self.root / "photos").mkdir()
|
||||||
|
(self.root / "photos" / "img.txt").write_text("P", encoding="utf-8")
|
||||||
|
|
||||||
|
created = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/download/archive-prepare",
|
||||||
|
{"paths": ["storage1/readme.txt", "storage1/photos"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(created.status_code, 202)
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"ready"})
|
||||||
|
self.assertEqual(task["status"], "ready")
|
||||||
|
self.assertEqual(task["source"], "storage1/readme.txt, storage1/photos")
|
||||||
|
self.assertRegex(task["destination"], r'^kodidownload-\d{8}-\d{6}\.zip$')
|
||||||
|
|
||||||
|
def test_archive_retrieval_from_ready_task_works(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"ready"})
|
||||||
|
|
||||||
|
response = self._request("GET", f"/api/files/download/archive/{task['id']}")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertIn('attachment; filename="docs.zip"', response.headers.get("content-disposition", ""))
|
||||||
|
with zipfile.ZipFile(BytesIO(response.content)) as archive:
|
||||||
|
self.assertIn("docs/", archive.namelist())
|
||||||
|
self.assertIn("docs/a.txt", archive.namelist())
|
||||||
|
self.assertEqual(archive.read("docs/a.txt"), b"a")
|
||||||
|
|
||||||
|
def test_archive_retrieval_before_ready_rejected(self) -> None:
|
||||||
|
gate = threading.Event()
|
||||||
|
file_ops_service = BlockingArchiveFileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=self.filesystem,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
zip_download_preflight_limits=ZipDownloadPreflightLimits(),
|
||||||
|
gate=gate,
|
||||||
|
)
|
||||||
|
self._override_services(file_ops_service=file_ops_service)
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"requested", "preparing"})
|
||||||
|
|
||||||
|
response = self._request("GET", f"/api/files/download/archive/{task['id']}")
|
||||||
|
|
||||||
|
gate.set()
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "download_not_ready")
|
||||||
|
|
||||||
|
def test_archive_preflight_failure_sets_failed_and_error_code(self) -> None:
|
||||||
|
target = self.root / "real.txt"
|
||||||
|
target.write_text("x", encoding="utf-8")
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "link.txt").symlink_to(target)
|
||||||
|
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"failed"})
|
||||||
|
|
||||||
|
self.assertEqual(task["status"], "failed")
|
||||||
|
self.assertEqual(task["error_code"], "download_preflight_failed")
|
||||||
|
|
||||||
|
def test_archive_failure_removes_partial_artifact(self) -> None:
|
||||||
|
file_ops_service = FailingArchiveFileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=self.filesystem,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
zip_download_preflight_limits=ZipDownloadPreflightLimits(),
|
||||||
|
)
|
||||||
|
self._override_services(file_ops_service=file_ops_service)
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"failed"})
|
||||||
|
|
||||||
|
self.assertEqual(task["error_code"], "io_error")
|
||||||
|
self.assertEqual(list(self.artifact_root.glob("*")), [])
|
||||||
|
|
||||||
|
def test_archive_cancel_during_preparing_sets_cancelled_and_removes_partial_artifact(self) -> None:
|
||||||
|
entered = threading.Event()
|
||||||
|
release = threading.Event()
|
||||||
|
file_ops_service = BlockingArchiveBuildFileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=self.filesystem,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
zip_download_preflight_limits=ZipDownloadPreflightLimits(),
|
||||||
|
entered=entered,
|
||||||
|
release=release,
|
||||||
|
)
|
||||||
|
self._override_services(file_ops_service=file_ops_service)
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
|
||||||
|
self.assertEqual(created.status_code, 202)
|
||||||
|
self.assertTrue(entered.wait(timeout=2.0))
|
||||||
|
response = self._request("POST", f"/api/files/download/archive/{created.json()['task_id']}/cancel")
|
||||||
|
release.set()
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"cancelled"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.json()["status"], "cancelled")
|
||||||
|
self.assertEqual(task["status"], "cancelled")
|
||||||
|
self.assertEqual(list(self.artifact_root.glob("*")), [])
|
||||||
|
|
||||||
|
def test_archive_retrieval_for_cancelled_task_rejected(self) -> None:
|
||||||
|
entered = threading.Event()
|
||||||
|
release = threading.Event()
|
||||||
|
file_ops_service = BlockingArchiveBuildFileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=self.filesystem,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
zip_download_preflight_limits=ZipDownloadPreflightLimits(),
|
||||||
|
entered=entered,
|
||||||
|
release=release,
|
||||||
|
)
|
||||||
|
self._override_services(file_ops_service=file_ops_service)
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
|
||||||
|
self.assertTrue(entered.wait(timeout=2.0))
|
||||||
|
cancel_response = self._request("POST", f"/api/files/download/archive/{created.json()['task_id']}/cancel")
|
||||||
|
release.set()
|
||||||
|
response = self._request("GET", f"/api/files/download/archive/{created.json()['task_id']}")
|
||||||
|
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "download_cancelled")
|
||||||
|
|
||||||
|
def test_archive_cancel_after_ready_rejected(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"ready"})
|
||||||
|
|
||||||
|
response = self._request("POST", f"/api/files/download/archive/{task['id']}/cancel")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "download_not_cancellable")
|
||||||
|
|
||||||
|
def test_expired_artifact_rejected_and_removed(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
self._override_services(artifact_ttl_seconds=1)
|
||||||
|
created = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/docs"]})
|
||||||
|
task = self._wait_for_task_status(created.json()["task_id"], {"ready"})
|
||||||
|
artifact = self.task_repo.get_artifact(task["id"])
|
||||||
|
self.task_repo.upsert_artifact(
|
||||||
|
task_id=task["id"],
|
||||||
|
file_path=artifact["file_path"],
|
||||||
|
file_name=artifact["file_name"],
|
||||||
|
expires_at="2000-01-01T00:00:00Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._request("GET", f"/api/files/download/archive/{task['id']}")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 410)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "archive_expired")
|
||||||
|
self.assertIsNone(self.task_repo.get_artifact(task["id"]))
|
||||||
|
self.assertFalse(Path(artifact["file_path"]).exists())
|
||||||
|
|
||||||
|
def test_archive_prepare_rejects_single_file(self) -> None:
|
||||||
|
(self.root / "report.txt").write_text("hello download", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/download/archive-prepare", {"paths": ["storage1/report.txt"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
|
||||||
|
def test_direct_archive_download_route_rejected(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("GET", "/api/files/download?path=storage1/docs")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -0,0 +1,255 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import unittest
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
|
from backend.app.dependencies import get_duplicate_task_service, get_task_service
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
|
from backend.app.main import app
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.duplicate_task_service import DuplicateTaskService
|
||||||
|
from backend.app.services.task_service import TaskService
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
|
class FailOnSecondCopyFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._copy_calls = 0
|
||||||
|
|
||||||
|
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
||||||
|
self._copy_calls += 1
|
||||||
|
if self._copy_calls == 2:
|
||||||
|
raise OSError("forced duplicate failure")
|
||||||
|
super().copy_file(source=source, destination=destination, on_progress=on_progress)
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingDuplicateFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.entered = threading.Event()
|
||||||
|
self.release = threading.Event()
|
||||||
|
|
||||||
|
def copy_file(self, source: str, destination: str, on_progress: callable | None = None) -> None:
|
||||||
|
self.entered.set()
|
||||||
|
self.release.wait(timeout=2.0)
|
||||||
|
super().copy_file(source=source, destination=destination, on_progress=on_progress)
|
||||||
|
|
||||||
|
|
||||||
|
class DuplicateApiGoldenTest(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
|
self.root = Path(self.temp_dir.name) / "root"
|
||||||
|
self.root.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=FilesystemAdapter())
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
app.dependency_overrides.clear()
|
||||||
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
|
def _set_services(self, path_guard: PathGuard, filesystem: FilesystemAdapter) -> None:
|
||||||
|
runner = TaskRunner(repository=self.repo, filesystem=filesystem)
|
||||||
|
duplicate_service = DuplicateTaskService(path_guard=path_guard, repository=self.repo, runner=runner)
|
||||||
|
task_service = TaskService(repository=self.repo)
|
||||||
|
|
||||||
|
async def _override_duplicate_service() -> DuplicateTaskService:
|
||||||
|
return duplicate_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
|
def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
if method == "POST":
|
||||||
|
return await client.post(url, json=payload)
|
||||||
|
return await client.get(url)
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in {"completed", "failed", "cancelled"}:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail("task did not reach terminal state in time")
|
||||||
|
|
||||||
|
def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail(f"task did not reach one of {sorted(statuses)} in time")
|
||||||
|
|
||||||
|
def test_duplicate_single_file_success(self) -> None:
|
||||||
|
(self.root / "note.txt").write_text("hello", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/note.txt"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["operation"], "duplicate")
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
|
self.assertEqual(detail["source"], "storage1/note.txt")
|
||||||
|
self.assertEqual(detail["destination"], "storage1/note copy.txt")
|
||||||
|
self.assertEqual((self.root / "note copy.txt").read_text(encoding="utf-8"), "hello")
|
||||||
|
|
||||||
|
def test_duplicate_single_directory_success(self) -> None:
|
||||||
|
(self.root / "Folder" / "nested").mkdir(parents=True)
|
||||||
|
(self.root / "Folder" / "alpha.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "Folder" / "nested" / "beta.txt").write_text("B", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Folder"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root / "Folder copy").is_dir())
|
||||||
|
self.assertEqual((self.root / "Folder copy" / "alpha.txt").read_text(encoding="utf-8"), "A")
|
||||||
|
self.assertEqual((self.root / "Folder copy" / "nested" / "beta.txt").read_text(encoding="utf-8"), "B")
|
||||||
|
|
||||||
|
def test_duplicate_multi_select_success(self) -> None:
|
||||||
|
(self.root / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "nested").mkdir(parents=True)
|
||||||
|
(self.root / "docs" / "nested" / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/duplicate",
|
||||||
|
{"paths": ["storage1/a.txt", "storage1/docs"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertEqual(detail["source"], "2 items")
|
||||||
|
self.assertEqual(detail["destination"], "same directory")
|
||||||
|
self.assertEqual((self.root / "a copy.txt").read_text(encoding="utf-8"), "A")
|
||||||
|
self.assertEqual((self.root / "docs copy" / "nested" / "b.txt").read_text(encoding="utf-8"), "B")
|
||||||
|
|
||||||
|
def test_duplicate_multi_select_cancelled_after_current_item_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingDuplicateFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
(self.root / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/duplicate",
|
||||||
|
{"paths": ["storage1/a.txt", "storage1/b.txt"]},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["current_item"], "a.txt")
|
||||||
|
|
||||||
|
cancel_response = self._request("POST", f"/api/tasks/{task_id}/cancel")
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root / "a copy.txt").exists())
|
||||||
|
self.assertFalse((self.root / "b copy.txt").exists())
|
||||||
|
|
||||||
|
def test_duplicate_collision_resolution_for_files_and_directories(self) -> None:
|
||||||
|
(self.root / "report.txt").write_text("R", encoding="utf-8")
|
||||||
|
(self.root / "report copy.txt").write_text("existing", encoding="utf-8")
|
||||||
|
(self.root / "report copy 2.txt").write_text("existing", encoding="utf-8")
|
||||||
|
(self.root / "Album").mkdir()
|
||||||
|
(self.root / "Album copy").mkdir()
|
||||||
|
(self.root / "Album copy 2").mkdir()
|
||||||
|
|
||||||
|
file_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/report.txt"]})
|
||||||
|
dir_response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/Album"]})
|
||||||
|
|
||||||
|
self.assertEqual(self._wait_task(file_response.json()["task_id"])["destination"], "storage1/report copy 3.txt")
|
||||||
|
self.assertEqual(self._wait_task(dir_response.json()["task_id"])["destination"], "storage1/Album copy 3")
|
||||||
|
self.assertTrue((self.root / "report copy 3.txt").exists())
|
||||||
|
self.assertTrue((self.root / "Album copy 3").is_dir())
|
||||||
|
|
||||||
|
def test_duplicate_skips_top_level_macos_sidecar(self) -> None:
|
||||||
|
(self.root / "._note.txt").write_text("sidecar", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/._note.txt"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 0)
|
||||||
|
self.assertEqual(detail["total_items"], 0)
|
||||||
|
self.assertFalse((self.root / "._note copy.txt").exists())
|
||||||
|
|
||||||
|
def test_duplicate_skips_macos_sidecars_inside_directory_tree(self) -> None:
|
||||||
|
(self.root / "docs" / "nested").mkdir(parents=True)
|
||||||
|
(self.root / "docs" / "keep.txt").write_text("keep", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "._skip.txt").write_text("skip", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "nested" / "._nested.txt").write_text("skip", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "nested" / "real.txt").write_text("real", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertTrue((self.root / "docs copy" / "keep.txt").exists())
|
||||||
|
self.assertTrue((self.root / "docs copy" / "nested" / "real.txt").exists())
|
||||||
|
self.assertFalse((self.root / "docs copy" / "._skip.txt").exists())
|
||||||
|
self.assertFalse((self.root / "docs copy" / "nested" / "._nested.txt").exists())
|
||||||
|
|
||||||
|
def test_duplicate_failure_removes_partial_directory_artifact(self) -> None:
|
||||||
|
(self.root / "docs").mkdir()
|
||||||
|
(self.root / "docs" / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root / "docs" / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root), "storage2": str(self.root)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=FailOnSecondCopyFilesystemAdapter())
|
||||||
|
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/docs"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "failed")
|
||||||
|
self.assertEqual(detail["error_code"], "io_error")
|
||||||
|
self.assertFalse((self.root / "docs copy").exists())
|
||||||
|
|
||||||
|
def test_duplicate_path_safety_stays_intact(self) -> None:
|
||||||
|
response = self._request("POST", "/api/files/duplicate", {"paths": ["storage1/../escape.txt"]})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -80,6 +80,19 @@ class EditApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(body["content_type"], "text/x-python")
|
self.assertEqual(body["content_type"], "text/x-python")
|
||||||
self.assertEqual(body["content"], "print('hello')\n")
|
self.assertEqual(body["content"], "print('hello')\n")
|
||||||
|
|
||||||
|
def test_edit_view_conf_success(self) -> None:
|
||||||
|
file_path = self.root / "app.conf"
|
||||||
|
file_path.write_text("enabled=true\n", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("GET", "/api/files/view", params={"path": "storage1/app.conf", "for_edit": "true"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["path"], "storage1/app.conf")
|
||||||
|
self.assertEqual(body["name"], "app.conf")
|
||||||
|
self.assertEqual(body["content_type"], "text/plain")
|
||||||
|
self.assertEqual(body["content"], "enabled=true\n")
|
||||||
|
|
||||||
def test_save_success(self) -> None:
|
def test_save_success(self) -> None:
|
||||||
file_path = self.root / "notes.txt"
|
file_path = self.root / "notes.txt"
|
||||||
file_path.write_text("hello", encoding="utf-8")
|
file_path.write_text("hello", encoding="utf-8")
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ from __future__ import annotations
|
|||||||
import asyncio
|
import asyncio
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
@@ -10,11 +12,27 @@ import httpx
|
|||||||
|
|
||||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
from backend.app.dependencies import get_file_ops_service
|
from backend.app.dependencies import get_delete_task_service, get_file_ops_service, get_task_service
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
from backend.app.main import app
|
from backend.app.main import app
|
||||||
from backend.app.security.path_guard import PathGuard
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.delete_task_service import DeleteTaskService
|
||||||
from backend.app.services.file_ops_service import FileOpsService
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
from backend.app.services.task_service import TaskService
|
||||||
|
from backend.app.tasks_runner import TaskRunner
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingDeleteFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.entered = threading.Event()
|
||||||
|
self.release = threading.Event()
|
||||||
|
|
||||||
|
def delete_file(self, path: Path) -> None:
|
||||||
|
self.entered.set()
|
||||||
|
self.release.wait(timeout=2.0)
|
||||||
|
super().delete_file(path)
|
||||||
|
|
||||||
|
|
||||||
class FileOpsApiGoldenTest(unittest.TestCase):
|
class FileOpsApiGoldenTest(unittest.TestCase):
|
||||||
@@ -22,21 +40,37 @@ class FileOpsApiGoldenTest(unittest.TestCase):
|
|||||||
self.temp_dir = tempfile.TemporaryDirectory()
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
self.root = Path(self.temp_dir.name) / "root"
|
self.root = Path(self.temp_dir.name) / "root"
|
||||||
self.root.mkdir(parents=True, exist_ok=True)
|
self.root.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.repo = TaskRepository(str(Path(self.temp_dir.name) / "tasks.db"))
|
||||||
|
|
||||||
self.scope = self.root / "scope"
|
self.scope = self.root / "scope"
|
||||||
self.scope.mkdir(parents=True, exist_ok=True)
|
self.scope.mkdir(parents=True, exist_ok=True)
|
||||||
(self.scope / "old.txt").write_text("x", encoding="utf-8")
|
(self.scope / "old.txt").write_text("x", encoding="utf-8")
|
||||||
(self.scope / "existing.txt").write_text("y", encoding="utf-8")
|
(self.scope / "existing.txt").write_text("y", encoding="utf-8")
|
||||||
|
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root)})
|
||||||
service = FileOpsService(
|
service = FileOpsService(
|
||||||
path_guard=PathGuard({"storage1": str(self.root)}),
|
path_guard=path_guard,
|
||||||
filesystem=FilesystemAdapter(),
|
filesystem=FilesystemAdapter(),
|
||||||
)
|
)
|
||||||
|
delete_service = DeleteTaskService(
|
||||||
|
path_guard=path_guard,
|
||||||
|
repository=self.repo,
|
||||||
|
runner=TaskRunner(repository=self.repo, filesystem=FilesystemAdapter()),
|
||||||
|
)
|
||||||
|
task_service = TaskService(repository=self.repo)
|
||||||
|
|
||||||
async def _override_file_ops_service() -> FileOpsService:
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
return service
|
return service
|
||||||
|
|
||||||
|
async def _override_delete_task_service() -> DeleteTaskService:
|
||||||
|
return delete_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_delete_task_service] = _override_delete_task_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
def tearDown(self) -> None:
|
def tearDown(self) -> None:
|
||||||
app.dependency_overrides.clear()
|
app.dependency_overrides.clear()
|
||||||
@@ -50,6 +84,34 @@ class FileOpsApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
return asyncio.run(_run())
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _get(self, url: str) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
return await client.get(url)
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._get(f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in {"completed", "failed", "cancelled"}:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail("task did not reach terminal state in time")
|
||||||
|
|
||||||
|
def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._get(f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail(f"task did not reach one of {sorted(statuses)} in time")
|
||||||
|
|
||||||
def test_mkdir_success(self) -> None:
|
def test_mkdir_success(self) -> None:
|
||||||
response = self._post(
|
response = self._post(
|
||||||
"/api/files/mkdir",
|
"/api/files/mkdir",
|
||||||
@@ -225,8 +287,63 @@ class FileOpsApiGoldenTest(unittest.TestCase):
|
|||||||
{"path": "storage1/scope/delete_me.txt"},
|
{"path": "storage1/scope/delete_me.txt"},
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 202)
|
||||||
self.assertEqual(response.json(), {"path": "storage1/scope/delete_me.txt"})
|
body = response.json()
|
||||||
|
self.assertEqual(body["status"], "queued")
|
||||||
|
detail = self._wait_task(body["task_id"])
|
||||||
|
self.assertEqual(detail["operation"], "delete")
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
|
self.assertIsNone(detail["current_item"])
|
||||||
|
self.assertFalse(target.exists())
|
||||||
|
|
||||||
|
def test_delete_file_cancelled_after_current_delete_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingDeleteFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root)})
|
||||||
|
service = FileOpsService(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
delete_service = DeleteTaskService(
|
||||||
|
path_guard=path_guard,
|
||||||
|
repository=self.repo,
|
||||||
|
runner=TaskRunner(repository=self.repo, filesystem=blocking_fs),
|
||||||
|
)
|
||||||
|
task_service = TaskService(repository=self.repo)
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return service
|
||||||
|
|
||||||
|
async def _override_delete_task_service() -> DeleteTaskService:
|
||||||
|
return delete_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_delete_task_service] = _override_delete_task_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
|
target = self.scope / "delete_later.txt"
|
||||||
|
target.write_text("z", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{"path": "storage1/scope/delete_later.txt"},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["current_item"], "delete_later.txt")
|
||||||
|
|
||||||
|
cancel_response = self._post(f"/api/tasks/{task_id}/cancel", {})
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
self.assertFalse(target.exists())
|
self.assertFalse(target.exists())
|
||||||
|
|
||||||
def test_delete_empty_directory_success(self) -> None:
|
def test_delete_empty_directory_success(self) -> None:
|
||||||
@@ -238,8 +355,15 @@ class FileOpsApiGoldenTest(unittest.TestCase):
|
|||||||
{"path": "storage1/scope/empty_dir"},
|
{"path": "storage1/scope/empty_dir"},
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 202)
|
||||||
self.assertEqual(response.json(), {"path": "storage1/scope/empty_dir"})
|
body = response.json()
|
||||||
|
self.assertEqual(body["status"], "queued")
|
||||||
|
detail = self._wait_task(body["task_id"])
|
||||||
|
self.assertEqual(detail["operation"], "delete")
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 0)
|
||||||
|
self.assertEqual(detail["total_items"], 0)
|
||||||
|
self.assertIsNone(detail["current_item"])
|
||||||
self.assertFalse(target.exists())
|
self.assertFalse(target.exists())
|
||||||
|
|
||||||
def test_delete_not_found(self) -> None:
|
def test_delete_not_found(self) -> None:
|
||||||
@@ -300,6 +424,188 @@ class FileOpsApiGoldenTest(unittest.TestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_delete_non_empty_directory_recursive_success(self) -> None:
|
||||||
|
target = self.scope / "non_empty_recursive"
|
||||||
|
target.mkdir()
|
||||||
|
nested = target / "nested"
|
||||||
|
nested.mkdir()
|
||||||
|
(nested / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
(target / "b.txt").write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{"path": "storage1/scope/non_empty_recursive", "recursive": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["status"], "queued")
|
||||||
|
detail = self._wait_task(body["task_id"])
|
||||||
|
self.assertEqual(detail["operation"], "delete")
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertIsNone(detail["current_item"])
|
||||||
|
self.assertFalse(target.exists())
|
||||||
|
|
||||||
|
def test_delete_non_empty_directory_recursive_cancelled_after_current_file_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingDeleteFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root)})
|
||||||
|
service = FileOpsService(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
delete_service = DeleteTaskService(
|
||||||
|
path_guard=path_guard,
|
||||||
|
repository=self.repo,
|
||||||
|
runner=TaskRunner(repository=self.repo, filesystem=blocking_fs),
|
||||||
|
)
|
||||||
|
task_service = TaskService(repository=self.repo)
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return service
|
||||||
|
|
||||||
|
async def _override_delete_task_service() -> DeleteTaskService:
|
||||||
|
return delete_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_delete_task_service] = _override_delete_task_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
|
target = self.scope / "delete_recursive_later"
|
||||||
|
target.mkdir()
|
||||||
|
nested = target / "nested"
|
||||||
|
nested.mkdir()
|
||||||
|
(target / "a.txt").write_text("a", encoding="utf-8")
|
||||||
|
(nested / "b.txt").write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{"path": "storage1/scope/delete_recursive_later", "recursive": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["current_item"], "a.txt")
|
||||||
|
self.assertEqual(running["done_items"], 0)
|
||||||
|
self.assertEqual(running["total_items"], 2)
|
||||||
|
|
||||||
|
cancel_response = self._post(f"/api/tasks/{task_id}/cancel", {})
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertFalse(target.joinpath("a.txt").exists())
|
||||||
|
self.assertTrue(target.joinpath("nested", "b.txt").exists())
|
||||||
|
self.assertTrue(target.exists())
|
||||||
|
|
||||||
|
def test_delete_batch_multi_select_starts_one_task_and_completes(self) -> None:
|
||||||
|
first = self.scope / "batch-a.txt"
|
||||||
|
second_dir = self.scope / "batch-dir"
|
||||||
|
second_nested = second_dir / "nested.txt"
|
||||||
|
first.write_text("a", encoding="utf-8")
|
||||||
|
second_dir.mkdir()
|
||||||
|
second_nested.write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{
|
||||||
|
"paths": ["storage1/scope/batch-a.txt", "storage1/scope/batch-dir"],
|
||||||
|
"recursive_paths": ["storage1/scope/batch-dir"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["operation"], "delete")
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["source"], "2 items")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertFalse(first.exists())
|
||||||
|
self.assertFalse(second_dir.exists())
|
||||||
|
|
||||||
|
def test_delete_batch_cancelled_after_current_delete_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingDeleteFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root)})
|
||||||
|
service = FileOpsService(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
delete_service = DeleteTaskService(
|
||||||
|
path_guard=path_guard,
|
||||||
|
repository=self.repo,
|
||||||
|
runner=TaskRunner(repository=self.repo, filesystem=blocking_fs),
|
||||||
|
)
|
||||||
|
task_service = TaskService(repository=self.repo)
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return service
|
||||||
|
|
||||||
|
async def _override_delete_task_service() -> DeleteTaskService:
|
||||||
|
return delete_service
|
||||||
|
|
||||||
|
async def _override_task_service() -> TaskService:
|
||||||
|
return task_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_delete_task_service] = _override_delete_task_service
|
||||||
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
|
|
||||||
|
first = self.scope / "cancel-a.txt"
|
||||||
|
second = self.scope / "cancel-b.txt"
|
||||||
|
first.write_text("a", encoding="utf-8")
|
||||||
|
second.write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{
|
||||||
|
"paths": ["storage1/scope/cancel-a.txt", "storage1/scope/cancel-b.txt"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["done_items"], 0)
|
||||||
|
self.assertEqual(running["total_items"], 2)
|
||||||
|
|
||||||
|
cancel_response = self._post(f"/api/tasks/{task_id}/cancel", {})
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertFalse(first.exists())
|
||||||
|
self.assertTrue(second.exists())
|
||||||
|
|
||||||
|
def test_delete_batch_directory_only_empty_dirs_remains_honestly_coarse(self) -> None:
|
||||||
|
first = self.scope / "empty-a"
|
||||||
|
second = self.scope / "empty-b"
|
||||||
|
first.mkdir()
|
||||||
|
second.mkdir()
|
||||||
|
|
||||||
|
response = self._post(
|
||||||
|
"/api/files/delete",
|
||||||
|
{
|
||||||
|
"paths": ["storage1/scope/empty-a", "storage1/scope/empty-b"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 0)
|
||||||
|
self.assertEqual(detail["total_items"], 0)
|
||||||
|
self.assertIsNone(detail["current_item"])
|
||||||
|
self.assertFalse(first.exists())
|
||||||
|
self.assertFalse(second.exists())
|
||||||
|
|
||||||
def test_delete_invalid_path(self) -> None:
|
def test_delete_invalid_path(self) -> None:
|
||||||
response = self._post(
|
response = self._post(
|
||||||
"/api/files/delete",
|
"/api/files/delete",
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from __future__ import annotations
|
|||||||
import asyncio
|
import asyncio
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import threading
|
||||||
import time
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -11,13 +12,16 @@ import httpx
|
|||||||
|
|
||||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
from backend.app.dependencies import get_copy_task_service, get_file_ops_service, get_history_service, get_move_task_service, get_task_service
|
from backend.app.dependencies import get_archive_download_task_service, get_copy_task_service, get_delete_task_service, get_duplicate_task_service, get_file_ops_service, get_history_service, get_move_task_service, get_task_service
|
||||||
from backend.app.db.history_repository import HistoryRepository
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
from backend.app.db.task_repository import TaskRepository
|
from backend.app.db.task_repository import TaskRepository
|
||||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
from backend.app.main import app
|
from backend.app.main import app
|
||||||
from backend.app.security.path_guard import PathGuard
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
|
||||||
from backend.app.services.copy_task_service import CopyTaskService
|
from backend.app.services.copy_task_service import CopyTaskService
|
||||||
|
from backend.app.services.delete_task_service import DeleteTaskService
|
||||||
|
from backend.app.services.duplicate_task_service import DuplicateTaskService
|
||||||
from backend.app.services.file_ops_service import FileOpsService
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
from backend.app.services.history_service import HistoryService
|
from backend.app.services.history_service import HistoryService
|
||||||
from backend.app.services.move_task_service import MoveTaskService
|
from backend.app.services.move_task_service import MoveTaskService
|
||||||
@@ -30,6 +34,33 @@ class FailingCopyFilesystemAdapter(FilesystemAdapter):
|
|||||||
raise OSError('forced copy failure')
|
raise OSError('forced copy failure')
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingArchiveBuildFileOpsService(FileOpsService):
|
||||||
|
def __init__(self, *args, entered: threading.Event, release: threading.Event, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._entered = entered
|
||||||
|
self._release = release
|
||||||
|
|
||||||
|
def _write_download_target_to_zip(self, archive, resolved_target, on_each_item=None) -> None:
|
||||||
|
archive.writestr("partial.txt", b"partial")
|
||||||
|
self._entered.set()
|
||||||
|
self._release.wait(timeout=2.0)
|
||||||
|
if on_each_item:
|
||||||
|
on_each_item()
|
||||||
|
super()._write_download_target_to_zip(archive, resolved_target, on_each_item=on_each_item)
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingCopyFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.entered = threading.Event()
|
||||||
|
self.release = threading.Event()
|
||||||
|
|
||||||
|
def copy_file(self, source: str, destination: str, on_progress=None) -> None:
|
||||||
|
self.entered.set()
|
||||||
|
self.release.wait(timeout=2.0)
|
||||||
|
return super().copy_file(source=source, destination=destination, on_progress=on_progress)
|
||||||
|
|
||||||
|
|
||||||
class HistoryApiGoldenTest(unittest.TestCase):
|
class HistoryApiGoldenTest(unittest.TestCase):
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.temp_dir = tempfile.TemporaryDirectory()
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
@@ -38,6 +69,7 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
self.root1.mkdir(parents=True, exist_ok=True)
|
self.root1.mkdir(parents=True, exist_ok=True)
|
||||||
self.root2.mkdir(parents=True, exist_ok=True)
|
self.root2.mkdir(parents=True, exist_ok=True)
|
||||||
db_path = str(Path(self.temp_dir.name) / 'tasks.db')
|
db_path = str(Path(self.temp_dir.name) / 'tasks.db')
|
||||||
|
self.artifact_root = Path(self.temp_dir.name) / "archive_tmp"
|
||||||
self.task_repo = TaskRepository(db_path)
|
self.task_repo = TaskRepository(db_path)
|
||||||
self.history_repo = HistoryRepository(db_path)
|
self.history_repo = HistoryRepository(db_path)
|
||||||
self.path_guard = PathGuard({'storage1': str(self.root1), 'storage2': str(self.root2)})
|
self.path_guard = PathGuard({'storage1': str(self.root1), 'storage2': str(self.root2)})
|
||||||
@@ -47,20 +79,39 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
app.dependency_overrides.clear()
|
app.dependency_overrides.clear()
|
||||||
self.temp_dir.cleanup()
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
def _set_services(self, filesystem: FilesystemAdapter) -> None:
|
def _set_services(self, filesystem: FilesystemAdapter, file_ops_service: FileOpsService | None = None) -> None:
|
||||||
runner = TaskRunner(repository=self.task_repo, filesystem=filesystem, history_repository=self.history_repo)
|
runner = TaskRunner(repository=self.task_repo, filesystem=filesystem, history_repository=self.history_repo)
|
||||||
file_ops_service = FileOpsService(path_guard=self.path_guard, filesystem=filesystem, history_repository=self.history_repo)
|
file_ops_service = file_ops_service or FileOpsService(path_guard=self.path_guard, filesystem=filesystem, history_repository=self.history_repo)
|
||||||
|
archive_service = ArchiveDownloadTaskService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
repository=self.task_repo,
|
||||||
|
runner=runner,
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
file_ops_service=file_ops_service,
|
||||||
|
artifact_root=self.artifact_root,
|
||||||
|
)
|
||||||
copy_service = CopyTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
copy_service = CopyTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
||||||
|
delete_service = DeleteTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
||||||
|
duplicate_service = DuplicateTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
||||||
move_service = MoveTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
move_service = MoveTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
|
||||||
task_service = TaskService(repository=self.task_repo)
|
task_service = TaskService(repository=self.task_repo, history_repository=self.history_repo)
|
||||||
history_service = HistoryService(repository=self.history_repo)
|
history_service = HistoryService(repository=self.history_repo)
|
||||||
|
|
||||||
async def _override_file_ops_service() -> FileOpsService:
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
return file_ops_service
|
return file_ops_service
|
||||||
|
|
||||||
|
async def _override_archive_service() -> ArchiveDownloadTaskService:
|
||||||
|
return archive_service
|
||||||
|
|
||||||
async def _override_copy_service() -> CopyTaskService:
|
async def _override_copy_service() -> CopyTaskService:
|
||||||
return copy_service
|
return copy_service
|
||||||
|
|
||||||
|
async def _override_duplicate_service() -> DuplicateTaskService:
|
||||||
|
return duplicate_service
|
||||||
|
|
||||||
|
async def _override_delete_service() -> DeleteTaskService:
|
||||||
|
return delete_service
|
||||||
|
|
||||||
async def _override_move_service() -> MoveTaskService:
|
async def _override_move_service() -> MoveTaskService:
|
||||||
return move_service
|
return move_service
|
||||||
|
|
||||||
@@ -71,7 +122,10 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
return history_service
|
return history_service
|
||||||
|
|
||||||
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_archive_download_task_service] = _override_archive_service
|
||||||
app.dependency_overrides[get_copy_task_service] = _override_copy_service
|
app.dependency_overrides[get_copy_task_service] = _override_copy_service
|
||||||
|
app.dependency_overrides[get_delete_task_service] = _override_delete_service
|
||||||
|
app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
|
||||||
app.dependency_overrides[get_move_task_service] = _override_move_service
|
app.dependency_overrides[get_move_task_service] = _override_move_service
|
||||||
app.dependency_overrides[get_task_service] = _override_task_service
|
app.dependency_overrides[get_task_service] = _override_task_service
|
||||||
app.dependency_overrides[get_history_service] = _override_history_service
|
app.dependency_overrides[get_history_service] = _override_history_service
|
||||||
@@ -91,11 +145,21 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
while time.time() < deadline:
|
while time.time() < deadline:
|
||||||
response = self._request('GET', f'/api/tasks/{task_id}')
|
response = self._request('GET', f'/api/tasks/{task_id}')
|
||||||
body = response.json()
|
body = response.json()
|
||||||
if body['status'] in {'completed', 'failed'}:
|
if body['status'] in {'completed', 'failed', 'ready', 'cancelled'}:
|
||||||
return body
|
return body
|
||||||
time.sleep(0.02)
|
time.sleep(0.02)
|
||||||
self.fail('task did not reach terminal state in time')
|
self.fail('task did not reach terminal state in time')
|
||||||
|
|
||||||
|
def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request('GET', f'/api/tasks/{task_id}')
|
||||||
|
body = response.json()
|
||||||
|
if body['status'] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail(f"task did not reach one of {sorted(statuses)} in time")
|
||||||
|
|
||||||
def test_get_history_empty_list(self) -> None:
|
def test_get_history_empty_list(self) -> None:
|
||||||
response = self._request('GET', '/api/history')
|
response = self._request('GET', '/api/history')
|
||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
@@ -165,6 +229,35 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(history[0]['source'], 'storage1/source.txt')
|
self.assertEqual(history[0]['source'], 'storage1/source.txt')
|
||||||
self.assertEqual(history[0]['destination'], 'storage1/copied.txt')
|
self.assertEqual(history[0]['destination'], 'storage1/copied.txt')
|
||||||
|
|
||||||
|
def test_copy_cancelled_history_item(self) -> None:
|
||||||
|
blocking_fs = BlockingCopyFilesystemAdapter()
|
||||||
|
self._set_services(blocking_fs)
|
||||||
|
(self.root1 / 'a.txt').write_text('A', encoding='utf-8')
|
||||||
|
(self.root1 / 'b.txt').write_text('B', encoding='utf-8')
|
||||||
|
(self.root1 / 'dest').mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
'POST',
|
||||||
|
'/api/files/copy',
|
||||||
|
{'sources': ['storage1/a.txt', 'storage1/b.txt'], 'destination_base': 'storage1/dest'},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()['task_id']
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
self._wait_for_status(task_id, {'running'})
|
||||||
|
cancel_response = self._request('POST', f'/api/tasks/{task_id}/cancel')
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()['status'], 'cancelling')
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
|
||||||
|
self.assertEqual(detail['status'], 'cancelled')
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'copy')
|
||||||
|
self.assertEqual(history[0]['status'], 'cancelled')
|
||||||
|
self.assertEqual(history[0]['source'], '2 items')
|
||||||
|
self.assertEqual(history[0]['destination'], 'storage1/dest')
|
||||||
|
|
||||||
def test_move_failed_history_item(self) -> None:
|
def test_move_failed_history_item(self) -> None:
|
||||||
src = self.root1 / 'source.txt'
|
src = self.root1 / 'source.txt'
|
||||||
src.write_text('hello', encoding='utf-8')
|
src.write_text('hello', encoding='utf-8')
|
||||||
@@ -178,3 +271,152 @@ class HistoryApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(history[0]['operation'], 'move')
|
self.assertEqual(history[0]['operation'], 'move')
|
||||||
self.assertEqual(history[0]['status'], 'failed')
|
self.assertEqual(history[0]['status'], 'failed')
|
||||||
self.assertEqual(history[0]['error_code'], 'io_error')
|
self.assertEqual(history[0]['error_code'], 'io_error')
|
||||||
|
|
||||||
|
def test_duplicate_completed_history_item(self) -> None:
|
||||||
|
(self.root1 / 'report.txt').write_text('hello', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/duplicate', {'paths': ['storage1/report.txt']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'duplicate')
|
||||||
|
self.assertEqual(history[0]['status'], 'completed')
|
||||||
|
self.assertEqual(history[0]['source'], 'storage1/report.txt')
|
||||||
|
self.assertEqual(history[0]['destination'], 'storage1/report copy.txt')
|
||||||
|
|
||||||
|
def test_delete_completed_history_item(self) -> None:
|
||||||
|
(self.root1 / 'trash.txt').write_text('bye', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/delete', {'path': 'storage1/trash.txt'})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'delete')
|
||||||
|
self.assertEqual(history[0]['status'], 'completed')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/trash.txt')
|
||||||
|
|
||||||
|
def test_delete_batch_completed_history_item(self) -> None:
|
||||||
|
(self.root1 / 'trash-a.txt').write_text('a', encoding='utf-8')
|
||||||
|
(self.root1 / 'trash-b.txt').write_text('b', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/delete', {'paths': ['storage1/trash-a.txt', 'storage1/trash-b.txt']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'delete')
|
||||||
|
self.assertEqual(history[0]['status'], 'completed')
|
||||||
|
self.assertEqual(history[0]['path'], '2 items')
|
||||||
|
|
||||||
|
def test_single_file_download_writes_ready_history_item(self) -> None:
|
||||||
|
(self.root1 / 'report.txt').write_text('hello download', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('GET', '/api/files/download?path=storage1/report.txt')
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'download')
|
||||||
|
self.assertEqual(history[0]['status'], 'ready')
|
||||||
|
self.assertEqual(history[0]['source'], 'single_file')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/report.txt')
|
||||||
|
self.assertEqual(history[0]['destination'], 'report.txt')
|
||||||
|
self.assertEqual(history[0]['error_code'], None)
|
||||||
|
self.assertEqual(history[0]['error_message'], None)
|
||||||
|
|
||||||
|
def test_single_directory_zip_download_writes_ready_history_item(self) -> None:
|
||||||
|
(self.root1 / 'docs').mkdir()
|
||||||
|
(self.root1 / 'docs' / 'a.txt').write_text('A', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'download')
|
||||||
|
self.assertEqual(history[0]['status'], 'ready')
|
||||||
|
self.assertEqual(history[0]['source'], 'single_directory_zip')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/docs')
|
||||||
|
self.assertEqual(history[0]['destination'], 'docs.zip')
|
||||||
|
|
||||||
|
def test_multi_mixed_zip_download_writes_ready_history_item(self) -> None:
|
||||||
|
(self.root1 / 'readme.txt').write_text('R', encoding='utf-8')
|
||||||
|
(self.root1 / 'photos').mkdir()
|
||||||
|
(self.root1 / 'photos' / 'img.txt').write_text('P', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/readme.txt', 'storage1/photos']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'download')
|
||||||
|
self.assertEqual(history[0]['status'], 'ready')
|
||||||
|
self.assertEqual(history[0]['source'], 'multi_zip')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/readme.txt, storage1/photos')
|
||||||
|
self.assertRegex(history[0]['destination'], r'^kodidownload-\d{8}-\d{6}\.zip$')
|
||||||
|
|
||||||
|
def test_download_preflight_failure_writes_preflight_failed_history_item(self) -> None:
|
||||||
|
target = self.root1 / 'real.txt'
|
||||||
|
target.write_text('x', encoding='utf-8')
|
||||||
|
(self.root1 / 'docs').mkdir()
|
||||||
|
(self.root1 / 'docs' / 'link.txt').symlink_to(target)
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertEqual(history[0]['operation'], 'download')
|
||||||
|
self.assertEqual(history[0]['status'], 'failed')
|
||||||
|
self.assertEqual(history[0]['source'], 'single_directory_zip')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/docs')
|
||||||
|
self.assertEqual(history[0]['destination'], 'docs.zip')
|
||||||
|
self.assertEqual(history[0]['error_code'], 'download_preflight_failed')
|
||||||
|
self.assertEqual(history[0]['error_message'], 'Zip download preflight failed')
|
||||||
|
|
||||||
|
def test_download_cancellation_writes_cancelled_history_item(self) -> None:
|
||||||
|
entered = threading.Event()
|
||||||
|
release = threading.Event()
|
||||||
|
file_ops_service = BlockingArchiveBuildFileOpsService(
|
||||||
|
path_guard=self.path_guard,
|
||||||
|
filesystem=FilesystemAdapter(),
|
||||||
|
history_repository=self.history_repo,
|
||||||
|
entered=entered,
|
||||||
|
release=release,
|
||||||
|
)
|
||||||
|
self._set_services(FilesystemAdapter(), file_ops_service=file_ops_service)
|
||||||
|
(self.root1 / 'docs').mkdir()
|
||||||
|
(self.root1 / 'docs' / 'a.txt').write_text('A', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
self.assertTrue(entered.wait(timeout=2.0))
|
||||||
|
cancel = self._request('POST', f"/api/files/download/archive/{response.json()['task_id']}/cancel")
|
||||||
|
release.set()
|
||||||
|
self._wait_task(response.json()['task_id'])
|
||||||
|
time.sleep(0.05)
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
|
||||||
|
self.assertEqual(cancel.status_code, 200)
|
||||||
|
self.assertEqual(history[0]['operation'], 'download')
|
||||||
|
self.assertEqual(history[0]['status'], 'cancelled')
|
||||||
|
self.assertEqual(history[0]['source'], 'single_directory_zip')
|
||||||
|
self.assertEqual(history[0]['path'], 'storage1/docs')
|
||||||
|
self.assertEqual(history[0]['destination'], 'docs.zip')
|
||||||
|
self.assertEqual(history[0]['error_code'], None)
|
||||||
|
self.assertEqual(history[0]['error_message'], None)
|
||||||
|
|
||||||
|
def test_download_history_uses_server_certain_statuses_only(self) -> None:
|
||||||
|
(self.root1 / 'report.txt').write_text('hello download', encoding='utf-8')
|
||||||
|
|
||||||
|
response = self._request('GET', '/api/files/download?path=storage1/report.txt')
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
history = self._request('GET', '/api/history').json()['items']
|
||||||
|
self.assertIn(history[0]['status'], {'requested', 'ready', 'preflight_failed', 'failed', 'cancelled'})
|
||||||
|
self.assertNotIn(history[0]['status'], {'completed', 'downloaded', 'saved'})
|
||||||
|
|||||||
@@ -0,0 +1,102 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
|
from backend.app.dependencies import get_file_ops_service
|
||||||
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
|
from backend.app.main import app
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
|
||||||
|
|
||||||
|
PNG_1X1 = (
|
||||||
|
b"\x89PNG\r\n\x1a\n"
|
||||||
|
b"\x00\x00\x00\rIHDR"
|
||||||
|
b"\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00"
|
||||||
|
b"\x90wS\xde"
|
||||||
|
b"\x00\x00\x00\x0cIDATx\x9cc\xf8\xcf\xc0\x00\x00\x03\x01\x01\x00"
|
||||||
|
b"\xc9\xfe\x92\xef"
|
||||||
|
b"\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ImageApiGoldenTest(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
|
self.root = Path(self.temp_dir.name) / "root"
|
||||||
|
self.root.mkdir(parents=True, exist_ok=True)
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root)})
|
||||||
|
service = FileOpsService(path_guard=path_guard, filesystem=FilesystemAdapter())
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
app.dependency_overrides.clear()
|
||||||
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
|
def _request(self, path: str) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
return await client.get("/api/files/image", params={"path": path})
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def test_image_endpoint_success(self) -> None:
|
||||||
|
(self.root / "sample.png").write_bytes(PNG_1X1)
|
||||||
|
|
||||||
|
response = self._request("storage1/sample.png")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.headers["content-type"], "image/png")
|
||||||
|
self.assertEqual(response.headers["content-length"], str(len(PNG_1X1)))
|
||||||
|
self.assertEqual(response.content, PNG_1X1)
|
||||||
|
|
||||||
|
def test_image_directory_type_conflict(self) -> None:
|
||||||
|
(self.root / "images").mkdir()
|
||||||
|
|
||||||
|
response = self._request("storage1/images")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "type_conflict")
|
||||||
|
|
||||||
|
def test_image_path_not_found(self) -> None:
|
||||||
|
response = self._request("storage1/missing.png")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 404)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "path_not_found")
|
||||||
|
|
||||||
|
def test_image_traversal_blocked(self) -> None:
|
||||||
|
response = self._request("storage1/../etc/passwd")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "path_traversal_detected")
|
||||||
|
|
||||||
|
def test_image_invalid_root_alias(self) -> None:
|
||||||
|
response = self._request("unknown/sample.png")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_root_alias")
|
||||||
|
|
||||||
|
def test_image_non_image_blocked(self) -> None:
|
||||||
|
(self.root / "notes.txt").write_text("hello", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request("storage1/notes.txt")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "unsupported_type")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
|
import base64
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import unittest
|
import unittest
|
||||||
@@ -17,6 +18,11 @@ from backend.app.security.path_guard import PathGuard
|
|||||||
from backend.app.services.file_ops_service import FileOpsService
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
|
||||||
|
|
||||||
|
PNG_1X1 = base64.b64decode(
|
||||||
|
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGP4z8AAAAMBAQDJ/pLvAAAAAElFTkSuQmCC"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class FileInfoApiGoldenTest(unittest.TestCase):
|
class FileInfoApiGoldenTest(unittest.TestCase):
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.temp_dir = tempfile.TemporaryDirectory()
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
@@ -59,6 +65,8 @@ class FileInfoApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertIn("modified", payload)
|
self.assertIn("modified", payload)
|
||||||
self.assertIn("owner", payload)
|
self.assertIn("owner", payload)
|
||||||
self.assertIn("group", payload)
|
self.assertIn("group", payload)
|
||||||
|
self.assertIsNone(payload["width"])
|
||||||
|
self.assertIsNone(payload["height"])
|
||||||
|
|
||||||
def test_directory_info_success(self) -> None:
|
def test_directory_info_success(self) -> None:
|
||||||
directory = self.root / "Media"
|
directory = self.root / "Media"
|
||||||
@@ -74,6 +82,20 @@ class FileInfoApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertIsNone(payload["size"])
|
self.assertIsNone(payload["size"])
|
||||||
self.assertEqual(payload["root"], "storage1")
|
self.assertEqual(payload["root"], "storage1")
|
||||||
self.assertIsNone(payload["extension"])
|
self.assertIsNone(payload["extension"])
|
||||||
|
self.assertIsNone(payload["width"])
|
||||||
|
self.assertIsNone(payload["height"])
|
||||||
|
|
||||||
|
def test_image_info_has_width_and_height(self) -> None:
|
||||||
|
file_path = self.root / "pixel.png"
|
||||||
|
file_path.write_bytes(PNG_1X1)
|
||||||
|
|
||||||
|
response = self._request("storage1/pixel.png")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
payload = response.json()
|
||||||
|
self.assertEqual(payload["width"], 1)
|
||||||
|
self.assertEqual(payload["height"], 1)
|
||||||
|
self.assertEqual(payload["content_type"], "image/png")
|
||||||
|
|
||||||
def test_info_path_not_found(self) -> None:
|
def test_info_path_not_found(self) -> None:
|
||||||
response = self._request("storage1/missing.txt")
|
response = self._request("storage1/missing.txt")
|
||||||
@@ -113,6 +135,8 @@ class FileInfoApiGoldenTest(unittest.TestCase):
|
|||||||
"content_type",
|
"content_type",
|
||||||
"owner",
|
"owner",
|
||||||
"group",
|
"group",
|
||||||
|
"width",
|
||||||
|
"height",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
|
import errno
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import threading
|
||||||
import time
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -28,7 +30,8 @@ class FailingDeleteFilesystemAdapter(FilesystemAdapter):
|
|||||||
|
|
||||||
class FailingBatchFilesystemAdapter(FilesystemAdapter):
|
class FailingBatchFilesystemAdapter(FilesystemAdapter):
|
||||||
def move_file(self, source: str, destination: str) -> None:
|
def move_file(self, source: str, destination: str) -> None:
|
||||||
if Path(source).name == "fail-file.txt":
|
source_path = Path(source)
|
||||||
|
if source_path.name == "fail-file.txt" or "fail-dir" in source_path.parts:
|
||||||
raise OSError("forced batch move failure")
|
raise OSError("forced batch move failure")
|
||||||
super().move_file(source, destination)
|
super().move_file(source, destination)
|
||||||
|
|
||||||
@@ -38,6 +41,23 @@ class FailingBatchFilesystemAdapter(FilesystemAdapter):
|
|||||||
super().move_directory(source, destination)
|
super().move_directory(source, destination)
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingMoveFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.entered = threading.Event()
|
||||||
|
self.release = threading.Event()
|
||||||
|
|
||||||
|
def move_file(self, source: str, destination: str) -> None:
|
||||||
|
self.entered.set()
|
||||||
|
self.release.wait(timeout=2.0)
|
||||||
|
super().move_file(source, destination)
|
||||||
|
|
||||||
|
|
||||||
|
class CrossDeviceMoveFilesystemAdapter(FilesystemAdapter):
|
||||||
|
def move_file(self, source: str, destination: str) -> None:
|
||||||
|
raise OSError(errno.EXDEV, "Invalid cross-device link")
|
||||||
|
|
||||||
|
|
||||||
class MoveApiGoldenTest(unittest.TestCase):
|
class MoveApiGoldenTest(unittest.TestCase):
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.temp_dir = tempfile.TemporaryDirectory()
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
@@ -83,11 +103,21 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
while time.time() < deadline:
|
while time.time() < deadline:
|
||||||
response = self._request("GET", f"/api/tasks/{task_id}")
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
body = response.json()
|
body = response.json()
|
||||||
if body["status"] in {"completed", "failed"}:
|
if body["status"] in {"completed", "failed", "cancelled"}:
|
||||||
return body
|
return body
|
||||||
time.sleep(0.02)
|
time.sleep(0.02)
|
||||||
self.fail("task did not reach terminal state in time")
|
self.fail("task did not reach terminal state in time")
|
||||||
|
|
||||||
|
def _wait_for_status(self, task_id: str, statuses: set[str], timeout_s: float = 2.0) -> dict:
|
||||||
|
deadline = time.time() + timeout_s
|
||||||
|
while time.time() < deadline:
|
||||||
|
response = self._request("GET", f"/api/tasks/{task_id}")
|
||||||
|
body = response.json()
|
||||||
|
if body["status"] in statuses:
|
||||||
|
return body
|
||||||
|
time.sleep(0.02)
|
||||||
|
self.fail(f"task did not reach one of {sorted(statuses)} in time")
|
||||||
|
|
||||||
def test_move_success_same_root_create_task_shape_and_completed(self) -> None:
|
def test_move_success_same_root_create_task_shape_and_completed(self) -> None:
|
||||||
src = self.root1 / "source.txt"
|
src = self.root1 / "source.txt"
|
||||||
src.write_text("hello", encoding="utf-8")
|
src.write_text("hello", encoding="utf-8")
|
||||||
@@ -105,6 +135,8 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
detail = self._wait_task(body["task_id"])
|
detail = self._wait_task(body["task_id"])
|
||||||
self.assertEqual(detail["status"], "completed")
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
self.assertTrue((self.root1 / "moved.txt").exists())
|
self.assertTrue((self.root1 / "moved.txt").exists())
|
||||||
self.assertFalse(src.exists())
|
self.assertFalse(src.exists())
|
||||||
|
|
||||||
@@ -135,6 +167,31 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertTrue((self.root1 / "target-parent" / "moved-dir" / "nested.txt").exists())
|
self.assertTrue((self.root1 / "target-parent" / "moved-dir" / "nested.txt").exists())
|
||||||
self.assertFalse(src_dir.exists())
|
self.assertFalse(src_dir.exists())
|
||||||
|
|
||||||
|
def test_move_directory_success_same_root_with_nested_symlink_keeps_direct_move_semantics(self) -> None:
|
||||||
|
src_dir = self.root1 / "source-dir"
|
||||||
|
src_dir.mkdir()
|
||||||
|
real_dir = self.root1 / "real-dir"
|
||||||
|
real_dir.mkdir()
|
||||||
|
(real_dir / "nested.txt").write_text("hello", encoding="utf-8")
|
||||||
|
(src_dir / "link-dir").symlink_to(real_dir, target_is_directory=True)
|
||||||
|
target_parent = self.root1 / "target-parent"
|
||||||
|
target_parent.mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/move",
|
||||||
|
{"source": "storage1/source-dir", "destination": "storage1/target-parent/moved-dir"},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 1)
|
||||||
|
self.assertTrue((self.root1 / "target-parent" / "moved-dir").is_dir())
|
||||||
|
self.assertTrue((self.root1 / "target-parent" / "moved-dir" / "link-dir").is_symlink())
|
||||||
|
self.assertFalse(src_dir.exists())
|
||||||
|
|
||||||
def test_move_success_cross_root_create_task_shape_and_completed(self) -> None:
|
def test_move_success_cross_root_create_task_shape_and_completed(self) -> None:
|
||||||
src = self.root1 / "source.txt"
|
src = self.root1 / "source.txt"
|
||||||
src.write_text("hello", encoding="utf-8")
|
src.write_text("hello", encoding="utf-8")
|
||||||
@@ -225,6 +282,95 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertFalse(source_file.exists())
|
self.assertFalse(source_file.exists())
|
||||||
self.assertFalse(source_dir.exists())
|
self.assertFalse(source_dir.exists())
|
||||||
|
|
||||||
|
def test_move_batch_cancelled_after_current_file_finishes(self) -> None:
|
||||||
|
blocking_fs = BlockingMoveFilesystemAdapter()
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root1), "storage2": str(self.root2)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=blocking_fs)
|
||||||
|
(self.root1 / "a.txt").write_text("A", encoding="utf-8")
|
||||||
|
(self.root1 / "b.txt").write_text("B", encoding="utf-8")
|
||||||
|
target = self.root1 / "target"
|
||||||
|
target.mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/move",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/a.txt", "storage1/b.txt"],
|
||||||
|
"destination_base": "storage1/target",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
task_id = response.json()["task_id"]
|
||||||
|
self.assertTrue(blocking_fs.entered.wait(timeout=2.0))
|
||||||
|
running = self._wait_for_status(task_id, {"running"})
|
||||||
|
self.assertEqual(running["current_item"], "a.txt")
|
||||||
|
|
||||||
|
cancel_response = self._request("POST", f"/api/tasks/{task_id}/cancel")
|
||||||
|
self.assertEqual(cancel_response.status_code, 200)
|
||||||
|
self.assertEqual(cancel_response.json()["status"], "cancelling")
|
||||||
|
|
||||||
|
blocking_fs.release.set()
|
||||||
|
detail = self._wait_task(task_id)
|
||||||
|
self.assertEqual(detail["status"], "cancelled")
|
||||||
|
self.assertEqual(detail["done_items"], 1)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((target / "a.txt").exists())
|
||||||
|
self.assertTrue((self.root1 / "b.txt").exists())
|
||||||
|
self.assertFalse((target / "b.txt").exists())
|
||||||
|
|
||||||
|
def test_move_batch_cross_root_files_success(self) -> None:
|
||||||
|
first = self.root1 / "first.txt"
|
||||||
|
second = self.root1 / "second.txt"
|
||||||
|
first.write_text("a", encoding="utf-8")
|
||||||
|
second.write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/move",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/first.txt", "storage1/second.txt"],
|
||||||
|
"destination_base": "storage2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root2 / "first.txt").exists())
|
||||||
|
self.assertTrue((self.root2 / "second.txt").exists())
|
||||||
|
self.assertFalse(first.exists())
|
||||||
|
self.assertFalse(second.exists())
|
||||||
|
|
||||||
|
def test_move_batch_cross_root_files_falls_back_from_exdev(self) -> None:
|
||||||
|
first = self.root1 / "first.txt"
|
||||||
|
second = self.root1 / "second.txt"
|
||||||
|
first.write_text("a", encoding="utf-8")
|
||||||
|
second.write_text("b", encoding="utf-8")
|
||||||
|
|
||||||
|
path_guard = PathGuard({"storage1": str(self.root1), "storage2": str(self.root2)})
|
||||||
|
self._set_services(path_guard=path_guard, filesystem=CrossDeviceMoveFilesystemAdapter())
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/move",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/first.txt", "storage1/second.txt"],
|
||||||
|
"destination_base": "storage2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 202)
|
||||||
|
detail = self._wait_task(response.json()["task_id"])
|
||||||
|
self.assertEqual(detail["status"], "completed")
|
||||||
|
self.assertEqual(detail["done_items"], 2)
|
||||||
|
self.assertEqual(detail["total_items"], 2)
|
||||||
|
self.assertTrue((self.root2 / "first.txt").exists())
|
||||||
|
self.assertTrue((self.root2 / "second.txt").exists())
|
||||||
|
self.assertFalse(first.exists())
|
||||||
|
self.assertFalse(second.exists())
|
||||||
|
|
||||||
def test_move_batch_cross_root_directories_blocked(self) -> None:
|
def test_move_batch_cross_root_directories_blocked(self) -> None:
|
||||||
first = self.root1 / "first-dir"
|
first = self.root1 / "first-dir"
|
||||||
second = self.root1 / "second-dir"
|
second = self.root1 / "second-dir"
|
||||||
@@ -242,6 +388,26 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
self.assertEqual(response.status_code, 400)
|
self.assertEqual(response.status_code, 400)
|
||||||
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
self.assertEqual(response.json()["error"]["message"], "Cross-root batch move with directories is not supported in v1")
|
||||||
|
|
||||||
|
def test_move_batch_cross_root_mixed_files_and_directories_blocked(self) -> None:
|
||||||
|
first = self.root1 / "first.txt"
|
||||||
|
first.write_text("a", encoding="utf-8")
|
||||||
|
second = self.root1 / "second-dir"
|
||||||
|
second.mkdir()
|
||||||
|
|
||||||
|
response = self._request(
|
||||||
|
"POST",
|
||||||
|
"/api/files/move",
|
||||||
|
{
|
||||||
|
"sources": ["storage1/first.txt", "storage1/second-dir"],
|
||||||
|
"destination_base": "storage2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
self.assertEqual(response.json()["error"]["message"], "Cross-root batch move with directories is not supported in v1")
|
||||||
|
|
||||||
def test_move_batch_mixed_root_selection_blocked(self) -> None:
|
def test_move_batch_mixed_root_selection_blocked(self) -> None:
|
||||||
first = self.root1 / "first-dir"
|
first = self.root1 / "first-dir"
|
||||||
@@ -328,8 +494,10 @@ class MoveApiGoldenTest(unittest.TestCase):
|
|||||||
def test_move_batch_runtime_io_error_failed_task_shape(self) -> None:
|
def test_move_batch_runtime_io_error_failed_task_shape(self) -> None:
|
||||||
first = self.root1 / "ok-dir"
|
first = self.root1 / "ok-dir"
|
||||||
first.mkdir()
|
first.mkdir()
|
||||||
|
(first / "a.txt").write_text("A", encoding="utf-8")
|
||||||
second = self.root1 / "fail-dir"
|
second = self.root1 / "fail-dir"
|
||||||
second.mkdir()
|
second.mkdir()
|
||||||
|
(second / "b.txt").write_text("B", encoding="utf-8")
|
||||||
target = self.root1 / "target"
|
target = self.root1 / "target"
|
||||||
target.mkdir()
|
target.mkdir()
|
||||||
|
|
||||||
|
|||||||
@@ -49,6 +49,16 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
return asyncio.run(_run())
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _default_zip_download_limits() -> dict:
|
||||||
|
return {
|
||||||
|
"max_items": 1000,
|
||||||
|
"max_total_input_bytes": 2147483648,
|
||||||
|
"max_individual_file_bytes": 524288000,
|
||||||
|
"scan_timeout_seconds": 10.0,
|
||||||
|
"symlink_policy": "not_allowed",
|
||||||
|
}
|
||||||
|
|
||||||
def test_settings_default_response(self) -> None:
|
def test_settings_default_response(self) -> None:
|
||||||
response = self._request("GET", "/api/settings")
|
response = self._request("GET", "/api/settings")
|
||||||
|
|
||||||
@@ -59,6 +69,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
"show_thumbnails": False,
|
"show_thumbnails": False,
|
||||||
"preferred_startup_path_left": None,
|
"preferred_startup_path_left": None,
|
||||||
"preferred_startup_path_right": None,
|
"preferred_startup_path_right": None,
|
||||||
|
"selected_theme": "default",
|
||||||
|
"selected_color_mode": "dark",
|
||||||
|
"zip_download_limits": self._default_zip_download_limits(),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -75,6 +88,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
"show_thumbnails": False,
|
"show_thumbnails": False,
|
||||||
"preferred_startup_path_left": "storage1/docs",
|
"preferred_startup_path_left": "storage1/docs",
|
||||||
"preferred_startup_path_right": None,
|
"preferred_startup_path_right": None,
|
||||||
|
"selected_theme": "default",
|
||||||
|
"selected_color_mode": "dark",
|
||||||
|
"zip_download_limits": self._default_zip_download_limits(),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -96,6 +112,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
"show_thumbnails": True,
|
"show_thumbnails": True,
|
||||||
"preferred_startup_path_left": "storage1/docs",
|
"preferred_startup_path_left": "storage1/docs",
|
||||||
"preferred_startup_path_right": "storage1/docs",
|
"preferred_startup_path_right": "storage1/docs",
|
||||||
|
"selected_theme": "default",
|
||||||
|
"selected_color_mode": "dark",
|
||||||
|
"zip_download_limits": self._default_zip_download_limits(),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
@@ -104,6 +123,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
"show_thumbnails": True,
|
"show_thumbnails": True,
|
||||||
"preferred_startup_path_left": "storage1/docs",
|
"preferred_startup_path_left": "storage1/docs",
|
||||||
"preferred_startup_path_right": "storage1/docs",
|
"preferred_startup_path_right": "storage1/docs",
|
||||||
|
"selected_theme": "default",
|
||||||
|
"selected_color_mode": "dark",
|
||||||
|
"zip_download_limits": self._default_zip_download_limits(),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -113,6 +135,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
self.assertEqual(response.json()["preferred_startup_path_left"], "storage1/docs")
|
self.assertEqual(response.json()["preferred_startup_path_left"], "storage1/docs")
|
||||||
self.assertEqual(response.json()["preferred_startup_path_right"], None)
|
self.assertEqual(response.json()["preferred_startup_path_right"], None)
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "default")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "dark")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
def test_settings_preferred_startup_path_right_persistence(self) -> None:
|
def test_settings_preferred_startup_path_right_persistence(self) -> None:
|
||||||
response = self._request("POST", "/api/settings", {"preferred_startup_path_right": "storage1/docs"})
|
response = self._request("POST", "/api/settings", {"preferred_startup_path_right": "storage1/docs"})
|
||||||
@@ -120,6 +145,9 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
self.assertEqual(response.json()["preferred_startup_path_left"], None)
|
self.assertEqual(response.json()["preferred_startup_path_left"], None)
|
||||||
self.assertEqual(response.json()["preferred_startup_path_right"], "storage1/docs")
|
self.assertEqual(response.json()["preferred_startup_path_right"], "storage1/docs")
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "default")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "dark")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
def test_settings_preferred_startup_path_empty_string_resets_only_left_to_null(self) -> None:
|
def test_settings_preferred_startup_path_empty_string_resets_only_left_to_null(self) -> None:
|
||||||
self._request(
|
self._request(
|
||||||
@@ -135,6 +163,51 @@ class SettingsApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
self.assertEqual(response.json()["preferred_startup_path_left"], None)
|
self.assertEqual(response.json()["preferred_startup_path_left"], None)
|
||||||
self.assertEqual(response.json()["preferred_startup_path_right"], "storage1/docs")
|
self.assertEqual(response.json()["preferred_startup_path_right"], "storage1/docs")
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "default")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "dark")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
|
def test_settings_selected_theme_persistence(self) -> None:
|
||||||
|
response = self._request("POST", "/api/settings", {"selected_theme": "midnight"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "midnight")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "dark")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
|
def test_settings_selected_theme_accepts_new_built_in_family(self) -> None:
|
||||||
|
response = self._request("POST", "/api/settings", {"selected_theme": "commander-electric"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "commander-electric")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "dark")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
|
def test_settings_selected_color_mode_persistence(self) -> None:
|
||||||
|
response = self._request("POST", "/api/settings", {"selected_color_mode": "light"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.json()["selected_theme"], "default")
|
||||||
|
self.assertEqual(response.json()["selected_color_mode"], "light")
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
|
def test_settings_includes_read_only_zip_download_limits(self) -> None:
|
||||||
|
response = self._request("GET", "/api/settings")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual(response.json()["zip_download_limits"], self._default_zip_download_limits())
|
||||||
|
|
||||||
|
def test_settings_rejects_invalid_selected_theme(self) -> None:
|
||||||
|
response = self._request("POST", "/api/settings", {"selected_theme": "unknown"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
|
||||||
|
def test_settings_rejects_invalid_selected_color_mode(self) -> None:
|
||||||
|
response = self._request("POST", "/api/settings", {"selected_color_mode": "sepia"})
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(response.json()["error"]["code"], "invalid_request")
|
||||||
|
|
||||||
def test_settings_preferred_startup_path_left_rejects_file_path(self) -> None:
|
def test_settings_preferred_startup_path_left_rejects_file_path(self) -> None:
|
||||||
response = self._request("POST", "/api/settings", {"preferred_startup_path_left": "storage1/file.txt"})
|
response = self._request("POST", "/api/settings", {"preferred_startup_path_left": "storage1/file.txt"})
|
||||||
|
|||||||
@@ -40,6 +40,14 @@ class TasksApiGoldenTest(unittest.TestCase):
|
|||||||
|
|
||||||
return asyncio.run(_run())
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _post(self, url: str, payload: dict | None = None) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
return await client.post(url, json=payload)
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
def _insert_task(
|
def _insert_task(
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
@@ -241,6 +249,215 @@ class TasksApiGoldenTest(unittest.TestCase):
|
|||||||
self.assertEqual(body["error_code"], "io_error")
|
self.assertEqual(body["error_code"], "io_error")
|
||||||
self.assertEqual(body["error_message"], "write failed")
|
self.assertEqual(body["error_message"], "write failed")
|
||||||
|
|
||||||
|
def test_get_task_detail_delete_running(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-delete",
|
||||||
|
operation="delete",
|
||||||
|
status="running",
|
||||||
|
source="storage1/trash.txt",
|
||||||
|
destination="",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
done_items=0,
|
||||||
|
total_items=1,
|
||||||
|
current_item="trash.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-delete")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "delete")
|
||||||
|
self.assertEqual(body["status"], "running")
|
||||||
|
self.assertEqual(body["done_items"], 0)
|
||||||
|
self.assertEqual(body["total_items"], 1)
|
||||||
|
self.assertEqual(body["current_item"], "trash.txt")
|
||||||
|
|
||||||
|
def test_cancel_running_delete_task_returns_cancelling(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-delete",
|
||||||
|
operation="delete",
|
||||||
|
status="running",
|
||||||
|
source="storage1/trash.txt",
|
||||||
|
destination="",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
done_items=0,
|
||||||
|
total_items=1,
|
||||||
|
current_item="trash.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._post("/api/tasks/task-delete/cancel")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "delete")
|
||||||
|
self.assertEqual(body["status"], "cancelling")
|
||||||
|
self.assertEqual(body["current_item"], "trash.txt")
|
||||||
|
|
||||||
|
def test_cancel_completed_task_rejected(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-copy",
|
||||||
|
operation="copy",
|
||||||
|
status="completed",
|
||||||
|
source="storage1/a.txt",
|
||||||
|
destination="storage2/a.txt",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
finished_at="2026-03-10T10:00:04Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._post("/api/tasks/task-copy/cancel")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "task_not_cancellable",
|
||||||
|
"message": "Task cannot be cancelled",
|
||||||
|
"details": {"task_id": "task-copy", "status": "completed"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_cancel_download_task_rejected(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-download",
|
||||||
|
operation="download",
|
||||||
|
status="preparing",
|
||||||
|
source="single_directory_zip",
|
||||||
|
destination="docs.zip",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._post("/api/tasks/task-download/cancel")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "task_not_cancellable",
|
||||||
|
"message": "Task cannot be cancelled",
|
||||||
|
"details": {"task_id": "task-download", "status": "preparing"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_get_task_detail_ready_archive_download(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-download-ready",
|
||||||
|
operation="download",
|
||||||
|
status="ready",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
finished_at="2026-03-10T10:00:05Z",
|
||||||
|
done_items=1,
|
||||||
|
total_items=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-download-ready")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "download")
|
||||||
|
self.assertEqual(body["status"], "ready")
|
||||||
|
self.assertEqual(body["destination"], "docs.zip")
|
||||||
|
|
||||||
|
def test_get_task_detail_duplicate_completed(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-duplicate",
|
||||||
|
operation="duplicate",
|
||||||
|
status="completed",
|
||||||
|
source="storage1/report.txt",
|
||||||
|
destination="storage1/report copy.txt",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
finished_at="2026-03-10T10:00:03Z",
|
||||||
|
done_items=1,
|
||||||
|
total_items=1,
|
||||||
|
current_item="storage1/report.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-duplicate")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "duplicate")
|
||||||
|
self.assertEqual(body["status"], "completed")
|
||||||
|
self.assertEqual(body["done_items"], 1)
|
||||||
|
self.assertEqual(body["total_items"], 1)
|
||||||
|
|
||||||
|
def test_get_task_detail_requested_archive_download(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-download-requested",
|
||||||
|
operation="download",
|
||||||
|
status="requested",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
done_items=0,
|
||||||
|
total_items=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-download-requested")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "download")
|
||||||
|
self.assertEqual(body["status"], "requested")
|
||||||
|
self.assertEqual(body["done_items"], 0)
|
||||||
|
self.assertEqual(body["total_items"], 1)
|
||||||
|
|
||||||
|
def test_get_task_detail_preparing_archive_download_with_current_item(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-download-preparing",
|
||||||
|
operation="download",
|
||||||
|
status="preparing",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
done_items=1,
|
||||||
|
total_items=3,
|
||||||
|
current_item="storage1/docs/b.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-download-preparing")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "download")
|
||||||
|
self.assertEqual(body["status"], "preparing")
|
||||||
|
self.assertEqual(body["done_items"], 1)
|
||||||
|
self.assertEqual(body["total_items"], 3)
|
||||||
|
self.assertEqual(body["current_item"], "storage1/docs/b.txt")
|
||||||
|
|
||||||
|
def test_get_task_detail_cancelled_archive_download(self) -> None:
|
||||||
|
self._insert_task(
|
||||||
|
task_id="task-download-cancelled",
|
||||||
|
operation="download",
|
||||||
|
status="cancelled",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
started_at="2026-03-10T10:00:01Z",
|
||||||
|
finished_at="2026-03-10T10:00:03Z",
|
||||||
|
done_items=0,
|
||||||
|
total_items=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = self._get("/api/tasks/task-download-cancelled")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["operation"], "download")
|
||||||
|
self.assertEqual(body["status"], "cancelled")
|
||||||
|
self.assertEqual(body["destination"], "docs.zip")
|
||||||
|
|
||||||
def test_get_task_not_found(self) -> None:
|
def test_get_task_not_found(self) -> None:
|
||||||
response = self._get("/api/tasks/task-missing")
|
response = self._get("/api/tasks/task-missing")
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,204 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
|
from backend.app.dependencies import get_file_ops_service, get_history_service
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||||
|
from backend.app.main import app
|
||||||
|
from backend.app.security.path_guard import PathGuard
|
||||||
|
from backend.app.services.file_ops_service import FileOpsService
|
||||||
|
from backend.app.services.history_service import HistoryService
|
||||||
|
|
||||||
|
|
||||||
|
class UploadApiGoldenTest(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
|
self.root = Path(self.temp_dir.name) / "root"
|
||||||
|
self.root.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.uploads_dir = self.root / "uploads"
|
||||||
|
self.uploads_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.db_path = str(Path(self.temp_dir.name) / "history.db")
|
||||||
|
|
||||||
|
history_repository = HistoryRepository(self.db_path)
|
||||||
|
file_ops_service = FileOpsService(
|
||||||
|
path_guard=PathGuard({"storage1": str(self.root)}),
|
||||||
|
filesystem=FilesystemAdapter(),
|
||||||
|
history_repository=history_repository,
|
||||||
|
)
|
||||||
|
history_service = HistoryService(repository=history_repository)
|
||||||
|
|
||||||
|
async def _override_file_ops_service() -> FileOpsService:
|
||||||
|
return file_ops_service
|
||||||
|
|
||||||
|
async def _override_history_service() -> HistoryService:
|
||||||
|
return history_service
|
||||||
|
|
||||||
|
app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
|
||||||
|
app.dependency_overrides[get_history_service] = _override_history_service
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
app.dependency_overrides.clear()
|
||||||
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
|
def _upload(self, *, target_path: str, filename: str, content: bytes, overwrite: bool = False) -> httpx.Response:
|
||||||
|
async def _run() -> httpx.Response:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
return await client.post(
|
||||||
|
"/api/files/upload",
|
||||||
|
data={"target_path": target_path, "overwrite": "true" if overwrite else "false"},
|
||||||
|
files={"file": (filename, content, "application/octet-stream")},
|
||||||
|
)
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def _get_history(self) -> list[dict]:
|
||||||
|
async def _run() -> list[dict]:
|
||||||
|
transport = httpx.ASGITransport(app=app)
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||||
|
response = await client.get("/api/history")
|
||||||
|
return response.json()["items"]
|
||||||
|
|
||||||
|
return asyncio.run(_run())
|
||||||
|
|
||||||
|
def test_upload_single_file_success(self) -> None:
|
||||||
|
response = self._upload(target_path="storage1/uploads", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = response.json()
|
||||||
|
self.assertEqual(body["path"], "storage1/uploads/hello.txt")
|
||||||
|
self.assertEqual(body["size"], 5)
|
||||||
|
self.assertTrue((self.uploads_dir / "hello.txt").exists())
|
||||||
|
|
||||||
|
history = self._get_history()
|
||||||
|
self.assertEqual(history[0]["operation"], "upload")
|
||||||
|
self.assertEqual(history[0]["status"], "completed")
|
||||||
|
self.assertEqual(history[0]["destination"], "storage1/uploads/hello.txt")
|
||||||
|
|
||||||
|
def test_upload_target_path_not_found(self) -> None:
|
||||||
|
response = self._upload(target_path="storage1/missing", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 404)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "path_not_found",
|
||||||
|
"message": "Requested path was not found",
|
||||||
|
"details": {"path": "storage1/missing"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_upload_target_path_is_file(self) -> None:
|
||||||
|
target_file = self.root / "not_a_directory.txt"
|
||||||
|
target_file.write_text("x", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._upload(target_path="storage1/not_a_directory.txt", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "path_type_conflict",
|
||||||
|
"message": "Requested path is not a directory",
|
||||||
|
"details": {"path": "storage1/not_a_directory.txt"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_upload_traversal_blocked(self) -> None:
|
||||||
|
response = self._upload(target_path="storage1/../etc", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "path_traversal_detected",
|
||||||
|
"message": "Path traversal is not allowed",
|
||||||
|
"details": {"path": "storage1/../etc"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_upload_invalid_root_alias(self) -> None:
|
||||||
|
response = self._upload(target_path="unknown/uploads", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 403)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "invalid_root_alias",
|
||||||
|
"message": "Unknown root alias",
|
||||||
|
"details": {"path": "unknown/uploads"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_upload_invalid_filename_blocked(self) -> None:
|
||||||
|
response = self._upload(target_path="storage1/uploads", filename="..", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 400)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "invalid_request",
|
||||||
|
"message": "Invalid name",
|
||||||
|
"details": {"name": ".."},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_upload_conflict_on_existing_file(self) -> None:
|
||||||
|
existing = self.uploads_dir / "hello.txt"
|
||||||
|
existing.write_text("existing", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._upload(target_path="storage1/uploads", filename="hello.txt", content=b"hello")
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 409)
|
||||||
|
self.assertEqual(
|
||||||
|
response.json(),
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "already_exists",
|
||||||
|
"message": "Target path already exists",
|
||||||
|
"details": {"path": "storage1/uploads/hello.txt"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
history = self._get_history()
|
||||||
|
self.assertEqual(history[0]["operation"], "upload")
|
||||||
|
self.assertEqual(history[0]["status"], "failed")
|
||||||
|
self.assertEqual(history[0]["error_code"], "already_exists")
|
||||||
|
|
||||||
|
def test_upload_overwrite_existing_file_success(self) -> None:
|
||||||
|
existing = self.uploads_dir / "hello.txt"
|
||||||
|
existing.write_text("existing", encoding="utf-8")
|
||||||
|
|
||||||
|
response = self._upload(
|
||||||
|
target_path="storage1/uploads",
|
||||||
|
filename="hello.txt",
|
||||||
|
content=b"replacement",
|
||||||
|
overwrite=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
self.assertEqual((self.uploads_dir / "hello.txt").read_bytes(), b"replacement")
|
||||||
|
|
||||||
|
history = self._get_history()
|
||||||
|
self.assertEqual(history[0]["operation"], "upload")
|
||||||
|
self.assertEqual(history[0]["status"], "completed")
|
||||||
File diff suppressed because it is too large
Load Diff
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,146 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||||
|
|
||||||
|
from backend.app.db.history_repository import HistoryRepository
|
||||||
|
from backend.app.db.task_repository import TaskRepository
|
||||||
|
from backend.app.services.task_recovery_service import reconcile_persisted_incomplete_tasks
|
||||||
|
|
||||||
|
|
||||||
|
class TaskRecoveryServiceTest(unittest.TestCase):
|
||||||
|
def setUp(self) -> None:
|
||||||
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
|
self.db_path = str(Path(self.temp_dir.name) / "tasks.db")
|
||||||
|
self.task_repo = TaskRepository(self.db_path)
|
||||||
|
self.history_repo = HistoryRepository(self.db_path)
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
self.temp_dir.cleanup()
|
||||||
|
|
||||||
|
def test_reconcile_persisted_incomplete_tasks_marks_old_non_terminal_tasks_failed(self) -> None:
|
||||||
|
self.task_repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-running",
|
||||||
|
"operation": "copy",
|
||||||
|
"status": "running",
|
||||||
|
"source": "storage1/a.txt",
|
||||||
|
"destination": "storage2/a.txt",
|
||||||
|
"created_at": "2026-03-10T10:00:00Z",
|
||||||
|
"started_at": "2026-03-10T10:00:01Z",
|
||||||
|
"current_item": "storage1/a.txt",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self.history_repo.create_entry(
|
||||||
|
entry_id="task-running",
|
||||||
|
operation="copy",
|
||||||
|
status="queued",
|
||||||
|
source="storage1/a.txt",
|
||||||
|
destination="storage2/a.txt",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
)
|
||||||
|
self.task_repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-ready",
|
||||||
|
"operation": "download",
|
||||||
|
"status": "ready",
|
||||||
|
"source": "single_directory_zip",
|
||||||
|
"destination": "docs.zip",
|
||||||
|
"created_at": "2026-03-10T10:02:00Z",
|
||||||
|
"finished_at": "2026-03-10T10:03:00Z",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = reconcile_persisted_incomplete_tasks(self.task_repo, self.history_repo)
|
||||||
|
|
||||||
|
self.assertEqual(changed, ["task-running"])
|
||||||
|
task = self.task_repo.get_task("task-running")
|
||||||
|
self.assertEqual(task["status"], "failed")
|
||||||
|
self.assertEqual(task["error_code"], "task_interrupted")
|
||||||
|
self.assertEqual(task["error_message"], "Task was interrupted before completion")
|
||||||
|
self.assertIsNone(task["current_item"])
|
||||||
|
history = self.history_repo.list_history(limit=5)[0]
|
||||||
|
self.assertEqual(history["id"], "task-running")
|
||||||
|
self.assertEqual(history["status"], "failed")
|
||||||
|
self.assertEqual(history["error_code"], "task_interrupted")
|
||||||
|
ready_task = self.task_repo.get_task("task-ready")
|
||||||
|
self.assertEqual(ready_task["status"], "ready")
|
||||||
|
|
||||||
|
def test_reconcile_persisted_incomplete_tasks_is_noop_when_all_tasks_terminal(self) -> None:
|
||||||
|
self.task_repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-completed",
|
||||||
|
"operation": "move",
|
||||||
|
"status": "completed",
|
||||||
|
"source": "storage1/a.txt",
|
||||||
|
"destination": "storage2/a.txt",
|
||||||
|
"created_at": "2026-03-10T10:00:00Z",
|
||||||
|
"finished_at": "2026-03-10T10:00:02Z",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = reconcile_persisted_incomplete_tasks(self.task_repo, self.history_repo)
|
||||||
|
|
||||||
|
self.assertEqual(changed, [])
|
||||||
|
self.assertEqual(self.task_repo.get_task("task-completed")["status"], "completed")
|
||||||
|
|
||||||
|
def test_reconcile_persisted_incomplete_tasks_marks_stale_delete_task_failed(self) -> None:
|
||||||
|
self.task_repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-delete",
|
||||||
|
"operation": "delete",
|
||||||
|
"status": "running",
|
||||||
|
"source": "storage1/trash.txt",
|
||||||
|
"destination": "",
|
||||||
|
"created_at": "2026-03-10T10:00:00Z",
|
||||||
|
"started_at": "2026-03-10T10:00:01Z",
|
||||||
|
"current_item": "storage1/trash.txt",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = reconcile_persisted_incomplete_tasks(self.task_repo, self.history_repo)
|
||||||
|
|
||||||
|
self.assertEqual(changed, ["task-delete"])
|
||||||
|
task = self.task_repo.get_task("task-delete")
|
||||||
|
self.assertEqual(task["status"], "failed")
|
||||||
|
self.assertEqual(task["error_code"], "task_interrupted")
|
||||||
|
|
||||||
|
def test_reconcile_persisted_incomplete_tasks_marks_stale_cancelling_task_failed(self) -> None:
|
||||||
|
self.task_repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-cancelling",
|
||||||
|
"operation": "duplicate",
|
||||||
|
"status": "cancelling",
|
||||||
|
"source": "2 items",
|
||||||
|
"destination": "same directory",
|
||||||
|
"created_at": "2026-03-10T10:00:00Z",
|
||||||
|
"started_at": "2026-03-10T10:00:01Z",
|
||||||
|
"current_item": "storage1/report.txt",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self.history_repo.create_entry(
|
||||||
|
entry_id="task-cancelling",
|
||||||
|
operation="duplicate",
|
||||||
|
status="queued",
|
||||||
|
source="2 items",
|
||||||
|
destination="same directory",
|
||||||
|
created_at="2026-03-10T10:00:00Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = reconcile_persisted_incomplete_tasks(self.task_repo, self.history_repo)
|
||||||
|
|
||||||
|
self.assertEqual(changed, ["task-cancelling"])
|
||||||
|
task = self.task_repo.get_task("task-cancelling")
|
||||||
|
self.assertEqual(task["status"], "failed")
|
||||||
|
self.assertEqual(task["error_code"], "task_interrupted")
|
||||||
|
history = self.history_repo.list_history(limit=5)[0]
|
||||||
|
self.assertEqual(history["id"], "task-cancelling")
|
||||||
|
self.assertEqual(history["status"], "failed")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -59,6 +59,174 @@ class TaskRepositoryTest(unittest.TestCase):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_create_download_task_with_requested_status_and_artifact(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="download",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
status="requested",
|
||||||
|
)
|
||||||
|
self.repo.upsert_artifact(
|
||||||
|
task_id=created["id"],
|
||||||
|
file_path="/tmp/archive.zip",
|
||||||
|
file_name="docs.zip",
|
||||||
|
expires_at="2026-03-10T10:30:00Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = self.repo.get_task(created["id"])
|
||||||
|
artifact = self.repo.get_artifact(created["id"])
|
||||||
|
|
||||||
|
self.assertEqual(task["operation"], "download")
|
||||||
|
self.assertEqual(task["status"], "requested")
|
||||||
|
self.assertEqual(artifact["file_name"], "docs.zip")
|
||||||
|
|
||||||
|
def test_create_duplicate_task_is_allowed(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="duplicate",
|
||||||
|
source="storage1/report.txt",
|
||||||
|
destination="storage1/report copy.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = self.repo.get_task(created["id"])
|
||||||
|
|
||||||
|
self.assertEqual(task["operation"], "duplicate")
|
||||||
|
self.assertEqual(task["status"], "queued")
|
||||||
|
|
||||||
|
def test_mark_cancelled_transitions_requested_download_task(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="download",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
status="requested",
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = self.repo.mark_cancelled(created["id"])
|
||||||
|
task = self.repo.get_task(created["id"])
|
||||||
|
|
||||||
|
self.assertTrue(changed)
|
||||||
|
self.assertEqual(task["status"], "cancelled")
|
||||||
|
self.assertIsNotNone(task["finished_at"])
|
||||||
|
|
||||||
|
def test_request_cancellation_moves_running_file_task_to_cancelling(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="copy",
|
||||||
|
source="storage1/docs/a.txt",
|
||||||
|
destination="storage1/docs-copy/a.txt",
|
||||||
|
)
|
||||||
|
self.repo.mark_running(
|
||||||
|
created["id"],
|
||||||
|
done_items=0,
|
||||||
|
total_items=2,
|
||||||
|
current_item="storage1/docs/a.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = self.repo.request_cancellation(created["id"])
|
||||||
|
|
||||||
|
self.assertIsNotNone(task)
|
||||||
|
self.assertEqual(task["status"], "cancelling")
|
||||||
|
self.assertEqual(task["current_item"], "storage1/docs/a.txt")
|
||||||
|
self.assertIsNone(task["finished_at"])
|
||||||
|
|
||||||
|
def test_request_cancellation_moves_queued_file_task_to_cancelled(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="delete",
|
||||||
|
source="storage1/docs/a.txt",
|
||||||
|
destination="",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = self.repo.request_cancellation(created["id"])
|
||||||
|
|
||||||
|
self.assertIsNotNone(task)
|
||||||
|
self.assertEqual(task["status"], "cancelled")
|
||||||
|
self.assertIsNone(task["current_item"])
|
||||||
|
self.assertIsNotNone(task["finished_at"])
|
||||||
|
|
||||||
|
def test_finalize_cancelled_transitions_cancelling_task(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="move",
|
||||||
|
source="storage1/docs/a.txt",
|
||||||
|
destination="storage1/archive/a.txt",
|
||||||
|
)
|
||||||
|
self.repo.mark_running(
|
||||||
|
created["id"],
|
||||||
|
done_items=0,
|
||||||
|
total_items=3,
|
||||||
|
current_item="storage1/docs/a.txt",
|
||||||
|
)
|
||||||
|
self.repo.request_cancellation(created["id"])
|
||||||
|
|
||||||
|
changed = self.repo.finalize_cancelled(
|
||||||
|
created["id"],
|
||||||
|
done_items=1,
|
||||||
|
total_items=3,
|
||||||
|
)
|
||||||
|
task = self.repo.get_task(created["id"])
|
||||||
|
|
||||||
|
self.assertTrue(changed)
|
||||||
|
self.assertEqual(task["status"], "cancelled")
|
||||||
|
self.assertEqual(task["done_items"], 1)
|
||||||
|
self.assertEqual(task["total_items"], 3)
|
||||||
|
self.assertIsNone(task["current_item"])
|
||||||
|
self.assertIsNotNone(task["finished_at"])
|
||||||
|
|
||||||
|
def test_reconcile_incomplete_tasks_marks_non_terminal_failed(self) -> None:
|
||||||
|
self.repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-running",
|
||||||
|
"operation": "copy",
|
||||||
|
"status": "running",
|
||||||
|
"source": "storage1/a",
|
||||||
|
"destination": "storage2/a",
|
||||||
|
"created_at": "2026-03-10T09:00:00Z",
|
||||||
|
"started_at": "2026-03-10T09:00:01Z",
|
||||||
|
"current_item": "storage1/a",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self.repo.insert_task_for_testing(
|
||||||
|
{
|
||||||
|
"id": "task-completed",
|
||||||
|
"operation": "copy",
|
||||||
|
"status": "completed",
|
||||||
|
"source": "storage1/b",
|
||||||
|
"destination": "storage2/b",
|
||||||
|
"created_at": "2026-03-10T09:05:00Z",
|
||||||
|
"finished_at": "2026-03-10T09:06:00Z",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = self.repo.reconcile_incomplete_tasks()
|
||||||
|
|
||||||
|
running = self.repo.get_task("task-running")
|
||||||
|
completed = self.repo.get_task("task-completed")
|
||||||
|
self.assertEqual(changed, ["task-running"])
|
||||||
|
self.assertEqual(running["status"], "failed")
|
||||||
|
self.assertEqual(running["error_code"], "task_interrupted")
|
||||||
|
self.assertEqual(running["error_message"], "Task was interrupted before completion")
|
||||||
|
self.assertIsNone(running["current_item"])
|
||||||
|
self.assertIsNotNone(running["finished_at"])
|
||||||
|
self.assertEqual(completed["status"], "completed")
|
||||||
|
|
||||||
|
def test_reconcile_incomplete_tasks_removes_stale_artifact(self) -> None:
|
||||||
|
created = self.repo.create_task(
|
||||||
|
operation="download",
|
||||||
|
source="storage1/docs",
|
||||||
|
destination="docs.zip",
|
||||||
|
status="preparing",
|
||||||
|
)
|
||||||
|
self.repo.upsert_artifact(
|
||||||
|
task_id=created["id"],
|
||||||
|
file_path="/tmp/docs.zip.partial",
|
||||||
|
file_name="docs.zip",
|
||||||
|
expires_at="2026-03-10T10:30:00Z",
|
||||||
|
)
|
||||||
|
|
||||||
|
changed = self.repo.reconcile_incomplete_tasks()
|
||||||
|
|
||||||
|
task = self.repo.get_task(created["id"])
|
||||||
|
self.assertEqual(changed, [created["id"]])
|
||||||
|
self.assertEqual(task["status"], "failed")
|
||||||
|
self.assertIsNone(self.repo.get_artifact(created["id"]))
|
||||||
|
|
||||||
def test_migrates_legacy_tasks_schema_missing_source_destination(self) -> None:
|
def test_migrates_legacy_tasks_schema_missing_source_destination(self) -> None:
|
||||||
legacy_db_path = Path(self.temp_dir.name) / "legacy.db"
|
legacy_db_path = Path(self.temp_dir.name) / "legacy.db"
|
||||||
conn = sqlite3.connect(legacy_db_path)
|
conn = sqlite3.connect(legacy_db_path)
|
||||||
|
|||||||
+2889
-166
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 3.3 MiB |
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 3.3 MiB |
@@ -6,56 +6,6 @@
|
|||||||
--shadow-panel: 0 2px 8px rgba(8, 14, 22, 0.06);
|
--shadow-panel: 0 2px 8px rgba(8, 14, 22, 0.06);
|
||||||
}
|
}
|
||||||
|
|
||||||
:root[data-theme="dark"] {
|
|
||||||
--color-page-bg: #161c25;
|
|
||||||
--color-surface: #1d2531;
|
|
||||||
--color-surface-elevated: #222c39;
|
|
||||||
--color-border: #314052;
|
|
||||||
--color-border-strong: #55739f;
|
|
||||||
--color-text-primary: #e7edf6;
|
|
||||||
--color-text-muted: #9aa9bd;
|
|
||||||
--color-accent: #6aa5ff;
|
|
||||||
--color-accent-contrast: #07192f;
|
|
||||||
--color-selection-bg: #233754;
|
|
||||||
--color-selection-border: #5c8fda;
|
|
||||||
--color-current-row-bg: #1f2d42;
|
|
||||||
--color-current-row-border: #6a87b5;
|
|
||||||
--color-active-pane-border: #78adff;
|
|
||||||
--color-button-bg: #283444;
|
|
||||||
--color-button-hover: #314258;
|
|
||||||
--color-button-secondary-bg: #202935;
|
|
||||||
--color-list-header-bg: rgba(255, 255, 255, 0.02);
|
|
||||||
--color-list-row-hover: rgba(106, 165, 255, 0.08);
|
|
||||||
--color-danger: #ff8e8e;
|
|
||||||
--color-danger-bg: #462328;
|
|
||||||
--color-overlay-bg: rgba(8, 12, 18, 0.62);
|
|
||||||
}
|
|
||||||
|
|
||||||
:root[data-theme="light"] {
|
|
||||||
--color-page-bg: #eef3f9;
|
|
||||||
--color-surface: #ffffff;
|
|
||||||
--color-surface-elevated: #f8fbff;
|
|
||||||
--color-border: #d6e0ec;
|
|
||||||
--color-border-strong: #7ca0d1;
|
|
||||||
--color-text-primary: #172233;
|
|
||||||
--color-text-muted: #617086;
|
|
||||||
--color-accent: #235ec7;
|
|
||||||
--color-accent-contrast: #ffffff;
|
|
||||||
--color-selection-bg: #e5eefc;
|
|
||||||
--color-selection-border: #7b9fdb;
|
|
||||||
--color-current-row-bg: #f1f6ff;
|
|
||||||
--color-current-row-border: #a2bce8;
|
|
||||||
--color-active-pane-border: #235ec7;
|
|
||||||
--color-button-bg: #f6f9fd;
|
|
||||||
--color-button-hover: #edf3fb;
|
|
||||||
--color-button-secondary-bg: #f3f6fb;
|
|
||||||
--color-list-header-bg: #f8fbff;
|
|
||||||
--color-list-row-hover: #f5f9ff;
|
|
||||||
--color-danger: #b42323;
|
|
||||||
--color-danger-bg: #fdecec;
|
|
||||||
--color-overlay-bg: rgba(18, 28, 40, 0.30);
|
|
||||||
}
|
|
||||||
|
|
||||||
:root {
|
:root {
|
||||||
--bg: var(--color-page-bg);
|
--bg: var(--color-page-bg);
|
||||||
--panel: var(--color-surface);
|
--panel: var(--color-surface);
|
||||||
@@ -100,6 +50,20 @@ body {
|
|||||||
box-shadow: 0 1px 0 rgba(255, 255, 255, 0.03);
|
box-shadow: 0 1px 0 rgba(255, 255, 255, 0.03);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#title-brand {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#title-logo {
|
||||||
|
display: block;
|
||||||
|
width: auto;
|
||||||
|
height: 32px;
|
||||||
|
flex: 0 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
#title-zone-actions {
|
#title-zone-actions {
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
@@ -107,6 +71,170 @@ body {
|
|||||||
min-width: 0;
|
min-width: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.header-task-chip-container {
|
||||||
|
position: relative;
|
||||||
|
flex: 0 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-chip {
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
background: var(--color-surface);
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
border-radius: 999px;
|
||||||
|
padding: 5px 10px;
|
||||||
|
font: inherit;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 600;
|
||||||
|
line-height: 1.2;
|
||||||
|
cursor: pointer;
|
||||||
|
white-space: nowrap;
|
||||||
|
box-shadow: 0 1px 2px rgba(8, 14, 22, 0.08);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-chip:hover,
|
||||||
|
.header-task-chip[aria-expanded="true"] {
|
||||||
|
border-color: var(--color-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-popover {
|
||||||
|
position: absolute;
|
||||||
|
top: calc(100% + 8px);
|
||||||
|
right: 0;
|
||||||
|
width: min(540px, calc(100vw - 24px));
|
||||||
|
padding: 14px;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-md);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
box-shadow: var(--shadow-elevated);
|
||||||
|
z-index: 30;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-popover-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 12px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-link {
|
||||||
|
border: 0;
|
||||||
|
background: none;
|
||||||
|
color: var(--color-accent);
|
||||||
|
padding: 0;
|
||||||
|
font: inherit;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-popover-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 10px;
|
||||||
|
max-height: 360px;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item {
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: var(--color-surface);
|
||||||
|
padding: 10px 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-heading {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-title {
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 700;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
border-radius: 999px;
|
||||||
|
padding: 2px 8px;
|
||||||
|
font-size: 11px;
|
||||||
|
font-weight: 700;
|
||||||
|
white-space: nowrap;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-queued {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 72%, var(--color-accent) 28%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-running {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 60%, var(--color-accent) 40%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-cancelling {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 70%, var(--color-warning, #c08a00) 30%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-completed {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 72%, var(--color-success, #2f855a) 28%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-cancelled {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 78%, var(--color-text-muted) 22%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-status-badge.status-failed {
|
||||||
|
background: color-mix(in srgb, var(--color-surface-elevated) 68%, var(--color-danger) 32%);
|
||||||
|
color: var(--color-danger-text, var(--color-text-primary));
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-path,
|
||||||
|
.header-task-item-meta,
|
||||||
|
.header-task-item-empty {
|
||||||
|
margin-top: 4px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-progress {
|
||||||
|
margin-top: 5px;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-current,
|
||||||
|
.header-task-item-subtext {
|
||||||
|
margin-top: 4px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-current {
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-actions {
|
||||||
|
margin-top: 8px;
|
||||||
|
display: flex;
|
||||||
|
justify-content: flex-end;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-task-item-action {
|
||||||
|
min-width: 74px;
|
||||||
|
padding: 4px 8px;
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
h1, h2, h3 {
|
h1, h2, h3 {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
}
|
}
|
||||||
@@ -155,9 +283,57 @@ h1 {
|
|||||||
.pane-content {
|
.pane-content {
|
||||||
flex: 1 1 auto;
|
flex: 1 1 auto;
|
||||||
min-height: 0;
|
min-height: 0;
|
||||||
overflow-y: auto;
|
|
||||||
border-top: 1px solid var(--color-border);
|
border-top: 1px solid var(--color-border);
|
||||||
padding-top: 5px;
|
padding-top: 5px;
|
||||||
|
position: relative;
|
||||||
|
background: var(--color-surface);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pane-list-header {
|
||||||
|
flex: 0 0 auto;
|
||||||
|
position: relative;
|
||||||
|
z-index: 2;
|
||||||
|
background: var(--color-surface);
|
||||||
|
}
|
||||||
|
|
||||||
|
.pane-list-scroll {
|
||||||
|
flex: 1 1 auto;
|
||||||
|
min-height: 0;
|
||||||
|
overflow-y: auto;
|
||||||
|
position: relative;
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pane-focus-line {
|
||||||
|
flex: 0 0 auto;
|
||||||
|
min-height: 24px;
|
||||||
|
padding: 5px 8px 3px 8px;
|
||||||
|
border-top: 1px solid var(--color-border);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.25;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pane-focus-name {
|
||||||
|
flex: 1 1 auto;
|
||||||
|
min-width: 0;
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pane-focus-selected {
|
||||||
|
flex: 0 0 auto;
|
||||||
|
white-space: nowrap;
|
||||||
|
color: var(--color-text-secondary, var(--color-text-primary));
|
||||||
|
opacity: 0.9;
|
||||||
}
|
}
|
||||||
|
|
||||||
.toolbar {
|
.toolbar {
|
||||||
@@ -173,16 +349,7 @@ h1 {
|
|||||||
}
|
}
|
||||||
|
|
||||||
.pane-topbar {
|
.pane-topbar {
|
||||||
justify-content: space-between;
|
justify-content: flex-end;
|
||||||
}
|
|
||||||
|
|
||||||
.pane-title {
|
|
||||||
min-width: 42px;
|
|
||||||
font-size: 12px;
|
|
||||||
font-weight: 700;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.08em;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.checkbox {
|
.checkbox {
|
||||||
@@ -236,6 +403,30 @@ button:disabled {
|
|||||||
background: var(--color-button-secondary-bg);
|
background: var(--color-button-secondary-bg);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.upload-progress {
|
||||||
|
display: grid;
|
||||||
|
gap: 1px;
|
||||||
|
margin-top: 4px;
|
||||||
|
padding: 4px 8px;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.25;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-progress-target,
|
||||||
|
.upload-progress-file {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-progress-count {
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
#theme-toggle-icon {
|
#theme-toggle-icon {
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
line-height: 1;
|
line-height: 1;
|
||||||
@@ -257,7 +448,7 @@ button:disabled {
|
|||||||
display: flex;
|
display: flex;
|
||||||
flex-wrap: wrap;
|
flex-wrap: wrap;
|
||||||
gap: 4px;
|
gap: 4px;
|
||||||
margin-bottom: 4px;
|
margin-bottom: 2px;
|
||||||
color: var(--color-text-muted);
|
color: var(--color-text-muted);
|
||||||
font-size: 12px;
|
font-size: 12px;
|
||||||
}
|
}
|
||||||
@@ -284,15 +475,22 @@ button:disabled {
|
|||||||
display: grid;
|
display: grid;
|
||||||
grid-template-columns: minmax(0, 1fr) 88px 138px;
|
grid-template-columns: minmax(0, 1fr) 88px 138px;
|
||||||
gap: 6px;
|
gap: 6px;
|
||||||
padding: 4px 6px 5px 6px;
|
padding: 5px 6px 6px 6px;
|
||||||
border-bottom: 1px solid var(--color-border);
|
border-bottom: 1px solid var(--color-border);
|
||||||
margin-bottom: 3px;
|
margin-bottom: 0;
|
||||||
background: var(--color-list-header-bg);
|
background: var(--color-list-header-bg);
|
||||||
border-radius: var(--radius-sm);
|
border-radius: var(--radius-sm);
|
||||||
color: var(--color-text-muted);
|
color: var(--color-text-muted);
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
letter-spacing: 0.05em;
|
letter-spacing: 0.05em;
|
||||||
|
box-shadow:
|
||||||
|
0 1px 0 rgba(0, 0, 0, 0.04),
|
||||||
|
0 8px 10px -10px rgba(0, 0, 0, 0.45);
|
||||||
|
}
|
||||||
|
|
||||||
|
.col-name {
|
||||||
|
padding-left: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.col-size,
|
.col-size,
|
||||||
@@ -326,6 +524,72 @@ button:disabled {
|
|||||||
min-width: 0;
|
min-width: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.entry-select-slot {
|
||||||
|
width: 18px;
|
||||||
|
min-width: 18px;
|
||||||
|
height: 18px;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-toggle {
|
||||||
|
width: 18px;
|
||||||
|
min-width: 18px;
|
||||||
|
height: 18px;
|
||||||
|
padding: 0;
|
||||||
|
border: none;
|
||||||
|
border-radius: 999px;
|
||||||
|
background: transparent;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-toggle:hover {
|
||||||
|
background: transparent;
|
||||||
|
border-color: transparent;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-toggle.is-disabled {
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-indicator {
|
||||||
|
width: 16px;
|
||||||
|
height: 16px;
|
||||||
|
border-radius: 999px;
|
||||||
|
border: 1.5px solid color-mix(in srgb, var(--color-text-muted) 80%, transparent);
|
||||||
|
opacity: 0;
|
||||||
|
transition: opacity 100ms ease, border-color 100ms ease, background 100ms ease;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.list li:hover .entry-select-indicator,
|
||||||
|
.entry-select-toggle:focus-visible .entry-select-indicator,
|
||||||
|
.entry-select-toggle.is-selected .entry-select-indicator {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-toggle.is-selected .entry-select-indicator {
|
||||||
|
border-color: var(--color-accent);
|
||||||
|
background: var(--color-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-select-toggle.is-selected .entry-select-indicator::after {
|
||||||
|
content: "✓";
|
||||||
|
position: absolute;
|
||||||
|
inset: 0;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
font-size: 11px;
|
||||||
|
line-height: 1;
|
||||||
|
color: var(--color-surface);
|
||||||
|
}
|
||||||
|
|
||||||
.entry-media-slot {
|
.entry-media-slot {
|
||||||
width: 28px;
|
width: 28px;
|
||||||
min-width: 28px;
|
min-width: 28px;
|
||||||
@@ -366,6 +630,24 @@ button:disabled {
|
|||||||
stroke-linejoin: round;
|
stroke-linejoin: round;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.entry-media-svg.is-filled {
|
||||||
|
fill: currentColor;
|
||||||
|
stroke: currentColor;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-media-svg .entry-media-detail {
|
||||||
|
fill: none;
|
||||||
|
stroke: var(--color-surface);
|
||||||
|
stroke-width: 1.45;
|
||||||
|
stroke-linecap: round;
|
||||||
|
stroke-linejoin: round;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-media-svg .entry-media-detail-solid {
|
||||||
|
fill: var(--color-surface);
|
||||||
|
stroke: none;
|
||||||
|
}
|
||||||
|
|
||||||
.entry-media-icon.folder {
|
.entry-media-icon.folder {
|
||||||
color: color-mix(in srgb, #d1a85e 72%, var(--color-text-muted));
|
color: color-mix(in srgb, #d1a85e 72%, var(--color-text-muted));
|
||||||
}
|
}
|
||||||
@@ -517,15 +799,8 @@ button:disabled {
|
|||||||
#footer-bar {
|
#footer-bar {
|
||||||
border-top: 1px solid var(--color-border);
|
border-top: 1px solid var(--color-border);
|
||||||
background: var(--color-surface-elevated);
|
background: var(--color-surface-elevated);
|
||||||
padding: 5px 12px 4px 12px;
|
padding: 6px 12px 5px 12px;
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
|
||||||
align-items: center;
|
|
||||||
gap: 3px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#function-bar-meta {
|
|
||||||
margin-bottom: 0;
|
|
||||||
justify-content: center;
|
justify-content: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -534,7 +809,8 @@ button:disabled {
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
gap: 5px;
|
gap: 5px;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
max-width: 760px;
|
max-width: none;
|
||||||
|
flex-wrap: nowrap;
|
||||||
}
|
}
|
||||||
|
|
||||||
#function-bar button {
|
#function-bar button {
|
||||||
@@ -546,6 +822,50 @@ button:disabled {
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.split-button {
|
||||||
|
position: relative;
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: stretch;
|
||||||
|
}
|
||||||
|
|
||||||
|
.split-button > button {
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#upload-btn {
|
||||||
|
border-top-right-radius: 0;
|
||||||
|
border-bottom-right-radius: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#upload-menu-toggle {
|
||||||
|
min-width: 32px;
|
||||||
|
padding-left: 7px;
|
||||||
|
padding-right: 7px;
|
||||||
|
border-left: 1px solid var(--color-border);
|
||||||
|
border-top-left-radius: 0;
|
||||||
|
border-bottom-left-radius: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.split-menu {
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
bottom: calc(100% + 6px);
|
||||||
|
min-width: 148px;
|
||||||
|
display: grid;
|
||||||
|
gap: 4px;
|
||||||
|
padding: 6px;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
box-shadow: var(--shadow-elevated);
|
||||||
|
z-index: 20;
|
||||||
|
}
|
||||||
|
|
||||||
|
.split-menu button {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
.shortcut-hint {
|
.shortcut-hint {
|
||||||
color: var(--color-text-muted);
|
color: var(--color-text-muted);
|
||||||
font-size: 10px;
|
font-size: 10px;
|
||||||
@@ -581,6 +901,103 @@ button:disabled {
|
|||||||
box-shadow: var(--shadow-elevated);
|
box-shadow: var(--shadow-elevated);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.feedback-card {
|
||||||
|
width: min(440px, calc(100vw - 24px));
|
||||||
|
}
|
||||||
|
|
||||||
|
.context-menu {
|
||||||
|
position: fixed;
|
||||||
|
min-width: 220px;
|
||||||
|
padding: 8px;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: var(--color-surface-elevated);
|
||||||
|
box-shadow: var(--shadow-elevated);
|
||||||
|
z-index: 1100;
|
||||||
|
}
|
||||||
|
|
||||||
|
.context-menu-scope {
|
||||||
|
font-size: 11px;
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.context-menu-target {
|
||||||
|
margin-top: 4px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
.context-menu-separator {
|
||||||
|
height: 1px;
|
||||||
|
margin: 8px 0;
|
||||||
|
background: var(--color-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.context-menu button {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
#upload-modal .popup-card {
|
||||||
|
max-width: 320px;
|
||||||
|
padding: 12px 14px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
#download-modal .popup-card {
|
||||||
|
max-width: 320px;
|
||||||
|
padding: 12px 14px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-modal-progress {
|
||||||
|
width: 100%;
|
||||||
|
height: 4px;
|
||||||
|
border-radius: 999px;
|
||||||
|
background: var(--color-border);
|
||||||
|
margin: 6px 0;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-modal-progress-bar {
|
||||||
|
height: 100%;
|
||||||
|
width: 0;
|
||||||
|
background: var(--color-accent);
|
||||||
|
transition: width 150ms ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.upload-modal-count {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.download-modal-progress {
|
||||||
|
width: 100%;
|
||||||
|
height: 4px;
|
||||||
|
border-radius: 999px;
|
||||||
|
background: var(--color-border);
|
||||||
|
margin: 6px 0;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.download-modal-progress-bar {
|
||||||
|
height: 100%;
|
||||||
|
width: 0;
|
||||||
|
background: var(--color-accent);
|
||||||
|
transition: width 150ms ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.download-modal-count {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
.popup-meta {
|
.popup-meta {
|
||||||
color: var(--color-text-muted);
|
color: var(--color-text-muted);
|
||||||
font-size: 12px;
|
font-size: 12px;
|
||||||
@@ -660,6 +1077,36 @@ button:disabled {
|
|||||||
border: 1px solid var(--color-border);
|
border: 1px solid var(--color-border);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.image-card {
|
||||||
|
width: min(1100px, calc(100vw - 28px));
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-toolbar {
|
||||||
|
display: flex;
|
||||||
|
gap: 8px;
|
||||||
|
margin: 8px 0 8px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-viewport {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
min-height: 420px;
|
||||||
|
height: calc(100vh - 240px);
|
||||||
|
overflow: auto;
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: color-mix(in srgb, var(--color-surface) 88%, black 12%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-viewer-img {
|
||||||
|
max-width: none;
|
||||||
|
max-height: none;
|
||||||
|
transform-origin: center center;
|
||||||
|
transition: transform 120ms ease;
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
|
||||||
.search-card {
|
.search-card {
|
||||||
width: min(680px, calc(100vw - 32px));
|
width: min(680px, calc(100vw - 32px));
|
||||||
}
|
}
|
||||||
@@ -733,7 +1180,7 @@ button:disabled {
|
|||||||
|
|
||||||
.settings-card {
|
.settings-card {
|
||||||
position: relative;
|
position: relative;
|
||||||
width: min(760px, calc(100vw - 32px));
|
width: min(1180px, calc(100vw - 32px));
|
||||||
max-height: calc(100vh - 56px);
|
max-height: calc(100vh - 56px);
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
@@ -760,12 +1207,53 @@ button:disabled {
|
|||||||
min-height: 180px;
|
min-height: 180px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.settings-readonly-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 8px;
|
||||||
|
margin-top: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-readonly-item {
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
background: var(--color-surface);
|
||||||
|
padding: 8px 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-readonly-label {
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 700;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
margin-bottom: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-readonly-value {
|
||||||
|
font-size: 14px;
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
.settings-placeholder-title {
|
.settings-placeholder-title {
|
||||||
font-size: 13px;
|
font-size: 13px;
|
||||||
font-weight: 700;
|
font-weight: 700;
|
||||||
margin-bottom: 8px;
|
margin-bottom: 8px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.settings-activity-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: minmax(0, 1fr) minmax(0, 1fr);
|
||||||
|
gap: 14px;
|
||||||
|
align-items: start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.settings-activity-panel {
|
||||||
|
min-width: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
.settings-log-list {
|
.settings-log-list {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
@@ -811,6 +1299,18 @@ button:disabled {
|
|||||||
color: var(--color-danger);
|
color: var(--color-danger);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@media (max-width: 900px) {
|
||||||
|
.settings-activity-grid {
|
||||||
|
grid-template-columns: minmax(0, 1fr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 720px) {
|
||||||
|
.settings-card {
|
||||||
|
width: min(1180px, calc(100vw - 20px));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
.viewer-close {
|
.viewer-close {
|
||||||
position: absolute;
|
position: absolute;
|
||||||
top: 10px;
|
top: 10px;
|
||||||
+207
-9
@@ -4,14 +4,46 @@
|
|||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
<title>WebManager v2</title>
|
<title>WebManager v2</title>
|
||||||
<link rel="stylesheet" href="/ui/style.css">
|
<link rel="icon" href="/ui/assets/img/favicon.svg" type="image/svg+xml">
|
||||||
|
<link rel="stylesheet" href="/ui/base.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-default.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-macos-soft.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-midnight.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-graphite.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-windows11.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-commander-electric.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-nord-arctic.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-catppuccin-soft.css">
|
||||||
|
<link rel="stylesheet" href="/ui/theme-fluent-neon.css">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div id="app-shell">
|
<div id="app-shell">
|
||||||
<header id="title-zone">
|
<header id="title-zone">
|
||||||
|
<div id="title-brand">
|
||||||
|
<img id="title-logo" src="/ui/assets/img/logo.svg" alt="">
|
||||||
<h1>WebManager v2</h1>
|
<h1>WebManager v2</h1>
|
||||||
|
</div>
|
||||||
<div id="title-zone-actions">
|
<div id="title-zone-actions">
|
||||||
<div id="status"></div>
|
<div id="status"></div>
|
||||||
|
<div id="header-task-chip-container" class="header-task-chip-container hidden">
|
||||||
|
<button
|
||||||
|
id="header-task-chip-btn"
|
||||||
|
type="button"
|
||||||
|
class="header-task-chip"
|
||||||
|
aria-haspopup="dialog"
|
||||||
|
aria-expanded="false"
|
||||||
|
aria-controls="header-task-popover"
|
||||||
|
>
|
||||||
|
<span id="header-task-chip-label">1 active operation</span>
|
||||||
|
</button>
|
||||||
|
<div id="header-task-popover" class="header-task-popover hidden" role="dialog" aria-label="Active operations">
|
||||||
|
<div class="header-task-popover-header">
|
||||||
|
<strong>Active operations</strong>
|
||||||
|
<button id="header-task-logs-btn" type="button" class="header-task-link">View in Logs</button>
|
||||||
|
</div>
|
||||||
|
<div id="header-task-popover-list" class="header-task-popover-list"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<button id="theme-toggle" type="button" aria-label="Toggle theme" title="Toggle theme">
|
<button id="theme-toggle" type="button" aria-label="Toggle theme" title="Toggle theme">
|
||||||
<span id="theme-toggle-icon" aria-hidden="true">☾</span>
|
<span id="theme-toggle-icon" aria-hidden="true">☾</span>
|
||||||
</button>
|
</button>
|
||||||
@@ -22,51 +54,69 @@
|
|||||||
<section class="panel pane" id="left-pane" data-pane="left">
|
<section class="panel pane" id="left-pane" data-pane="left">
|
||||||
<div class="pane-header">
|
<div class="pane-header">
|
||||||
<div class="toolbar compact-toolbar pane-topbar">
|
<div class="toolbar compact-toolbar pane-topbar">
|
||||||
<h2 class="pane-title">Left</h2>
|
|
||||||
<label class="checkbox"><input id="left-hidden-toggle" type="checkbox">Hidden</label>
|
<label class="checkbox"><input id="left-hidden-toggle" type="checkbox">Hidden</label>
|
||||||
</div>
|
</div>
|
||||||
<div class="pathline compact-line">C:<code id="left-current-path"></code></div>
|
|
||||||
<nav id="left-breadcrumbs" class="breadcrumbs" aria-label="Left breadcrumb"></nav>
|
<nav id="left-breadcrumbs" class="breadcrumbs" aria-label="Left breadcrumb"></nav>
|
||||||
<div id="left-browse-error" class="error"></div>
|
<div id="left-browse-error" class="error"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="pane-content">
|
<div class="pane-content">
|
||||||
|
<div class="pane-list-header">
|
||||||
<div class="list-grid-header">
|
<div class="list-grid-header">
|
||||||
<span class="col-sel"></span>
|
|
||||||
<span class="col-name">Name</span>
|
<span class="col-name">Name</span>
|
||||||
<span class="col-size">Size</span>
|
<span class="col-size">Size</span>
|
||||||
<span class="col-modified">Modified</span>
|
<span class="col-modified">Modified</span>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="pane-list-scroll">
|
||||||
<ul id="left-items" class="list"></ul>
|
<ul id="left-items" class="list"></ul>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
<div id="left-focus-line" class="pane-focus-line" aria-live="polite">
|
||||||
|
<span id="left-focus-name" class="pane-focus-name">—</span>
|
||||||
|
<span id="left-focus-selected" class="pane-focus-selected hidden"></span>
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<section class="panel pane" id="right-pane" data-pane="right">
|
<section class="panel pane" id="right-pane" data-pane="right">
|
||||||
<div class="pane-header">
|
<div class="pane-header">
|
||||||
<div class="toolbar compact-toolbar pane-topbar">
|
<div class="toolbar compact-toolbar pane-topbar">
|
||||||
<h2 class="pane-title">Right</h2>
|
|
||||||
<label class="checkbox"><input id="right-hidden-toggle" type="checkbox">Hidden</label>
|
<label class="checkbox"><input id="right-hidden-toggle" type="checkbox">Hidden</label>
|
||||||
</div>
|
</div>
|
||||||
<div class="pathline compact-line">C:<code id="right-current-path"></code></div>
|
|
||||||
<nav id="right-breadcrumbs" class="breadcrumbs" aria-label="Right breadcrumb"></nav>
|
<nav id="right-breadcrumbs" class="breadcrumbs" aria-label="Right breadcrumb"></nav>
|
||||||
<div id="right-browse-error" class="error"></div>
|
<div id="right-browse-error" class="error"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="pane-content">
|
<div class="pane-content">
|
||||||
|
<div class="pane-list-header">
|
||||||
<div class="list-grid-header">
|
<div class="list-grid-header">
|
||||||
<span class="col-sel"></span>
|
|
||||||
<span class="col-name">Name</span>
|
<span class="col-name">Name</span>
|
||||||
<span class="col-size">Size</span>
|
<span class="col-size">Size</span>
|
||||||
<span class="col-modified">Modified</span>
|
<span class="col-modified">Modified</span>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="pane-list-scroll">
|
||||||
<ul id="right-items" class="list"></ul>
|
<ul id="right-items" class="list"></ul>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
<div id="right-focus-line" class="pane-focus-line" aria-live="polite">
|
||||||
|
<span id="right-focus-name" class="pane-focus-name">—</span>
|
||||||
|
<span id="right-focus-selected" class="pane-focus-selected hidden"></span>
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
</main>
|
</main>
|
||||||
|
|
||||||
<section id="footer-bar">
|
<section id="footer-bar">
|
||||||
<div id="function-bar-meta" class="pathline compact-line">Active:<code id="active-pane-label">left</code></div>
|
|
||||||
<div id="function-bar" class="toolbar compact-toolbar">
|
<div id="function-bar" class="toolbar compact-toolbar">
|
||||||
|
<div id="upload-menu" class="split-button upload-menu">
|
||||||
|
<button id="upload-btn" type="button"><span>Upload Files</span></button>
|
||||||
|
<button id="upload-menu-toggle" type="button" aria-haspopup="menu" aria-expanded="false" aria-label="Upload options">
|
||||||
|
<span aria-hidden="true">▾</span>
|
||||||
|
</button>
|
||||||
|
<div id="upload-menu-popup" class="split-menu hidden" role="menu" aria-label="Upload options">
|
||||||
|
<button id="upload-folder-btn" type="button" role="menuitem"><span>Upload Folder</span></button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<button id="settings-btn" type="button"><span class="shortcut-hint">F1</span><span>Settings</span></button>
|
<button id="settings-btn" type="button"><span class="shortcut-hint">F1</span><span>Settings</span></button>
|
||||||
<button id="rename-btn" type="button" disabled><span class="shortcut-hint">F2</span><span>Rename</span></button>
|
<button id="rename-btn" type="button" disabled><span class="shortcut-hint">F2</span><span>Rename</span></button>
|
||||||
<button id="view-btn" type="button" disabled><span class="shortcut-hint">F3</span><span>View</span></button>
|
<button id="view-btn" type="button" disabled><span class="shortcut-hint">F3</span><span>View</span></button>
|
||||||
@@ -76,16 +126,62 @@
|
|||||||
<button id="mkdir-btn" type="button"><span class="shortcut-hint">F7</span><span>MKdir</span></button>
|
<button id="mkdir-btn" type="button"><span class="shortcut-hint">F7</span><span>MKdir</span></button>
|
||||||
<button id="delete-btn" type="button" disabled><span class="shortcut-hint">F8</span><span>Delete</span></button>
|
<button id="delete-btn" type="button" disabled><span class="shortcut-hint">F8</span><span>Delete</span></button>
|
||||||
</div>
|
</div>
|
||||||
|
<input id="upload-input" type="file" multiple hidden>
|
||||||
<div id="actions-error" class="error"></div>
|
<div id="actions-error" class="error"></div>
|
||||||
</section>
|
</section>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div id="feedback-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="feedback-title">
|
||||||
|
<div class="popup-card feedback-card">
|
||||||
|
<h3 id="feedback-title">Action feedback</h3>
|
||||||
|
<div id="feedback-message" class="popup-meta"></div>
|
||||||
|
<div class="popup-actions">
|
||||||
|
<button id="feedback-close-btn" type="button">Close</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="download-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="download-modal-title">
|
||||||
|
<div class="popup-card download-modal-card">
|
||||||
|
<h3 id="download-modal-title">Preparing download</h3>
|
||||||
|
<div id="download-modal-target" class="popup-meta"></div>
|
||||||
|
<div id="download-modal-current-file" class="popup-meta"></div>
|
||||||
|
<div class="download-modal-progress">
|
||||||
|
<div id="download-modal-progress-bar" class="download-modal-progress-bar"></div>
|
||||||
|
</div>
|
||||||
|
<div id="download-modal-count" class="download-modal-count"></div>
|
||||||
|
<div id="download-modal-status" class="popup-meta"></div>
|
||||||
|
<div class="popup-actions">
|
||||||
|
<button id="download-modal-logs-btn" type="button">View in Logs</button>
|
||||||
|
<button id="download-modal-cancel-btn" type="button" class="hidden">Cancel</button>
|
||||||
|
<button id="download-modal-close-btn" type="button">Close</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="context-menu" class="context-menu hidden" role="menu" aria-label="Item context menu">
|
||||||
|
<div id="context-menu-scope" class="context-menu-scope"></div>
|
||||||
|
<div id="context-menu-target" class="context-menu-target"></div>
|
||||||
|
<div class="context-menu-separator"></div>
|
||||||
|
<button id="context-menu-open-btn" type="button" role="menuitem">Open</button>
|
||||||
|
<button id="context-menu-edit-btn" type="button" role="menuitem">Edit</button>
|
||||||
|
<button id="context-menu-download-btn" type="button" role="menuitem">Download</button>
|
||||||
|
<button id="context-menu-rename-btn" type="button" role="menuitem">Rename</button>
|
||||||
|
<button id="context-menu-duplicate-btn" type="button" role="menuitem">Duplicate</button>
|
||||||
|
<button id="context-menu-copy-btn" type="button" role="menuitem">Copy</button>
|
||||||
|
<button id="context-menu-move-btn" type="button" role="menuitem">Move</button>
|
||||||
|
<button id="context-menu-delete-btn" type="button" role="menuitem">Delete</button>
|
||||||
|
<button id="context-menu-properties-btn" type="button" role="menuitem">Properties</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div id="settings-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="settings-title">
|
<div id="settings-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="settings-title">
|
||||||
<div class="popup-card settings-card">
|
<div class="popup-card settings-card">
|
||||||
<button id="settings-close-btn" class="viewer-close" type="button" aria-label="Close settings">X</button>
|
<button id="settings-close-btn" class="viewer-close" type="button" aria-label="Close settings">X</button>
|
||||||
<h3 id="settings-title">Settings</h3>
|
<h3 id="settings-title">Settings</h3>
|
||||||
<div class="settings-tabs" role="tablist" aria-label="Settings tabs">
|
<div class="settings-tabs" role="tablist" aria-label="Settings tabs">
|
||||||
<button id="settings-general-tab" class="settings-tab is-active" type="button" role="tab" aria-selected="true">General</button>
|
<button id="settings-general-tab" class="settings-tab is-active" type="button" role="tab" aria-selected="true">General</button>
|
||||||
|
<button id="settings-interface-tab" class="settings-tab" type="button" role="tab" aria-selected="false">Interface</button>
|
||||||
|
<button id="settings-downloads-tab" class="settings-tab" type="button" role="tab" aria-selected="false">Downloads</button>
|
||||||
<button id="settings-logs-tab" class="settings-tab" type="button" role="tab" aria-selected="false">Logs</button>
|
<button id="settings-logs-tab" class="settings-tab" type="button" role="tab" aria-selected="false">Logs</button>
|
||||||
</div>
|
</div>
|
||||||
<section id="settings-general-panel" class="settings-panel" role="tabpanel" aria-labelledby="settings-general-tab">
|
<section id="settings-general-panel" class="settings-panel" role="tabpanel" aria-labelledby="settings-general-tab">
|
||||||
@@ -108,13 +204,84 @@
|
|||||||
<button id="settings-general-save-btn" type="button">Save</button>
|
<button id="settings-general-save-btn" type="button">Save</button>
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
<section id="settings-interface-panel" class="settings-panel hidden" role="tabpanel" aria-labelledby="settings-interface-tab">
|
||||||
|
<div class="settings-placeholder-title">Interface</div>
|
||||||
|
<label class="settings-field" for="settings-selected-theme">
|
||||||
|
<span>Theme</span>
|
||||||
|
<select id="settings-selected-theme">
|
||||||
|
<option value="default">Default</option>
|
||||||
|
<option value="macos-soft">macOS Soft</option>
|
||||||
|
<option value="midnight">Midnight</option>
|
||||||
|
<option value="graphite">Graphite</option>
|
||||||
|
<option value="windows11">Windows 11</option>
|
||||||
|
<option value="commander-electric">Commander Electric</option>
|
||||||
|
<option value="nord-arctic">Nord Arctic</option>
|
||||||
|
<option value="catppuccin-soft">Catppuccin Soft</option>
|
||||||
|
<option value="fluent-neon">Fluent Neon</option>
|
||||||
|
</select>
|
||||||
|
</label>
|
||||||
|
<div id="settings-interface-error" class="error"></div>
|
||||||
|
<div class="settings-actions">
|
||||||
|
<button id="settings-interface-save-btn" type="button">Save</button>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
<section id="settings-downloads-panel" class="settings-panel hidden" role="tabpanel" aria-labelledby="settings-downloads-tab">
|
||||||
|
<div class="settings-placeholder-title">Downloads</div>
|
||||||
|
<div class="popup-meta">ZIP download limits are shown for reference and cannot be changed here.</div>
|
||||||
|
<div class="settings-readonly-list">
|
||||||
|
<div class="settings-readonly-item">
|
||||||
|
<div class="settings-readonly-label">Max items</div>
|
||||||
|
<div id="settings-download-max-items" class="settings-readonly-value"></div>
|
||||||
|
</div>
|
||||||
|
<div class="settings-readonly-item">
|
||||||
|
<div class="settings-readonly-label">Max total input size</div>
|
||||||
|
<div id="settings-download-max-total-size" class="settings-readonly-value"></div>
|
||||||
|
</div>
|
||||||
|
<div class="settings-readonly-item">
|
||||||
|
<div class="settings-readonly-label">Max individual file size</div>
|
||||||
|
<div id="settings-download-max-file-size" class="settings-readonly-value"></div>
|
||||||
|
</div>
|
||||||
|
<div class="settings-readonly-item">
|
||||||
|
<div class="settings-readonly-label">Scan timeout</div>
|
||||||
|
<div id="settings-download-scan-timeout" class="settings-readonly-value"></div>
|
||||||
|
</div>
|
||||||
|
<div class="settings-readonly-item">
|
||||||
|
<div class="settings-readonly-label">Symlink policy</div>
|
||||||
|
<div id="settings-download-symlink-policy" class="settings-readonly-value"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
<section id="settings-logs-panel" class="settings-panel hidden" role="tabpanel" aria-labelledby="settings-logs-tab">
|
<section id="settings-logs-panel" class="settings-panel hidden" role="tabpanel" aria-labelledby="settings-logs-tab">
|
||||||
|
<div class="settings-activity-grid">
|
||||||
|
<section class="settings-activity-panel" aria-labelledby="settings-tasks-title">
|
||||||
|
<div id="settings-tasks-title" class="settings-placeholder-title">Tasks</div>
|
||||||
|
<div id="settings-tasks-list" class="settings-log-list"></div>
|
||||||
|
</section>
|
||||||
|
<section class="settings-activity-panel" aria-labelledby="settings-history-title">
|
||||||
|
<div id="settings-history-title" class="settings-placeholder-title">History</div>
|
||||||
<div id="settings-logs-error" class="error"></div>
|
<div id="settings-logs-error" class="error"></div>
|
||||||
<div id="settings-logs-list" class="settings-log-list"></div>
|
<div id="settings-logs-list" class="settings-log-list"></div>
|
||||||
</section>
|
</section>
|
||||||
</div>
|
</div>
|
||||||
|
</section>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div id="upload-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="upload-modal-title">
|
||||||
|
<div class="popup-card upload-modal-card">
|
||||||
|
<h3 id="upload-modal-title">Uploading</h3>
|
||||||
|
<div id="upload-modal-target" class="popup-meta"></div>
|
||||||
|
<div id="upload-modal-current-file" class="popup-meta"></div>
|
||||||
|
<div class="upload-modal-progress">
|
||||||
|
<div id="upload-modal-progress-bar" class="upload-modal-progress-bar"></div>
|
||||||
|
</div>
|
||||||
|
<div id="upload-modal-count" class="upload-modal-count"></div>
|
||||||
|
<div id="upload-modal-status" class="popup-meta"></div>
|
||||||
|
<div class="popup-actions">
|
||||||
|
<button id="upload-modal-cancel-btn" type="button">Cancel</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<div id="search-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="search-title">
|
<div id="search-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="search-title">
|
||||||
<div class="popup-card search-card">
|
<div class="popup-card search-card">
|
||||||
<button id="search-close-btn" class="viewer-close" type="button" aria-label="Close search">X</button>
|
<button id="search-close-btn" class="viewer-close" type="button" aria-label="Close search">X</button>
|
||||||
@@ -169,7 +336,7 @@
|
|||||||
<div class="popup-card">
|
<div class="popup-card">
|
||||||
<button id="rename-close-btn" class="viewer-close" type="button" aria-label="Close rename">X</button>
|
<button id="rename-close-btn" class="viewer-close" type="button" aria-label="Close rename">X</button>
|
||||||
<h3 id="rename-title">Rename</h3>
|
<h3 id="rename-title">Rename</h3>
|
||||||
<label for="rename-input" class="popup-label">Name</label>
|
<label id="rename-label" for="rename-input" class="popup-label">Name</label>
|
||||||
<input id="rename-input" type="text" autocomplete="off">
|
<input id="rename-input" type="text" autocomplete="off">
|
||||||
<div id="rename-error" class="error"></div>
|
<div id="rename-error" class="error"></div>
|
||||||
<div class="popup-actions">
|
<div class="popup-actions">
|
||||||
@@ -192,6 +359,19 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div id="delete-confirm-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="delete-confirm-title">
|
||||||
|
<div class="popup-card">
|
||||||
|
<h3 id="delete-confirm-title">Delete folder and contents?</h3>
|
||||||
|
<div id="delete-confirm-message" class="popup-meta">This will permanently delete the folder and all files and subfolders inside it.</div>
|
||||||
|
<div id="delete-confirm-path" class="popup-meta"></div>
|
||||||
|
<div id="delete-confirm-error" class="error"></div>
|
||||||
|
<div class="popup-actions">
|
||||||
|
<button id="delete-confirm-apply-btn" type="button">Delete</button>
|
||||||
|
<button id="delete-confirm-cancel-btn" type="button">Cancel</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div id="viewer-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="viewer-title">
|
<div id="viewer-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="viewer-title">
|
||||||
<div class="popup-card viewer-card">
|
<div class="popup-card viewer-card">
|
||||||
<button id="viewer-close-btn" class="viewer-close" type="button" aria-label="Close viewer">X</button>
|
<button id="viewer-close-btn" class="viewer-close" type="button" aria-label="Close viewer">X</button>
|
||||||
@@ -225,6 +405,24 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div id="image-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="image-title">
|
||||||
|
<div class="popup-card viewer-card image-card">
|
||||||
|
<button id="image-close-btn" class="viewer-close" type="button" aria-label="Close image">X</button>
|
||||||
|
<h3 id="image-title">Image</h3>
|
||||||
|
<div id="image-file-name" class="popup-meta"></div>
|
||||||
|
<div id="image-file-path" class="popup-meta"></div>
|
||||||
|
<div class="image-toolbar">
|
||||||
|
<button id="image-zoom-out-btn" type="button">Zoom out</button>
|
||||||
|
<button id="image-reset-btn" type="button">Reset</button>
|
||||||
|
<button id="image-zoom-in-btn" type="button">Zoom in</button>
|
||||||
|
</div>
|
||||||
|
<div id="image-error" class="error"></div>
|
||||||
|
<div id="image-viewport" class="image-viewport">
|
||||||
|
<img id="image-viewer-img" class="image-viewer-img" alt="">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div id="editor-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="editor-title">
|
<div id="editor-modal" class="popup-overlay hidden" role="dialog" aria-modal="true" aria-labelledby="editor-title">
|
||||||
<div class="popup-card viewer-card editor-card">
|
<div class="popup-card viewer-card editor-card">
|
||||||
<button id="editor-close-btn" class="viewer-close" type="button" aria-label="Close editor">X</button>
|
<button id="editor-close-btn" class="viewer-close" type="button" aria-label="Close editor">X</button>
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
:root[data-theme-family="catppuccin-soft"][data-color-mode="dark"] {
|
||||||
|
--color-page-bg: #1e1f2c;
|
||||||
|
--color-surface: #2b2d42;
|
||||||
|
--color-surface-elevated: #353853;
|
||||||
|
--color-border: #585b7b;
|
||||||
|
--color-border-strong: #b4befe;
|
||||||
|
--color-text-primary: #f4f1fb;
|
||||||
|
--color-text-muted: #c4bedb;
|
||||||
|
--color-accent: #f5c2e7;
|
||||||
|
--color-accent-contrast: #44253c;
|
||||||
|
--color-selection-bg: #4a466a;
|
||||||
|
--color-selection-border: #f2a6d9;
|
||||||
|
--color-current-row-bg: #3d3a58;
|
||||||
|
--color-current-row-border: #89dceb;
|
||||||
|
--color-active-pane-border: #89dceb;
|
||||||
|
--color-button-bg: #44425f;
|
||||||
|
--color-button-hover: #514e71;
|
||||||
|
--color-button-secondary-bg: #373550;
|
||||||
|
--color-list-header-bg: rgba(245, 194, 231, 0.08);
|
||||||
|
--color-list-row-hover: rgba(245, 194, 231, 0.08);
|
||||||
|
--color-danger: #f2a6b3;
|
||||||
|
--color-danger-bg: #5b3040;
|
||||||
|
--color-overlay-bg: rgba(12, 10, 20, 0.62);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-theme-family="catppuccin-soft"][data-color-mode="light"] {
|
||||||
|
--color-page-bg: #f7f2fb;
|
||||||
|
--color-surface: #fff9fd;
|
||||||
|
--color-surface-elevated: #f4ebf7;
|
||||||
|
--color-border: #dccde6;
|
||||||
|
--color-border-strong: #c49ecf;
|
||||||
|
--color-text-primary: #3d314c;
|
||||||
|
--color-text-muted: #7d6d8f;
|
||||||
|
--color-accent: #b85ba0;
|
||||||
|
--color-accent-contrast: #ffffff;
|
||||||
|
--color-selection-bg: #f0dff1;
|
||||||
|
--color-selection-border: #d695c5;
|
||||||
|
--color-current-row-bg: #f7ebf4;
|
||||||
|
--color-current-row-border: #97ccd6;
|
||||||
|
--color-active-pane-border: #97ccd6;
|
||||||
|
--color-button-bg: #f2e5f0;
|
||||||
|
--color-button-hover: #ead8e6;
|
||||||
|
--color-button-secondary-bg: #f8f0f7;
|
||||||
|
--color-list-header-bg: rgba(184, 91, 160, 0.05);
|
||||||
|
--color-list-row-hover: rgba(184, 91, 160, 0.05);
|
||||||
|
--color-danger: #b34d69;
|
||||||
|
--color-danger-bg: #fbe7ec;
|
||||||
|
--color-overlay-bg: rgba(35, 22, 44, 0.22);
|
||||||
|
}
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
:root[data-theme-family="commander-electric"][data-color-mode="dark"] {
|
||||||
|
--color-page-bg: #08111f;
|
||||||
|
--color-surface: #0e2344;
|
||||||
|
--color-surface-elevated: #13305f;
|
||||||
|
--color-border: #29528f;
|
||||||
|
--color-border-strong: #67b8ff;
|
||||||
|
--color-text-primary: #edf6ff;
|
||||||
|
--color-text-muted: #9fc1ea;
|
||||||
|
--color-accent: #53d5ff;
|
||||||
|
--color-accent-contrast: #032239;
|
||||||
|
--color-selection-bg: #184784;
|
||||||
|
--color-selection-border: #66d6ff;
|
||||||
|
--color-current-row-bg: #123563;
|
||||||
|
--color-current-row-border: #ffe36a;
|
||||||
|
--color-active-pane-border: #ffe36a;
|
||||||
|
--color-button-bg: #174177;
|
||||||
|
--color-button-hover: #205394;
|
||||||
|
--color-button-secondary-bg: #102d58;
|
||||||
|
--color-list-header-bg: rgba(83, 213, 255, 0.08);
|
||||||
|
--color-list-row-hover: rgba(83, 213, 255, 0.1);
|
||||||
|
--color-danger: #ffb1a1;
|
||||||
|
--color-danger-bg: #5f261f;
|
||||||
|
--color-overlay-bg: rgba(2, 8, 18, 0.7);
|
||||||
|
--shadow-panel: 0 8px 20px rgba(0, 12, 30, 0.24);
|
||||||
|
--shadow-elevated: 0 18px 40px rgba(0, 12, 34, 0.36);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-theme-family="commander-electric"][data-color-mode="light"] {
|
||||||
|
--color-page-bg: #dceaff;
|
||||||
|
--color-surface: #f8fbff;
|
||||||
|
--color-surface-elevated: #dfeeff;
|
||||||
|
--color-border: #8bb0e3;
|
||||||
|
--color-border-strong: #3478d6;
|
||||||
|
--color-text-primary: #0e2344;
|
||||||
|
--color-text-muted: #4f6891;
|
||||||
|
--color-accent: #006fd6;
|
||||||
|
--color-accent-contrast: #ffffff;
|
||||||
|
--color-selection-bg: #cae2ff;
|
||||||
|
--color-selection-border: #4ba5ff;
|
||||||
|
--color-current-row-bg: #e7f2ff;
|
||||||
|
--color-current-row-border: #d0aa19;
|
||||||
|
--color-active-pane-border: #d0aa19;
|
||||||
|
--color-button-bg: #d5e8ff;
|
||||||
|
--color-button-hover: #c0dcff;
|
||||||
|
--color-button-secondary-bg: #e8f2ff;
|
||||||
|
--color-list-header-bg: rgba(0, 111, 214, 0.06);
|
||||||
|
--color-list-row-hover: rgba(0, 111, 214, 0.06);
|
||||||
|
--color-danger: #b23c2f;
|
||||||
|
--color-danger-bg: #fde9e4;
|
||||||
|
--color-overlay-bg: rgba(8, 23, 45, 0.24);
|
||||||
|
--shadow-panel: 0 8px 18px rgba(28, 66, 120, 0.12);
|
||||||
|
--shadow-elevated: 0 18px 36px rgba(28, 66, 120, 0.18);
|
||||||
|
}
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
:root[data-theme-family="default"][data-color-mode="dark"] {
|
||||||
|
--color-page-bg: #161c25;
|
||||||
|
--color-surface: #1d2531;
|
||||||
|
--color-surface-elevated: #222c39;
|
||||||
|
--color-border: #314052;
|
||||||
|
--color-border-strong: #55739f;
|
||||||
|
--color-text-primary: #e7edf6;
|
||||||
|
--color-text-muted: #9aa9bd;
|
||||||
|
--color-accent: #6aa5ff;
|
||||||
|
--color-accent-contrast: #07192f;
|
||||||
|
--color-selection-bg: #233754;
|
||||||
|
--color-selection-border: #5c8fda;
|
||||||
|
--color-current-row-bg: #1f2d42;
|
||||||
|
--color-current-row-border: #6a87b5;
|
||||||
|
--color-active-pane-border: #78adff;
|
||||||
|
--color-button-bg: #283444;
|
||||||
|
--color-button-hover: #314258;
|
||||||
|
--color-button-secondary-bg: #202935;
|
||||||
|
--color-list-header-bg: rgba(255, 255, 255, 0.02);
|
||||||
|
--color-list-row-hover: rgba(106, 165, 255, 0.08);
|
||||||
|
--color-danger: #ff8e8e;
|
||||||
|
--color-danger-bg: #462328;
|
||||||
|
--color-overlay-bg: rgba(8, 12, 18, 0.62);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-theme-family="default"][data-color-mode="light"] {
|
||||||
|
--color-page-bg: #eef3f9;
|
||||||
|
--color-surface: #ffffff;
|
||||||
|
--color-surface-elevated: #f8fbff;
|
||||||
|
--color-border: #d6e0ec;
|
||||||
|
--color-border-strong: #7ca0d1;
|
||||||
|
--color-text-primary: #172233;
|
||||||
|
--color-text-muted: #617086;
|
||||||
|
--color-accent: #235ec7;
|
||||||
|
--color-accent-contrast: #ffffff;
|
||||||
|
--color-selection-bg: #e5eefc;
|
||||||
|
--color-selection-border: #7b9fdb;
|
||||||
|
--color-current-row-bg: #f1f6ff;
|
||||||
|
--color-current-row-border: #a2bce8;
|
||||||
|
--color-active-pane-border: #235ec7;
|
||||||
|
--color-button-bg: #f6f9fd;
|
||||||
|
--color-button-hover: #edf3fb;
|
||||||
|
--color-button-secondary-bg: #f3f6fb;
|
||||||
|
--color-list-header-bg: #f8fbff;
|
||||||
|
--color-list-row-hover: #f5f9ff;
|
||||||
|
--color-danger: #b42323;
|
||||||
|
--color-danger-bg: #fdecec;
|
||||||
|
--color-overlay-bg: rgba(18, 28, 40, 0.30);
|
||||||
|
}
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
:root[data-theme-family="fluent-neon"][data-color-mode="dark"] {
|
||||||
|
--color-page-bg: #11161d;
|
||||||
|
--color-surface: #1a2430;
|
||||||
|
--color-surface-elevated: #202d3c;
|
||||||
|
--color-border: #38506a;
|
||||||
|
--color-border-strong: #67c9ff;
|
||||||
|
--color-text-primary: #edf7ff;
|
||||||
|
--color-text-muted: #9fb4c7;
|
||||||
|
--color-accent: #3fb4ff;
|
||||||
|
--color-accent-contrast: #05233d;
|
||||||
|
--color-selection-bg: #1f4460;
|
||||||
|
--color-selection-border: #70d2ff;
|
||||||
|
--color-current-row-bg: #1a364d;
|
||||||
|
--color-current-row-border: #8fbeff;
|
||||||
|
--color-active-pane-border: #70d2ff;
|
||||||
|
--color-button-bg: #21405a;
|
||||||
|
--color-button-hover: #29506f;
|
||||||
|
--color-button-secondary-bg: #1a3146;
|
||||||
|
--color-list-header-bg: rgba(63, 180, 255, 0.08);
|
||||||
|
--color-list-row-hover: rgba(63, 180, 255, 0.08);
|
||||||
|
--color-danger: #f1a5a5;
|
||||||
|
--color-danger-bg: #53292f;
|
||||||
|
--color-overlay-bg: rgba(8, 12, 18, 0.64);
|
||||||
|
--shadow-panel: 0 10px 24px rgba(0, 20, 40, 0.24);
|
||||||
|
--shadow-elevated: 0 20px 44px rgba(0, 22, 44, 0.34);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-theme-family="fluent-neon"][data-color-mode="light"] {
|
||||||
|
--color-page-bg: #eff7ff;
|
||||||
|
--color-surface: #ffffff;
|
||||||
|
--color-surface-elevated: #e8f3ff;
|
||||||
|
--color-border: #c8ddf4;
|
||||||
|
--color-border-strong: #66b8ff;
|
||||||
|
--color-text-primary: #132536;
|
||||||
|
--color-text-muted: #667f95;
|
||||||
|
--color-accent: #0d89ec;
|
||||||
|
--color-accent-contrast: #ffffff;
|
||||||
|
--color-selection-bg: #dff1ff;
|
||||||
|
--color-selection-border: #6cc6ff;
|
||||||
|
--color-current-row-bg: #edf7ff;
|
||||||
|
--color-current-row-border: #8fbfff;
|
||||||
|
--color-active-pane-border: #4bb4ff;
|
||||||
|
--color-button-bg: #dfeeff;
|
||||||
|
--color-button-hover: #d0e6ff;
|
||||||
|
--color-button-secondary-bg: #edf5ff;
|
||||||
|
--color-list-header-bg: rgba(13, 137, 236, 0.05);
|
||||||
|
--color-list-row-hover: rgba(13, 137, 236, 0.05);
|
||||||
|
--color-danger: #bb3845;
|
||||||
|
--color-danger-bg: #fde8eb;
|
||||||
|
--color-overlay-bg: rgba(18, 34, 50, 0.22);
|
||||||
|
--shadow-panel: 0 10px 22px rgba(33, 102, 176, 0.12);
|
||||||
|
--shadow-elevated: 0 20px 42px rgba(33, 102, 176, 0.18);
|
||||||
|
}
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
/*
 * Theme: "graphite", dark mode.
 * Neutral gray palette with a muted blue-gray accent.
 * NOTE(review): unlike the fluent-neon rules, this block defines no
 * --shadow-panel / --shadow-elevated — confirm whether shadows should
 * fall back to another rule or are intentionally omitted here.
 */
:root[data-theme-family="graphite"][data-color-mode="dark"] {
  /* Page and panel surfaces */
  --color-page-bg: #171717;
  --color-surface: #202020;
  --color-surface-elevated: #272727;

  /* Borders */
  --color-border: #3c3c3c;
  --color-border-strong: #7b7b7b;

  /* Text */
  --color-text-primary: #efefef;
  --color-text-muted: #ababab;

  /* Accent + text color used on top of the accent */
  --color-accent: #8ca0b5;
  --color-accent-contrast: #111111;

  /* Selection and row highlighting */
  --color-selection-bg: #313131;
  --color-selection-border: #8f9bab;
  --color-current-row-bg: #2a2a2a;
  --color-current-row-border: #8c8c8c;
  --color-active-pane-border: #a8b6c4;

  /* Buttons (note: secondary reuses the elevated-surface gray) */
  --color-button-bg: #2d2d2d;
  --color-button-hover: #353535;
  --color-button-secondary-bg: #272727;

  /* List chrome (white tinted at low alpha) */
  --color-list-header-bg: rgba(255, 255, 255, 0.02);
  --color-list-row-hover: rgba(255, 255, 255, 0.05);

  /* Danger / destructive actions */
  --color-danger: #e19a9a;
  --color-danger-bg: #472c2c;

  /* Modal backdrop */
  --color-overlay-bg: rgba(8, 8, 8, 0.64);
}
|
||||||
|
|
||||||
|
/*
 * Theme: "graphite", light mode.
 * Neutral gray palette with a muted blue-gray accent.
 * NOTE(review): no --shadow-panel / --shadow-elevated defined here,
 * unlike the fluent-neon light rule — confirm the intended fallback.
 */
:root[data-theme-family="graphite"][data-color-mode="light"] {
  /* Page and panel surfaces */
  --color-page-bg: #f0f0f0;
  --color-surface: #ffffff;
  --color-surface-elevated: #f7f7f7;

  /* Borders */
  --color-border: #dbdbdb;
  --color-border-strong: #a2a2a2;

  /* Text */
  --color-text-primary: #232323;
  --color-text-muted: #757575;

  /* Accent + text color used on top of the accent */
  --color-accent: #5f7180;
  --color-accent-contrast: #ffffff;

  /* Selection and row highlighting */
  --color-selection-bg: #ececec;
  --color-selection-border: #b0bcc6;
  --color-current-row-bg: #f5f5f5;
  --color-current-row-border: #c1c1c1;
  --color-active-pane-border: #7d8f9f;

  /* Buttons (note: hover reuses the page background gray) */
  --color-button-bg: #f8f8f8;
  --color-button-hover: #f0f0f0;
  --color-button-secondary-bg: #f3f3f3;

  /* List chrome (opaque grays here, unlike the rgba tints elsewhere) */
  --color-list-header-bg: #f8f8f8;
  --color-list-row-hover: #f4f4f4;

  /* Danger / destructive actions */
  --color-danger: #ab3333;
  --color-danger-bg: #fdeeee;

  /* Modal backdrop */
  --color-overlay-bg: rgba(24, 24, 24, 0.26);
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user