Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 54e56ab0d8 | |||
| 9778dc6c33 | |||
| 2fa4a0b291 | |||
| 4062cbf6c8 | |||
| 841318c9e2 | |||
| 684f52be4d |
@@ -18,7 +18,7 @@ RUN mkdir -p /app/backend /app/html /app/conf /Volumes/8TB /Volumes/8TB_RAID1
|
||||
|
||||
# Installeer een lichtgewicht Python API framework (FastAPI)
|
||||
# We gebruiken --break-system-packages omdat we in een container zitten
|
||||
RUN pip3 install fastapi uvicorn python-multipart --break-system-packages
|
||||
RUN pip3 install fastapi uvicorn python-multipart httpx --break-system-packages
|
||||
|
||||
# Exposeer de poort voor de webinterface
|
||||
EXPOSE 8030
|
||||
|
||||
+279
-441
@@ -1,522 +1,360 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import html
|
||||
import json
|
||||
import mimetypes
|
||||
import os
|
||||
import secrets
|
||||
import shutil
|
||||
import stat
|
||||
import time
|
||||
from datetime import datetime
|
||||
import struct
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import Literal, Optional
|
||||
|
||||
from fastapi import Body, FastAPI, File, Form, HTTPException, Request, UploadFile
|
||||
from fastapi.responses import FileResponse, HTMLResponse, JSONResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from pydantic import BaseModel, Field
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
from fastapi.responses import FileResponse, JSONResponse
|
||||
|
||||
APP_NAME = "Finder Commander"
|
||||
HOME_ROOT = Path.home().resolve()
|
||||
TRASH_DIR = HOME_ROOT / ".Trash"
|
||||
MAX_TEXT_PREVIEW_BYTES = 2 * 1024 * 1024
|
||||
CSRF_TOKEN = secrets.token_urlsafe(32)
|
||||
|
||||
app = FastAPI(title=APP_NAME)
|
||||
app.mount("/static", StaticFiles(directory=str(Path(__file__).parent / "static")), name="static")
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).parent / "templates"))
|
||||
|
||||
|
||||
class PathsPayload(BaseModel):
    """Request body carrying a selection of home-relative paths."""

    paths: list[str] = Field(default_factory=list)  # home-relative source paths
    destination_dir: Optional[str] = None  # required by the copy/move endpoints
|
||||
|
||||
|
||||
class RenamePayload(BaseModel):
    """Request body for renaming one entry in place."""

    path: str  # home-relative path of the entry to rename
    new_name: str  # single path component; validated by sanitize_name
|
||||
|
||||
|
||||
class DeletePayload(BaseModel):
    """Request body for deleting a selection, to trash or permanently."""

    paths: list[str] = Field(default_factory=list)  # home-relative paths to delete
    mode: Literal["trash", "permanent"] = "trash"  # "trash" moves into ~/.Trash
|
||||
|
||||
|
||||
class CommandPayload(BaseModel):
    """Request body for the whitelisted pseudo-shell endpoint."""

    command: str  # e.g. "mkdir docs"; parsed by run_command
    cwd: str = ""  # home-relative working directory, "" = home root
|
||||
|
||||
# Resolve any forward references eagerly so schema problems surface at import
# time rather than on the first request.
PathsPayload.model_rebuild()
RenamePayload.model_rebuild()
DeletePayload.model_rebuild()
CommandPayload.model_rebuild()
|
||||
|
||||
TEXT_SUFFIXES = {
|
||||
".md",
|
||||
".txt",
|
||||
".py",
|
||||
".js",
|
||||
".ts",
|
||||
".tsx",
|
||||
".jsx",
|
||||
".css",
|
||||
".html",
|
||||
".json",
|
||||
".yaml",
|
||||
".yml",
|
||||
".toml",
|
||||
".ini",
|
||||
".env",
|
||||
".log",
|
||||
".xml",
|
||||
".sh",
|
||||
".zsh",
|
||||
".bash",
|
||||
".c",
|
||||
".cpp",
|
||||
".h",
|
||||
".java",
|
||||
".go",
|
||||
".rs",
|
||||
".sql",
|
||||
".conf",
|
||||
".service",
|
||||
".container",
|
||||
".network",
|
||||
".pod",
|
||||
".kube",
|
||||
APP_NAME = "Finder Commander Remote Agent"
|
||||
DEFAULT_PORT = 8765
|
||||
TEXT_PREVIEW_MAX_BYTES = 256 * 1024
|
||||
TEXT_CONTENT_TYPES = {
|
||||
".txt": "text/plain",
|
||||
".log": "text/plain",
|
||||
".conf": "text/plain",
|
||||
".ini": "text/plain",
|
||||
".cfg": "text/plain",
|
||||
".md": "text/markdown",
|
||||
".yml": "text/yaml",
|
||||
".yaml": "text/yaml",
|
||||
".json": "application/json",
|
||||
".js": "text/javascript",
|
||||
".py": "text/x-python",
|
||||
".css": "text/css",
|
||||
".html": "text/html",
|
||||
}
|
||||
SPECIAL_TEXT_FILENAMES = {
|
||||
"dockerfile": "text/plain",
|
||||
"containerfile": "text/plain",
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AgentRuntimeConfig:
    """Immutable snapshot of the agent's configuration (built by get_runtime_config)."""

    config_path: Path | None  # file the config was loaded from, if any
    agent_access_token: str  # bearer token; empty string disables auth
    shares: dict[str, str]  # share name -> absolute directory path
    display_name: str  # human-readable machine name
    endpoint: str  # public endpoint advertised to the web manager
    client_id: str
    platform: str  # defaults to "macos" when unset
|
||||
|
||||
|
||||
def _now_iso() -> str:
|
||||
return datetime.utcnow().isoformat(timespec="seconds") + "Z"
|
||||
return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
|
||||
|
||||
|
||||
def rel_from_home(path: Path) -> str:
    """Home-relative string form of *path*; the home root itself maps to ''."""
    if path == HOME_ROOT:
        return ""
    return str(path.relative_to(HOME_ROOT))
|
||||
def _candidate_config_paths() -> list[Path]:
|
||||
candidates: list[Path] = []
|
||||
env_path = os.getenv("FINDER_COMMANDER_REMOTE_AGENT_CONFIG", "").strip()
|
||||
if env_path:
|
||||
candidates.append(Path(env_path).expanduser().resolve(strict=False))
|
||||
base_dir = Path(__file__).resolve().parents[1]
|
||||
candidates.append(base_dir / "remote_client_agent.launchd.json")
|
||||
candidates.append(base_dir / "remote_client_agent.example.json")
|
||||
return candidates
|
||||
|
||||
|
||||
|
||||
def ensure_within_home(candidate: Path) -> Path:
    """Return *candidate* unchanged if it lies inside HOME_ROOT, else raise 403."""
    try:
        candidate.relative_to(HOME_ROOT)
    except ValueError as err:
        # relative_to() raising means the path is outside the home tree.
        raise HTTPException(status_code=403, detail="Path escapes home directory") from err
    return candidate
|
||||
def _load_raw_config() -> tuple[Path | None, dict]:
    """Load the first existing config file; return ``(path, data)`` or ``(None, {})``."""
    for candidate_path in _candidate_config_paths():
        if not candidate_path.is_file():
            continue
        try:
            parsed = json.loads(candidate_path.read_text(encoding="utf-8"))
        except ValueError as exc:  # json.JSONDecodeError is a ValueError subclass
            raise RuntimeError(f"Invalid JSON in config file: {candidate_path}") from exc
        if not isinstance(parsed, dict):
            raise RuntimeError(f"Config file must contain a JSON object: {candidate_path}")
        return candidate_path.resolve(strict=False), parsed
    return None, {}
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_runtime_config() -> AgentRuntimeConfig:
|
||||
config_path, raw = _load_raw_config()
|
||||
shares_raw = raw.get("shares", {})
|
||||
shares: dict[str, str] = {}
|
||||
if isinstance(shares_raw, dict):
|
||||
for key, value in shares_raw.items():
|
||||
normalized_key = str(key).strip()
|
||||
normalized_value = str(value).strip()
|
||||
if normalized_key and normalized_value:
|
||||
shares[normalized_key] = normalized_value
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Validate a single path component; reject empty names, '.', '..' and slashes."""
    cleaned = (name or "").strip()
    reserved = cleaned in {".", ".."}
    if not cleaned or reserved or "/" in cleaned:
        raise HTTPException(status_code=400, detail="Invalid name")
    return cleaned
|
||||
return AgentRuntimeConfig(
|
||||
config_path=config_path,
|
||||
agent_access_token=os.getenv("FINDER_COMMANDER_AGENT_ACCESS_TOKEN", "").strip()
|
||||
or str(raw.get("agent_access_token", "")).strip(),
|
||||
shares=shares,
|
||||
display_name=str(raw.get("display_name", "")).strip(),
|
||||
endpoint=str(raw.get("public_endpoint", raw.get("endpoint", ""))).strip(),
|
||||
client_id=str(raw.get("client_id", "")).strip(),
|
||||
platform=str(raw.get("platform", "macos")).strip() or "macos",
|
||||
)
|
||||
|
||||
|
||||
|
||||
def resolve_user_path(raw_path: Optional[str], *, must_exist: bool = True) -> Path:
    """Resolve a home-relative path, refusing escapes; optionally require existence."""
    cleaned = (raw_path or "").strip()
    resolved = ensure_within_home((HOME_ROOT / cleaned).resolve(strict=False))
    if must_exist and not resolved.exists():
        raise HTTPException(status_code=404, detail="Path not found")
    return resolved
|
||||
|
||||
|
||||
|
||||
def check_origin(request: Request) -> None:
|
||||
origin = request.headers.get("origin")
|
||||
if not origin:
|
||||
def require_agent_auth(request: Request) -> None:
|
||||
config = get_runtime_config()
|
||||
if not config.agent_access_token:
|
||||
return
|
||||
expected = str(request.base_url).rstrip("/")
|
||||
if origin.rstrip("/") != expected:
|
||||
raise HTTPException(status_code=403, detail="Origin not allowed")
|
||||
authorization = request.headers.get("authorization", "").strip()
|
||||
if authorization != f"Bearer {config.agent_access_token}":
|
||||
raise_agent_error(
|
||||
status_code=403,
|
||||
code="invalid_agent_token",
|
||||
message="Invalid agent token",
|
||||
extra={
|
||||
"config_path": str(config.config_path) if config.config_path else None,
|
||||
"client_id": config.client_id or None,
|
||||
"display_name": config.display_name or None,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
def check_csrf(request: Request) -> None:
    """Reject the request unless the X-CSRF-Token header matches the server token."""
    if request.headers.get("x-csrf-token") != CSRF_TOKEN:
        raise HTTPException(status_code=403, detail="Invalid CSRF token")
|
||||
def raise_agent_error(status_code: int, code: str, message: str, *, extra: dict | None = None) -> None:
    """Raise an HTTPException whose detail is a structured ``{code, message, ...}`` object."""
    payload = {"code": code, "message": message, **(extra or {})}
    raise HTTPException(status_code=status_code, detail=payload)
|
||||
|
||||
|
||||
|
||||
def perms_string(mode: int) -> str:
    """Render an ``st_mode`` value in ``ls -l`` style, e.g. ``-rw-r--r--``."""
    return stat.filemode(mode)
|
||||
def get_share_root(share: str) -> Path:
    """Map a configured share name to its root directory; unknown shares give 404."""
    shares = get_runtime_config().shares
    key = (share or "").strip()
    if key not in shares:
        raise_agent_error(404, "path_not_found", "Share not found")
    return Path(shares[key]).expanduser().resolve(strict=False)
|
||||
|
||||
|
||||
|
||||
def can_preview_text(path: Path) -> bool:
    """Whether *path* is a small, text-like file suitable for inline preview."""
    if path.is_dir():
        return False
    if path.stat().st_size > MAX_TEXT_PREVIEW_BYTES:
        return False
    mime, _ = mimetypes.guess_type(path.name)
    textual_mimes = {"application/json", "application/xml", "application/javascript"}
    if mime is not None and (mime.startswith("text/") or mime in textual_mimes):
        return True
    # Fall back to a whitelist of known-text extensions.
    return path.suffix.lower() in TEXT_SUFFIXES
|
||||
def ensure_within_root(root: Path, candidate: Path) -> Path:
    """Return *candidate* unchanged, erroring with 403 when it lies outside *root*."""
    try:
        candidate.relative_to(root)
    except ValueError:
        raise_agent_error(403, "path_traversal_detected", "Path escapes share root")
    return candidate
|
||||
|
||||
|
||||
def resolve_share_path(share: str, raw_path: str, *, must_exist: bool = True) -> Path:
    """Resolve a share-relative path safely inside the share's root directory.

    Absolute paths and any ``..`` component are rejected up front, and the
    resolved result is re-checked against the root to catch symlink or
    normalisation escapes.
    """
    root = get_share_root(share)
    # Accept Windows-style separators from clients by normalising to "/".
    normalized = (raw_path or "").strip().replace("\\", "/")
    if normalized.startswith("/") or any(part == ".." for part in normalized.split("/")):
        raise_agent_error(400, "invalid_request", "Invalid share-relative path")
    candidate = (root / normalized).resolve(strict=False)
    candidate = ensure_within_root(root, candidate)
    if must_exist and not candidate.exists():
        raise_agent_error(404, "path_not_found", "Path not found")
    return candidate
|
||||
|
||||
def entry_payload(path: Path) -> dict:
|
||||
st = path.lstat()
|
||||
kind = "directory" if path.is_dir() else "file"
|
||||
mime, _ = mimetypes.guess_type(path.name)
|
||||
|
||||
def directory_entry_payload(path: Path) -> dict:
|
||||
stat_result = path.lstat()
|
||||
return {
|
||||
"name": path.name,
|
||||
"rel_path": rel_from_home(path),
|
||||
"parent_rel_path": rel_from_home(path.parent),
|
||||
"kind": kind,
|
||||
"is_symlink": path.is_symlink(),
|
||||
"size": st.st_size,
|
||||
"modified": datetime.fromtimestamp(st.st_mtime).isoformat(timespec="seconds"),
|
||||
"mime": mime or "application/octet-stream",
|
||||
"perms": perms_string(st.st_mode),
|
||||
"can_preview_text": can_preview_text(path) if path.is_file() else False,
|
||||
"kind": "directory" if path.is_dir() else "file",
|
||||
"size": stat_result.st_size,
|
||||
"modified": datetime.fromtimestamp(stat_result.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z"),
|
||||
}
|
||||
|
||||
|
||||
def info_payload(path: Path, *, share: str, raw_path: str) -> dict:
    """Build the /api/info response dict for one entry inside *share*."""
    stat_result = path.lstat()  # lstat: describe a symlink itself, not its target
    kind = "directory" if path.is_dir() else "file"
    mime, _ = mimetypes.guess_type(path.name)
    # Image dimensions are only probed for regular files (PNG/GIF/BMP headers).
    width, height = image_dimensions(path) if path.is_file() else (None, None)
    return {
        "share": share,
        # Echo a canonical share-relative path: forward slashes, no edge slashes.
        "path": raw_path.strip().replace("\\", "/").strip("/"),
        "name": path.name,
        "kind": kind,
        "size": None if path.is_dir() else stat_result.st_size,
        "modified": datetime.fromtimestamp(stat_result.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z"),
        "content_type": mime or "application/octet-stream",
        "extension": path.suffix.lower() or None,
        "width": width,
        "height": height,
        "owner": None,  # ownership lookup not implemented
        "group": None,
        "config_path": str(get_runtime_config().config_path) if get_runtime_config().config_path else None,
    }
|
||||
|
||||
def sorted_entries(path: Path, show_hidden: bool = False) -> list[dict]:
|
||||
|
||||
def list_directory(path: Path, *, show_hidden: bool) -> list[dict]:
|
||||
try:
|
||||
children = list(path.iterdir())
|
||||
except PermissionError as exc:
|
||||
raise HTTPException(status_code=403, detail="Permission denied by operating system") from exc
|
||||
_ = exc
|
||||
raise_agent_error(403, "forbidden", "Permission denied by operating system")
|
||||
filtered = []
|
||||
for child in children:
|
||||
if not show_hidden and child.name.startswith('.'):
|
||||
if not show_hidden and child.name.startswith("."):
|
||||
continue
|
||||
filtered.append(child)
|
||||
filtered.sort(key=lambda p: (not p.is_dir(), p.name.lower()))
|
||||
return [entry_payload(child) for child in filtered]
|
||||
filtered.sort(key=lambda item: (not item.is_dir(), item.name.lower()))
|
||||
return [directory_entry_payload(child) for child in filtered]
|
||||
|
||||
|
||||
|
||||
def move_to_trash(path: Path) -> Path:
    """Move *path* into ~/.Trash, de-duplicating a clashing name with a timestamp."""
    TRASH_DIR.mkdir(parents=True, exist_ok=True)
    destination = TRASH_DIR / path.name
    if destination.exists():
        # Keep the earlier trashed entry; stamp the new one instead.
        stamped = f"{destination.stem}-{int(time.time())}{destination.suffix}"
        destination = TRASH_DIR / stamped
    shutil.move(str(path), str(destination))
    return destination
|
||||
def text_content_type_for_name(name: str) -> str | None:
    """Content type for a previewable text file name, or ``None`` if unsupported."""
    lowered = (name or "").lower()
    # Extension-less special files (Dockerfile etc.) are matched by full name.
    if lowered in SPECIAL_TEXT_FILENAMES:
        return SPECIAL_TEXT_FILENAMES[lowered]
    return TEXT_CONTENT_TYPES.get(Path(name).suffix.lower())
|
||||
|
||||
|
||||
|
||||
def copy_entry(source: Path, destination_dir: Path) -> Path:
    """Copy a file or directory tree into *destination_dir*; 409 on a name clash."""
    target = destination_dir / source.name
    if target.exists():
        raise HTTPException(status_code=409, detail=f"Destination already exists: {target.name}")
    if source.is_dir():
        # Preserve symlinks inside the tree rather than following them.
        shutil.copytree(source, target, symlinks=True)
    else:
        shutil.copy2(source, target, follow_symlinks=False)
    return target
|
||||
def read_text_preview(path: Path, *, max_bytes: int) -> dict:
    """Read up to *max_bytes* of *path* as UTF-8 text for a preview payload.

    Raises a 409 agent error for binary content (NUL bytes or invalid
    UTF-8).  The result reports size, mtime, encoding, a truncation flag
    and the decoded content.
    """
    size = int(path.stat().st_size)
    # Clamp the caller's limit into [1, TEXT_PREVIEW_MAX_BYTES].
    preview_limit = min(max(1, int(max_bytes)), TEXT_PREVIEW_MAX_BYTES)
    with path.open("rb") as handle:
        # Read one byte past the limit so truncation is detected even if the
        # file grew after stat().
        raw = handle.read(preview_limit + 1)
    truncated = size > preview_limit or len(raw) > preview_limit
    if truncated:
        raw = raw[:preview_limit]
    # NUL bytes are a cheap binary-content heuristic; refuse before decoding.
    if b"\x00" in raw:
        raise_agent_error(409, "unsupported_type", "Binary content is not supported for text preview")
    try:
        content = raw.decode("utf-8")
    except UnicodeDecodeError as exc:
        _ = exc
        raise_agent_error(409, "unsupported_type", "Binary content is not supported for text preview")
    return {
        "size": size,
        "modified": datetime.fromtimestamp(path.stat().st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z"),
        "encoding": "utf-8",
        "truncated": truncated,
        "content": content,
    }
|
||||
|
||||
|
||||
|
||||
def move_entry(source: Path, destination_dir: Path) -> Path:
    """Move a file or directory into *destination_dir*; 409 on a name clash."""
    target = destination_dir / source.name
    if target.exists():
        raise HTTPException(status_code=409, detail=f"Destination already exists: {target.name}")
    shutil.move(str(source), str(target))
    return target
|
||||
def image_dimensions(path: Path) -> tuple[int | None, int | None]:
|
||||
suffix = path.suffix.lower()
|
||||
try:
|
||||
if suffix == ".png":
|
||||
with path.open("rb") as handle:
|
||||
header = handle.read(24)
|
||||
if len(header) < 24 or header[:8] != b"\x89PNG\r\n\x1a\n":
|
||||
return None, None
|
||||
return struct.unpack(">II", header[16:24])
|
||||
if suffix == ".gif":
|
||||
with path.open("rb") as handle:
|
||||
header = handle.read(10)
|
||||
if len(header) < 10 or header[:6] not in {b"GIF87a", b"GIF89a"}:
|
||||
return None, None
|
||||
return struct.unpack("<HH", header[6:10])
|
||||
if suffix == ".bmp":
|
||||
with path.open("rb") as handle:
|
||||
header = handle.read(26)
|
||||
if len(header) < 26 or header[:2] != b"BM":
|
||||
return None, None
|
||||
width, height = struct.unpack("<ii", header[18:26])
|
||||
return abs(width), abs(height)
|
||||
except (OSError, ValueError, struct.error):
|
||||
return None, None
|
||||
return None, None
|
||||
|
||||
|
||||
|
||||
def select_paths_or_current(paths: list[str], cwd: str) -> list[Path]:
    """Resolve each selected home-relative path; 400 when the selection is empty.

    NOTE(review): *cwd* is currently unused — presumably intended as a
    "current directory" fallback that was never wired up; confirm before
    removing it.
    """
    result = [resolve_user_path(p) for p in paths]
    if not result:
        raise HTTPException(status_code=400, detail="No paths selected")
    return result
|
||||
app = FastAPI(title=APP_NAME)
|
||||
|
||||
|
||||
|
||||
def resolve_from_cwd(cwd_path: Path, raw: str, *, must_exist: bool = True) -> Path:
    """Resolve *raw* relative to *cwd_path*, confined to the home tree."""
    cleaned = (raw or "").strip()
    resolved = ensure_within_home((cwd_path / cleaned).resolve(strict=False))
    if must_exist and not resolved.exists():
        raise HTTPException(status_code=404, detail="Path not found")
    return resolved
|
||||
|
||||
|
||||
def run_command(command: str, cwd: str) -> dict:
    """Execute one whitelisted pseudo-shell command relative to *cwd*.

    Supported verbs: ``cd``, ``mkdir``, ``touch``, ``select``, ``help``;
    anything else is a 400.  No real shell is involved — every path is
    confined to the home tree via resolve_user_path / resolve_from_cwd.
    """
    command = (command or "").strip()
    if not command:
        raise HTTPException(status_code=400, detail="Empty command")
    cwd_path = resolve_user_path(cwd)
    if not cwd_path.is_dir():
        raise HTTPException(status_code=400, detail="CWD is not a directory")

    # Naive whitespace split: quoting is deliberately unsupported.
    parts = command.split()
    verb = parts[0].lower()
    args = parts[1:]

    if verb == "cd":
        raw_target = " ".join(args) if args else ""
        # Absolute-looking targets still go through resolve_user_path, which
        # rejects anything outside HOME_ROOT.
        target = resolve_user_path(raw_target) if raw_target.startswith("/") else resolve_from_cwd(cwd_path, raw_target or ".")
        if not target.is_dir():
            raise HTTPException(status_code=400, detail="Target is not a directory")
        return {"ok": True, "action": "cd", "cwd": rel_from_home(target), "message": str(target)}

    if verb == "mkdir":
        name = sanitize_name(" ".join(args))
        target = resolve_from_cwd(cwd_path, name, must_exist=False)
        target.mkdir(exist_ok=False)
        return {"ok": True, "action": "mkdir", "cwd": rel_from_home(cwd_path), "message": f"Created {name}"}

    if verb == "touch":
        name = sanitize_name(" ".join(args))
        target = resolve_from_cwd(cwd_path, name, must_exist=False)
        target.touch(exist_ok=False)
        return {"ok": True, "action": "touch", "cwd": rel_from_home(cwd_path), "message": f"Created {name}"}

    if verb == "select":
        pattern = " ".join(args).strip() or "*"
        # Hidden files are included so globs like ".*" can match.
        entries = sorted_entries(cwd_path, show_hidden=True)
        matches = [e["rel_path"] for e in entries if fnmatch.fnmatch(e["name"], pattern)]
        return {
            "ok": True,
            "action": "select",
            "cwd": rel_from_home(cwd_path),
            "message": f"Matched {len(matches)} item(s)",
            "matches": matches,
        }

    if verb == "help":
        return {
            "ok": True,
            "action": "help",
            "cwd": rel_from_home(cwd_path),
            "message": "Commands: cd <path>, mkdir <name>, touch <name>, select <glob>, help",
        }

    raise HTTPException(status_code=400, detail="Unsupported command")
|
||||
|
||||
|
||||
@app.middleware("http")
async def harden_headers(request: Request, call_next):
    """Attach defensive security headers to every response."""
    response = await call_next(request)
    response.headers["X-Frame-Options"] = "DENY"
    # Strict same-origin CSP: no inline scripts/styles, no framing.
    response.headers["Content-Security-Policy"] = (
        "default-src 'self'; img-src 'self' data:; style-src 'self'; script-src 'self'; "
        "connect-src 'self'; frame-ancestors 'none'; base-uri 'self'; form-action 'self'"
    )
    response.headers["Referrer-Policy"] = "no-referrer"
    response.headers["X-Content-Type-Options"] = "nosniff"
    return response
|
||||
@app.get("/")
def root() -> dict:
    """Unauthenticated identity/status document for the agent."""
    config = get_runtime_config()
    return {
        "ok": True,
        "app": APP_NAME,
        "time": _now_iso(),
        "client_id": config.client_id or None,
        "display_name": config.display_name or None,
        "config_path": str(config.config_path) if config.config_path else None,
        "shares": sorted(config.shares.keys()),
        "auth_enabled": bool(config.agent_access_token),
    }
|
||||
|
||||
|
||||
@app.get("/health")
def health() -> dict:
    """Liveness probe: app identity, current time and the served home root."""
    return {"ok": True, "app": APP_NAME, "time": _now_iso(), "home": str(HOME_ROOT)}
|
||||
|
||||
|
||||
@app.get("/", response_class=HTMLResponse)
def index(request: Request):
    """Render the single-page UI, passing the per-process CSRF token.

    NOTE(review): another handler also binds "/" in this file — only one
    registration can serve the route; confirm which is intended.
    """
    return templates.TemplateResponse(
        request,
        "index.html",
        {
            "app_name": APP_NAME,
            "home_root": str(HOME_ROOT),
            "csrf_token": CSRF_TOKEN,
        },
    )
|
||||
def health(request: Request) -> dict:
|
||||
require_agent_auth(request)
|
||||
config = get_runtime_config()
|
||||
return {
|
||||
"ok": True,
|
||||
"app": APP_NAME,
|
||||
"time": _now_iso(),
|
||||
"client_id": config.client_id or None,
|
||||
"display_name": config.display_name or None,
|
||||
"platform": config.platform,
|
||||
"endpoint": config.endpoint or None,
|
||||
"shares": sorted(config.shares.keys()),
|
||||
"config_path": str(config.config_path) if config.config_path else None,
|
||||
"port_hint": DEFAULT_PORT,
|
||||
"auth_enabled": bool(config.agent_access_token),
|
||||
}
|
||||
|
||||
|
||||
@app.get("/api/list")
|
||||
def api_list(path: str = "", show_hidden: bool = False) -> dict:
|
||||
target = resolve_user_path(path)
|
||||
def api_list(request: Request, share: str, path: str = "", show_hidden: bool = False) -> dict:
|
||||
require_agent_auth(request)
|
||||
target = resolve_share_path(share, path)
|
||||
if not target.is_dir():
|
||||
raise HTTPException(status_code=400, detail="Path is not a directory")
|
||||
return {
|
||||
"cwd": rel_from_home(target),
|
||||
"absolute": str(target),
|
||||
"parent": "" if target == HOME_ROOT else rel_from_home(target.parent),
|
||||
"entries": sorted_entries(target, show_hidden=show_hidden),
|
||||
"share": share.strip(),
|
||||
"path": path.strip().replace("\\", "/").strip("/"),
|
||||
"entries": list_directory(target, show_hidden=show_hidden),
|
||||
}
|
||||
|
||||
|
||||
@app.get("/api/info")
def api_info(request: Request, share: str, path: str = "") -> dict:
    """Return metadata (size, type, dimensions, mtime) for one entry in a share."""
    require_agent_auth(request)
    target = resolve_share_path(share, path)
    return info_payload(target, share=share.strip(), raw_path=path)
|
||||
|
||||
|
||||
@app.get("/api/read")
|
||||
def api_read(path: str) -> dict:
|
||||
target = resolve_user_path(path)
|
||||
def api_read(request: Request, share: str, path: str = "", max_bytes: int = TEXT_PREVIEW_MAX_BYTES) -> dict:
|
||||
require_agent_auth(request)
|
||||
target = resolve_share_path(share, path)
|
||||
if target.is_dir():
|
||||
raise HTTPException(status_code=400, detail="Cannot read a directory as text")
|
||||
if not can_preview_text(target):
|
||||
raise HTTPException(status_code=415, detail="File is not previewable as text")
|
||||
try:
|
||||
content = target.read_text(encoding="utf-8")
|
||||
encoding = "utf-8"
|
||||
except UnicodeDecodeError:
|
||||
content = target.read_text(encoding="utf-8", errors="replace")
|
||||
encoding = "utf-8 (lossy)"
|
||||
raise_agent_error(409, "type_conflict", "Source must be a file")
|
||||
if not target.is_file():
|
||||
raise_agent_error(409, "type_conflict", "Unsupported path type for read")
|
||||
content_type = text_content_type_for_name(target.name)
|
||||
if content_type is None:
|
||||
raise_agent_error(409, "unsupported_type", "File type is not supported for text preview")
|
||||
return {
|
||||
"path": rel_from_home(target),
|
||||
"name": target.name,
|
||||
"encoding": encoding,
|
||||
"content": content,
|
||||
"size": target.stat().st_size,
|
||||
"path": path.strip().replace("\\", "/").strip("/"),
|
||||
"content_type": content_type,
|
||||
**read_text_preview(target, max_bytes=max_bytes),
|
||||
}
|
||||
|
||||
|
||||
@app.get("/api/meta")
def api_meta(path: str) -> dict:
    """Return entry metadata plus the absolute filesystem path."""
    target = resolve_user_path(path)
    payload = entry_payload(target)
    payload["absolute"] = str(target)
    return payload
|
||||
|
||||
|
||||
@app.get("/api/download")
|
||||
def api_download(path: str):
|
||||
target = resolve_user_path(path)
|
||||
def api_download(request: Request, share: str, path: str = "") -> FileResponse:
|
||||
require_agent_auth(request)
|
||||
target = resolve_share_path(share, path)
|
||||
if target.is_dir():
|
||||
raise HTTPException(status_code=400, detail="Cannot download a directory")
|
||||
return FileResponse(path=target, filename=target.name)
|
||||
|
||||
|
||||
@app.get("/api/preview")
def api_preview(path: str):
    """Stream an image file inline; directories give 400, non-images 415."""
    target = resolve_user_path(path)
    if target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot preview a directory")
    mime, _ = mimetypes.guess_type(target.name)
    if not mime or not mime.startswith("image/"):
        raise HTTPException(status_code=415, detail="Preview only supports images")
    return FileResponse(path=target, media_type=mime)
|
||||
|
||||
|
||||
@app.put("/api/write")
async def api_write(request: Request, path: str = Form(...), content: str = Form(...)) -> dict:
    """Atomically write UTF-8 *content* to a home-relative *path*."""
    check_origin(request)
    check_csrf(request)
    target = resolve_user_path(path, must_exist=False)
    # Re-check the parent too, in case the final component does not exist yet.
    ensure_within_home(target.parent.resolve(strict=False))
    if target.exists() and target.is_dir():
        raise HTTPException(status_code=400, detail="Cannot overwrite a directory")
    target.parent.mkdir(parents=True, exist_ok=True)
    # Write to a sibling temp file, then rename: os.replace is atomic on POSIX.
    tmp = target.with_name(target.name + ".tmp-write")
    tmp.write_text(content, encoding="utf-8")
    os.replace(tmp, target)
    return {"ok": True, "path": rel_from_home(target)}
|
||||
|
||||
|
||||
@app.post("/api/mkdir")
async def api_mkdir(request: Request, path: str = Form(...), name: str = Form(...)) -> dict:
    """Create a single directory *name* inside the home-relative *path*."""
    check_origin(request)
    check_csrf(request)
    base = resolve_user_path(path)
    if not base.is_dir():
        raise HTTPException(status_code=400, detail="Base path is not a directory")
    # Round-tripping through rel_from_home + resolve_user_path re-validates containment.
    child = resolve_user_path(str(Path(rel_from_home(base)) / sanitize_name(name)), must_exist=False)
    child.mkdir(parents=False, exist_ok=False)
    return {"ok": True, "path": rel_from_home(child)}
|
||||
|
||||
|
||||
@app.post("/api/upload")
async def api_upload(request: Request, path: str = Form(...), files: list[UploadFile] = File(...)) -> dict:
    """Stream one or more uploaded files into a home-relative directory."""
    check_origin(request)
    check_csrf(request)
    base = resolve_user_path(path)
    if not base.is_dir():
        raise HTTPException(status_code=400, detail="Upload target is not a directory")
    saved: list[str] = []
    for upload in files:
        # Path(...).name strips any client-supplied directory components.
        filename = Path(upload.filename or "").name
        if not filename:
            continue
        destination = resolve_user_path(str(Path(rel_from_home(base)) / filename), must_exist=False)
        with destination.open("wb") as f:
            # 1 MiB chunks keep memory bounded for large uploads.
            while chunk := await upload.read(1024 * 1024):
                f.write(chunk)
        saved.append(rel_from_home(destination))
    return {"ok": True, "saved": saved}
|
||||
|
||||
|
||||
@app.post("/api/rename")
async def api_rename(request: Request, payload: RenamePayload) -> dict:
    """Rename an entry in place; the new name must be a single path component."""
    check_origin(request)
    check_csrf(request)
    source = resolve_user_path(payload.path)
    destination = resolve_user_path(str(Path(rel_from_home(source.parent)) / sanitize_name(payload.new_name)), must_exist=False)
    if destination.exists():
        raise HTTPException(status_code=409, detail="Destination already exists")
    os.replace(source, destination)
    return {"ok": True, "old_path": rel_from_home(source), "new_path": rel_from_home(destination)}
|
||||
|
||||
|
||||
@app.post("/api/copy")
async def api_copy(request: Request, payload: PathsPayload) -> dict:
    """Copy the selected entries into destination_dir (409 on any name clash)."""
    check_origin(request)
    check_csrf(request)
    if payload.destination_dir is None:
        raise HTTPException(status_code=400, detail="Missing destination_dir")
    destination_dir = resolve_user_path(payload.destination_dir)
    if not destination_dir.is_dir():
        raise HTTPException(status_code=400, detail="Destination is not a directory")
    results = []
    for source in select_paths_or_current(payload.paths, payload.destination_dir):
        copied = copy_entry(source, destination_dir)
        results.append(rel_from_home(copied))
    return {"ok": True, "copied": results}
|
||||
|
||||
|
||||
@app.post("/api/move")
async def api_move(request: Request, payload: PathsPayload) -> dict:
    """Move the selected entries into destination_dir (409 on any name clash)."""
    check_origin(request)
    check_csrf(request)
    if payload.destination_dir is None:
        raise HTTPException(status_code=400, detail="Missing destination_dir")
    destination_dir = resolve_user_path(payload.destination_dir)
    if not destination_dir.is_dir():
        raise HTTPException(status_code=400, detail="Destination is not a directory")
    results = []
    for source in select_paths_or_current(payload.paths, payload.destination_dir):
        moved = move_entry(source, destination_dir)
        results.append(rel_from_home(moved))
    return {"ok": True, "moved": results}
|
||||
|
||||
|
||||
@app.post("/api/delete")
async def api_delete(request: Request, payload: DeletePayload) -> dict:
    """Delete the selected entries, either into ~/.Trash or permanently."""
    check_origin(request)
    check_csrf(request)
    paths = select_paths_or_current(payload.paths, "")
    deleted = []
    for target in paths:
        if target == HOME_ROOT:
            raise HTTPException(status_code=400, detail="Refusing to delete home root")
        if payload.mode == "trash":
            moved = move_to_trash(target)
            deleted.append(str(moved))  # absolute path of the trashed copy
        else:
            if target.is_dir():
                shutil.rmtree(target)
            else:
                target.unlink()
            deleted.append(rel_from_home(target))
    return {"ok": True, "mode": payload.mode, "deleted": deleted}
|
||||
|
||||
|
||||
@app.post("/api/command")
async def api_command(request: Request, payload: CommandPayload) -> dict:
    """Run one whitelisted pseudo-shell command (see run_command)."""
    check_origin(request)
    check_csrf(request)
    return run_command(payload.command, payload.cwd)
|
||||
raise_agent_error(409, "type_conflict", "Source must be a file")
|
||||
if not target.is_file():
|
||||
raise_agent_error(409, "type_conflict", "Unsupported path type for download")
|
||||
return FileResponse(
|
||||
path=target,
|
||||
media_type=mimetypes.guess_type(target.name)[0] or "application/octet-stream",
|
||||
filename=target.name,
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(HTTPException)
|
||||
async def http_exception_handler(_: Request, exc: HTTPException):
|
||||
async def http_exception_handler(_: Request, exc: HTTPException) -> JSONResponse:
|
||||
return JSONResponse(status_code=exc.status_code, content={"ok": False, "detail": exc.detail})
|
||||
|
||||
|
||||
@app.exception_handler(Exception)
|
||||
async def unhandled_exception_handler(_: Request, exc: Exception):
|
||||
return JSONResponse(status_code=500, content={"ok": False, "detail": html.escape(str(exc))})
|
||||
async def unhandled_exception_handler(_: Request, exc: Exception) -> JSONResponse:
|
||||
return JSONResponse(status_code=500, content={"ok": False, "detail": str(exc)})
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.webmanager.remote-client-agent</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/bin/python3</string>
|
||||
<string>-u</string>
|
||||
<string>/workspace/webmanager-mvp/finder_commander/remote_client_agent.py</string>
|
||||
<string>--config</string>
|
||||
<string>/workspace/webmanager-mvp/finder_commander/remote_client_agent.launchd.json</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
<key>StandardOutPath</key>
|
||||
<string>/tmp/com.webmanager.remote-client-agent.out.log</string>
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/tmp/com.webmanager.remote-client-agent.err.log</string>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"agent_access_token": "change-me-agent-token",
|
||||
"client_id": "",
|
||||
"display_name": "MacBook Pro van Jan",
|
||||
"endpoint": "http://192.168.1.25:8765",
|
||||
"heartbeat_interval_seconds": 20,
|
||||
"platform": "macos",
|
||||
"registration_token": "change-me-registration-token",
|
||||
"shares": {
|
||||
"downloads": "/Users/jan/Downloads",
|
||||
"movies": "/Users/jan/Movies",
|
||||
"pictures": "/Users/jan/Pictures"
|
||||
},
|
||||
"webmanager_base_url": "http://127.0.0.1:8080"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"agent_access_token": "change-me-agent-token",
|
||||
"client_id": "",
|
||||
"display_name": "MacBook Pro van Jan",
|
||||
"endpoint": "http://192.168.1.25:8765",
|
||||
"heartbeat_interval_seconds": 20,
|
||||
"platform": "macos",
|
||||
"registration_token": "change-me-registration-token",
|
||||
"shares": {
|
||||
"downloads": "/Users/jan/Downloads",
|
||||
"movies": "/Users/jan/Movies",
|
||||
"pictures": "/Users/jan/Pictures"
|
||||
},
|
||||
"webmanager_base_url": "http://127.0.0.1:8080"
|
||||
}
|
||||
@@ -0,0 +1,220 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
import threading
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from urllib import error, request
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import uvicorn
|
||||
|
||||
|
||||
AGENT_VERSION = "1.1.0-phase1"
|
||||
|
||||
|
||||
@dataclass
class AgentConfig:
    """Validated runtime configuration for the remote client agent."""

    config_path: Path  # where the JSON config lives (rewritten when a client_id is minted)
    webmanager_base_url: str  # base URL of the WebManager backend
    registration_token: str  # bearer token used for register/heartbeat calls
    agent_access_token: str  # token that authenticates the agent's own HTTP endpoints
    display_name: str  # human-readable client name reported to WebManager
    endpoint: str  # URL where this agent's HTTP API is advertised as reachable
    shares: dict[str, str]  # share key -> local root path (at least one required)
    heartbeat_interval_seconds: int  # delay between heartbeats (load_config enforces >= 5)
    client_id: str  # stable UUID identifying this client
    platform: str = "macos"  # platform label reported to the backend

    @property
    def normalized_base_url(self) -> str:
        # Strip the trailing slash so URL joins don't produce "//".
        return self.webmanager_base_url.rstrip("/")
|
||||
|
||||
|
||||
def load_config(config_path: Path) -> AgentConfig:
    """Read the agent configuration from JSON and return a validated AgentConfig.

    If the file has no ``client_id`` yet, a fresh UUID is generated and the
    config file is rewritten so the identity persists across restarts.
    Raises ValueError when the config declares no usable shares.
    """
    raw = json.loads(config_path.read_text(encoding="utf-8"))

    client_id = str(raw.get("client_id", "")).strip()
    if not client_id:
        # First run: mint a stable identity and write it back immediately.
        client_id = str(uuid.uuid4())
        raw["client_id"] = client_id
        config_path.write_text(json.dumps(raw, indent=2, sort_keys=True) + "\n", encoding="utf-8")

    raw_shares = raw.get("shares") or {}
    shares: dict[str, str] = {}
    if isinstance(raw_shares, dict):
        for share_name, share_root in raw_shares.items():
            cleaned_name = str(share_name).strip()
            cleaned_root = str(share_root).strip()
            if cleaned_name and cleaned_root:
                shares[cleaned_name] = cleaned_root

    if not shares:
        raise ValueError("config requires at least one share")

    return AgentConfig(
        config_path=config_path,
        webmanager_base_url=str(raw.get("webmanager_base_url", "")).strip(),
        registration_token=str(raw.get("registration_token", "")).strip(),
        agent_access_token=str(raw.get("agent_access_token", "")).strip(),
        display_name=str(raw.get("display_name", "")).strip(),
        # "public_endpoint" takes precedence over the legacy "endpoint" key.
        endpoint=str(raw.get("public_endpoint", raw.get("endpoint", ""))).strip(),
        shares=shares,
        heartbeat_interval_seconds=max(5, int(raw.get("heartbeat_interval_seconds", 20))),
        client_id=client_id,
        platform=str(raw.get("platform", "macos")).strip() or "macos",
    )
|
||||
|
||||
|
||||
def require_non_empty(value: str, field: str) -> str:
    """Return *value* stripped of surrounding whitespace.

    Raises ValueError naming *field* when nothing remains after stripping.
    """
    stripped = value.strip()
    if stripped:
        return stripped
    raise ValueError(f"config field '{field}' is required")
|
||||
|
||||
|
||||
def build_register_payload(config: AgentConfig) -> dict[str, Any]:
    """Build the JSON body for the backend's /api/clients/register call."""
    # Shares are advertised sorted by key, with a capitalized key as label.
    share_entries = [
        {"key": share_key, "label": share_key.capitalize()}
        for share_key in sorted(config.shares)
    ]
    return {
        "client_id": config.client_id,
        "display_name": config.display_name,
        "platform": config.platform,
        "agent_version": AGENT_VERSION,
        "endpoint": config.endpoint,
        "shares": share_entries,
    }
|
||||
|
||||
|
||||
def build_heartbeat_payload(config: AgentConfig) -> dict[str, Any]:
    """Build the JSON body for the backend's /api/clients/heartbeat call."""
    return {"client_id": config.client_id, "agent_version": AGENT_VERSION}
|
||||
|
||||
|
||||
def post_json(url: str, token: str, payload: dict[str, Any]) -> dict[str, Any]:
    """POST *payload* as JSON with bearer-token auth and decode the JSON reply.

    Uses a 10-second timeout; network errors propagate to the caller
    (urllib.error.HTTPError / URLError).
    """
    body = json.dumps(payload).encode("utf-8")
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {token}",
    }
    req = request.Request(url, method="POST", data=body, headers=headers)
    with request.urlopen(req, timeout=10) as response:
        return json.loads(response.read().decode("utf-8"))
|
||||
|
||||
|
||||
def run_heartbeat_loop(config: AgentConfig, stop_event: threading.Event) -> None:
    """Register this agent with the WebManager backend, then heartbeat until stopped.

    Intended to run in a background thread. First retries registration every
    ``heartbeat_interval_seconds`` until it succeeds, then posts heartbeats at
    the same interval. Returns promptly once ``stop_event`` is set.
    """
    # Fail fast on configuration the backend calls cannot work without.
    require_non_empty(config.webmanager_base_url, "webmanager_base_url")
    require_non_empty(config.registration_token, "registration_token")
    require_non_empty(config.agent_access_token, "agent_access_token")
    require_non_empty(config.display_name, "display_name")
    require_non_empty(config.endpoint, "public_endpoint")

    register_url = f"{config.normalized_base_url}/api/clients/register"
    heartbeat_url = f"{config.normalized_base_url}/api/clients/heartbeat"

    print(f"Starting remote client agent for {config.display_name} ({config.client_id})", flush=True)
    print(f"Using config: {config.config_path}", flush=True)
    print("agent_access_token is configured for authenticated agent endpoints", flush=True)

    # Phase 1: keep retrying registration until it succeeds or we are stopped.
    while not stop_event.is_set():
        try:
            post_json(register_url, config.registration_token, build_register_payload(config))
            print("register ok", flush=True)
            break
        except error.HTTPError as exc:
            print(f"register failed: HTTP {exc.code}", file=sys.stderr, flush=True)
        except error.URLError as exc:
            print(f"register failed: {exc.reason}", file=sys.stderr, flush=True)
        # Event.wait returns True when stop_event was set during the sleep.
        if stop_event.wait(config.heartbeat_interval_seconds):
            return

    # Phase 2: periodic heartbeats; failures are logged and retried forever.
    # NOTE(review): heartbeats authenticate with registration_token rather than
    # agent_access_token — confirm this matches the backend's expectation.
    while not stop_event.is_set():
        try:
            post_json(heartbeat_url, config.registration_token, build_heartbeat_payload(config))
            print("heartbeat ok", flush=True)
        except error.HTTPError as exc:
            print(f"heartbeat failed: HTTP {exc.code}", file=sys.stderr, flush=True)
        except error.URLError as exc:
            print(f"heartbeat failed: {exc.reason}", file=sys.stderr, flush=True)
        if stop_event.wait(config.heartbeat_interval_seconds):
            return
|
||||
|
||||
|
||||
def resolve_bind_host(config: AgentConfig, requested_host: str | None) -> str:
    """Return the host to bind the HTTP agent to.

    A non-blank ``requested_host`` wins (whitespace-trimmed); otherwise the
    agent binds to all interfaces. ``config`` is accepted for signature
    symmetry with resolve_bind_port but is not consulted.
    """
    host = (requested_host or "").strip()
    return host if host else "0.0.0.0"
|
||||
|
||||
|
||||
def resolve_bind_port(config: AgentConfig, requested_port: int | None) -> int:
    """Return the port to bind the HTTP agent to.

    Precedence: a positive ``requested_port``, then an explicit port in the
    configured endpoint URL, then the scheme default (443 for https, 80 for
    http), and finally the agent default 8765.
    """
    if requested_port and requested_port > 0:
        return requested_port
    parsed = urlparse(config.endpoint)
    if parsed.port:
        return parsed.port
    return {"https": 443, "http": 80}.get(parsed.scheme, 8765)
|
||||
|
||||
|
||||
def run(config: AgentConfig, requested_host: str | None, requested_port: int | None) -> None:
    """Start the heartbeat thread and serve the agent HTTP API until uvicorn exits."""
    stop_event = threading.Event()
    heartbeat_thread = threading.Thread(
        target=run_heartbeat_loop,
        args=(config, stop_event),
        daemon=True,
        name="remote-client-heartbeat",
    )
    heartbeat_thread.start()

    bind_host = resolve_bind_host(config, requested_host)
    bind_port = resolve_bind_port(config, requested_port)
    print(f"Starting HTTP agent on {bind_host}:{bind_port}", flush=True)
    print(f"Advertised endpoint: {config.endpoint}", flush=True)
    try:
        import os

        # The FastAPI app ("app.main:app") reads its runtime config from this
        # environment variable; it must be set before uvicorn imports the app.
        os.environ["FINDER_COMMANDER_REMOTE_AGENT_CONFIG"] = str(config.config_path)
        # Blocks here until the server is shut down (e.g. Ctrl-C / signal).
        uvicorn.run("app.main:app", host=bind_host, port=bind_port)
    finally:
        # Stop the heartbeat thread even if uvicorn raises.
        stop_event.set()
        heartbeat_thread.join(timeout=2)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the Phase 1 remote client agent."""
    default_config = Path(__file__).resolve().with_name("remote_client_agent.example.json")
    parser = argparse.ArgumentParser(description="Remote client agent Phase 1 for WebManager MVP")
    parser.add_argument(
        "--config",
        default=str(default_config),
        help="Path to remote client agent config JSON",
    )
    parser.add_argument("--host", default="", help="Bind host for the HTTP agent, defaults to 0.0.0.0")
    parser.add_argument("--port", type=int, default=0, help="Bind port for the HTTP agent, defaults to endpoint port")
    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: load config, run the agent, and map errors to exit codes."""
    args = parse_args()
    exit_code = 0
    try:
        config_path = Path(args.config).resolve()
        run(load_config(config_path), requested_host=args.host, requested_port=args.port)
    except KeyboardInterrupt:
        exit_code = 130  # conventional exit status for SIGINT
    except Exception as exc:
        print(str(exc), file=sys.stderr)
        exit_code = 1
    return exit_code
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@@ -0,0 +1,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import uvicorn
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options for running the remote agent HTTP API."""
    parser = argparse.ArgumentParser(description="Run Finder Commander remote agent HTTP API")
    parser.add_argument("--config", required=True, help="Path to remote agent config JSON")
    parser.add_argument("--host", default="0.0.0.0", help="Listen host")
    parser.add_argument("--port", type=int, default=8765, help="Listen port")
    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
    """Validate the config path, export it via the env var, and serve the app."""
    args = parse_args()
    config_path = Path(args.config).expanduser().resolve(strict=False)
    if not config_path.is_file():
        raise SystemExit(f"Config file not found: {config_path}")

    # The FastAPI app ("app.main:app") reads its runtime config from this env var.
    os.environ["FINDER_COMMANDER_REMOTE_AGENT_CONFIG"] = str(config_path)
    print(f"Using config: {config_path}", flush=True)
    uvicorn.run("app.main:app", host=args.host, port=args.port)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@@ -0,0 +1,79 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import HTTPException
|
||||
from starlette.requests import Request
|
||||
|
||||
from finder_commander.app import main as agent_main
|
||||
|
||||
|
||||
class AgentFileEndpointsTest(unittest.TestCase):
    """Integration-style tests for the agent's file endpoints (info/read/download).

    Each test runs against a temporary share root; the agent reads its config
    through the FINDER_COMMANDER_REMOTE_AGENT_CONFIG environment variable.
    """

    def setUp(self) -> None:
        """Create a temp share layout and point the agent's cached config at it."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.share_root = Path(self.temp_dir.name) / "Downloads"
        self.share_root.mkdir(parents=True, exist_ok=True)
        # Directory deliberately OUTSIDE the share, used for escape tests.
        self.outside_root = Path(self.temp_dir.name) / "Outside"
        self.outside_root.mkdir(parents=True, exist_ok=True)
        self.config_path = Path(self.temp_dir.name) / "agent.json"
        self.config_path.write_text(
            json.dumps(
                {
                    "agent_access_token": "agent-secret",
                    "client_id": "client-123",
                    "display_name": "Jan MacBook",
                    "shares": {"downloads": str(self.share_root)},
                }
            ),
            encoding="utf-8",
        )
        os.environ["FINDER_COMMANDER_REMOTE_AGENT_CONFIG"] = str(self.config_path)
        # The agent caches its config; clear it so this test's config is loaded.
        agent_main.get_runtime_config.cache_clear()

    def tearDown(self) -> None:
        """Undo the env var, drop the cached config, and remove the temp tree."""
        os.environ.pop("FINDER_COMMANDER_REMOTE_AGENT_CONFIG", None)
        agent_main.get_runtime_config.cache_clear()
        self.temp_dir.cleanup()

    @staticmethod
    def _authorized_request() -> Request:
        """Build a minimal Starlette request carrying the valid bearer token."""
        return Request({"type": "http", "headers": [(b"authorization", b"Bearer agent-secret")]})

    def test_info_read_and_download_success(self) -> None:
        """Happy path: info, truncated read, and download of a file in the share."""
        notes = self.share_root / "notes.md"
        notes.write_text("# title\nhello\n", encoding="utf-8")

        info_response = agent_main.api_info(self._authorized_request(), share="downloads", path="notes.md")
        self.assertEqual(info_response["kind"], "file")
        self.assertEqual(info_response["extension"], ".md")

        # max_bytes=4 forces truncation: only the first four characters return.
        read_response = agent_main.api_read(self._authorized_request(), share="downloads", path="notes.md", max_bytes=4)
        self.assertTrue(read_response["truncated"])
        self.assertEqual(read_response["content"], "# ti")

        download_response = agent_main.api_download(self._authorized_request(), share="downloads", path="notes.md")
        self.assertEqual(download_response.media_type, "text/markdown")
        self.assertIn('attachment; filename="notes.md"', download_response.headers.get("content-disposition", ""))

    def test_unknown_share_and_escape_outside_root_are_rejected(self) -> None:
        """Unknown shares yield 404; symlink escapes outside the root yield 403."""
        outside_file = self.outside_root / "secret.txt"
        outside_file.write_text("secret", encoding="utf-8")
        # Symlink inside the share pointing outside it: must be rejected.
        (self.share_root / "escape.txt").symlink_to(outside_file)

        with self.assertRaises(HTTPException) as unknown_share:
            agent_main.api_info(self._authorized_request(), share="missing", path="notes.md")
        self.assertEqual(unknown_share.exception.status_code, 404)
        self.assertEqual(unknown_share.exception.detail["code"], "path_not_found")

        with self.assertRaises(HTTPException) as escaped:
            agent_main.api_info(self._authorized_request(), share="downloads", path="escape.txt")
        self.assertEqual(escaped.exception.status_code, 403)
        self.assertEqual(escaped.exception.detail["code"], "path_traversal_detected")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,5 +1,17 @@
|
||||
# Remote Client Shares Implementation Phases V1.1
|
||||
|
||||
## Status
|
||||
|
||||
Per huidige repositorystatus zijn de in dit document beschreven implementatiefases afgerond:
|
||||
|
||||
- Phase 1: afgerond
|
||||
- Phase 2: afgerond
|
||||
- Phase 3: afgerond
|
||||
|
||||
Dit document beschrijft geen Phase 4.
|
||||
|
||||
De sectie `Later` hieronder blijft expliciet buiten de beschreven fasering van V1.1 en is geen impliciete volgende fase.
|
||||
|
||||
## Doel
|
||||
|
||||
Dit document splitst `REMOTE_CLIENT_SHARES_V1_DESIGN.md` op in pragmatische implementatiefases.
|
||||
@@ -32,6 +44,10 @@ Info, tekstpreview, eenvoudige image preview en download voor remote shares.
|
||||
|
||||
Alle write-acties, bookmarks/startup paths en cross-source flows.
|
||||
|
||||
Opmerking:
|
||||
|
||||
- `Later` betekent in dit document: bewust uitgestelde scope, niet een gedefinieerde volgende implementatiefase
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Client Registry
|
||||
@@ -285,6 +301,11 @@ Nieuwe endpoints:
|
||||
|
||||
Deze onderdelen horen niet in V1.1.
|
||||
|
||||
Status:
|
||||
|
||||
- deze onderdelen blijven expliciet buiten de afgeronde Phase 1 t/m Phase 3 scope
|
||||
- voor deze onderdelen bestaat in dit document geen aparte vervolgfase
|
||||
|
||||
### Write-acties
|
||||
|
||||
- mkdir
|
||||
|
||||
@@ -1,5 +1,20 @@
|
||||
# Remote Client Shares V1.1 Design
|
||||
|
||||
## Status
|
||||
|
||||
Dit document beschrijft de V1.1-doelscope voor Remote Client Shares.
|
||||
|
||||
Per huidige repositorystatus valt de beschreven V1.1 read-mostly scope onder afgeronde implementatie van:
|
||||
|
||||
- client registry
|
||||
- browse via `/Clients`
|
||||
- file info
|
||||
- tekstpreview
|
||||
- eenvoudige image preview
|
||||
- download
|
||||
|
||||
De expliciet niet in V1.1 opgenomen onderdelen hieronder blijven buiten scope en vormen in dit document geen aparte vervolgfase.
|
||||
|
||||
## Doel
|
||||
|
||||
Een gebruiker van WebManager moet naast de bestaande server-side storage-roots ook een beperkte set lokale mappen van zijn eigen client-Mac kunnen benaderen, zonder de hele homefolder bloot te geven.
|
||||
@@ -88,6 +103,11 @@ Daarom mogen remote client shares niet in hetzelfde model worden gestopt als `ro
|
||||
- automatische LAN discovery
|
||||
- multi-user auth met OS user mapping
|
||||
|
||||
Status:
|
||||
|
||||
- deze lijst blijft expliciet uitgesloten van V1.1
|
||||
- dit document definieert hiervoor geen Phase 4 of andere vervolgfase
|
||||
|
||||
---
|
||||
|
||||
## Gewenste gebruikerservaring
|
||||
|
||||
@@ -0,0 +1,194 @@
|
||||
# Research: Remote Single-File Copy To Host
|
||||
|
||||
## Relevante file analysis
|
||||
|
||||
### Backend
|
||||
|
||||
- [routes_files.py](/workspace/webmanager-mvp/webui/backend/app/api/routes_files.py)
|
||||
Bevat de bestaande lokale upload-route (`POST /api/files/upload`) en de remote read-only Phase 3 routes (`view`, `info`, `download`, `image`) via `RemoteFileService`.
|
||||
- [routes_copy.py](/workspace/webmanager-mvp/webui/backend/app/api/routes_copy.py)
|
||||
Bevat de bestaande copy-route (`POST /api/files/copy`) die volledig uitgaat van host-side source en host-side destination.
|
||||
- [file_ops_service.py](/workspace/webmanager-mvp/webui/backend/app/services/file_ops_service.py)
|
||||
Bevat lokale file-acties. Relevant is vooral `upload()`, omdat die host-write doet na `PathGuard`-validatie van een doeldirectory.
|
||||
- [copy_task_service.py](/workspace/webmanager-mvp/webui/backend/app/services/copy_task_service.py)
|
||||
Bevat task-opbouw, destination-validatie en taakcreatie voor copy, maar gaat uit van een lokale bron die via `PathGuard` naar een host-pad resolveert.
|
||||
- [remote_file_service.py](/workspace/webmanager-mvp/webui/backend/app/services/remote_file_service.py)
|
||||
Bevat al de benodigde remote read-path parsing, share-validatie via registry, agent-auth, error mapping en een gestreamde `prepare_download()` naar de agent.
|
||||
- [filesystem_adapter.py](/workspace/webmanager-mvp/webui/backend/app/fs/filesystem_adapter.py)
|
||||
Bevat de feitelijke host-write helpers:
|
||||
- `write_uploaded_file(path, file_stream, overwrite=False)`
|
||||
- `copy_file(source, destination, on_progress=None)`
|
||||
`copy_file` vereist een lokale bron op de host en is dus niet bruikbaar voor remote input. `write_uploaded_file` schrijft een inkomende stream naar een hostpad en is conceptueel het dichtstbij.
|
||||
- [path_guard.py](/workspace/webmanager-mvp/webui/backend/app/security/path_guard.py)
|
||||
Houdt host-write validatie strikt lokaal. Dat moet zo blijven; remote paden mogen hier niet als bronsemantiek in terechtkomen.
|
||||
- [tasks_runner.py](/workspace/webmanager-mvp/webui/backend/app/tasks_runner.py)
|
||||
Bevat task-based copy/move uitvoering, maar alleen voor host-side bronpaden. Wel relevant als patroon voor een aparte remote-to-host worker.
|
||||
- [schemas.py](/workspace/webmanager-mvp/webui/backend/app/api/schemas.py)
|
||||
Bevat bestaande `CopyRequest` en upload/copy response-modellen. Voor een aparte feature is waarschijnlijk een nieuw requestmodel nodig.
|
||||
|
||||
### Frontend
|
||||
|
||||
- [app.js](/workspace/webmanager-mvp/webui/html/app.js)
|
||||
Relevante bestaande flows:
|
||||
- `uploadFileRequest()` gebruikt uitsluitend `/api/files/upload`
|
||||
- `startCopySelected()` gebruikt uitsluitend `/api/files/copy`
|
||||
- remote browse/view/download is al source-aware
|
||||
- remote copy is nu bewust geblokkeerd
|
||||
Dit bevestigt dat upload-flow en copy-flow momenteel twee losse UI-contracten zijn.
|
||||
|
||||
### Agent
|
||||
|
||||
- [finder_commander/app/main.py](/workspace/webmanager-mvp/finder_commander/app/main.py)
|
||||
Agent heeft al wat voor deze feature nodig is:
|
||||
- strikte `share + relative path` validatie
|
||||
- `GET /api/info`
|
||||
- `GET /api/download`
|
||||
Voor remote single-file copy naar host is geen nieuwe remote write-API nodig.
|
||||
|
||||
## Oordeel over hergebruik van upload-internals
|
||||
|
||||
### Bestaande upload-functionaliteit aanpassen?
|
||||
|
||||
Nee.
|
||||
|
||||
Reden:
|
||||
|
||||
- de bestaande upload-route, upload-requestvorm en upload-UI werken al goed
|
||||
- upload is browser -> host via multipart/form-data
|
||||
- de gewenste feature is agent/remote -> host via backend-proxy/stream
|
||||
- dat is een ander contract, andere foutbron en andere bronsemantiek
|
||||
|
||||
### Interne host-write logica hergebruiken?
|
||||
|
||||
Ja, maar alleen op intern helper/service-niveau.
|
||||
|
||||
Concreet oordeel:
|
||||
|
||||
- `FilesystemAdapter.copy_file()` is niet geschikt voor hergebruik
|
||||
Reden: vereist een lokale host-bronpad als source.
|
||||
- `FilesystemAdapter.write_uploaded_file()` is deels relevant
|
||||
Reden: dit doet precies de host-write van een inkomende stream naar een doelbestand.
|
||||
- Direct hergebruik van `FileOpsService.upload()` is niet verstandig
|
||||
Reden: die methode is semantisch en contractueel gekoppeld aan multipart upload en `UploadFile`.
|
||||
|
||||
Best passende richting:
|
||||
|
||||
- niet hergebruiken via bestaande upload-endpoints of upload-flow
|
||||
- wel overwegen om de onderliggende stream-naar-bestand write logica te hergebruiken of te veralgemeniseren in `FilesystemAdapter`
|
||||
- voorkeur: een nieuwe sibling-helper zoals `write_stream_file(...)` of een kleine interne extractie, zodat upload ongewijzigd blijft en remote copy dezelfde veilige host-write primitief kan gebruiken
|
||||
|
||||
## Ontwerpvoorstel
|
||||
|
||||
### Feature
|
||||
|
||||
`Copy remote file to host`
|
||||
|
||||
### Scope
|
||||
|
||||
- alleen single file
|
||||
- alleen source onder `/Clients/...`
|
||||
- alleen destination op host-side lokale map
|
||||
- geen mappen
|
||||
- geen overwrite in eerste change request tenzij expliciet gewenst
|
||||
- geen upload-route hergebruik
|
||||
- geen brede refactor
|
||||
|
||||
### Backendontwerp
|
||||
|
||||
Voeg een aparte backend feature toe, niet via `POST /api/files/upload` en niet via bestaande `POST /api/files/copy`.
|
||||
|
||||
Voorkeursvorm:
|
||||
|
||||
- nieuwe route, bijvoorbeeld `POST /api/files/remote-copy`
|
||||
- request bevat:
|
||||
- `source`: remote bestandspad onder `/Clients/...`
|
||||
- `destination_dir`: host-directory pad
|
||||
|
||||
Nieuwe service, bijvoorbeeld:
|
||||
|
||||
- `RemoteCopyToHostService`
|
||||
|
||||
Verantwoordelijkheden:
|
||||
|
||||
1. valideer dat `source` een remote `/Clients/...` file is
|
||||
2. valideer dat `destination_dir` een host-directory is via bestaande lokale `PathGuard`
|
||||
3. haal remote metadata op of resolve remote naam via bestaande `RemoteFileService`
|
||||
4. bouw destination pad als `destination_dir/<remote-filename>`
|
||||
5. faal op bestaand doelbestand in eerste versie
|
||||
6. open remote download-stream via aparte interne helper op `RemoteFileService`
|
||||
7. schrijf gestreamd naar host met een aparte interne host-write helper
|
||||
8. map fouten strikt:
|
||||
- remote unavailable blijft lokale actie-fout
|
||||
- host permission/path-conflict blijft gewone host-fout
|
||||
|
||||
### Aanbevolen interne hergebruikslijn
|
||||
|
||||
- laat `RemoteFileService` een interne streaming primitive aanbieden, bijvoorbeeld een variant op de huidige remote download-open logica zonder HTTP-response voor browser-download
|
||||
- laat `FilesystemAdapter` een aparte stream-write helper aanbieden voor generieke inkomende streams
|
||||
- laat upload zijn bestaande publieke route en flow behouden
|
||||
|
||||
### Frontendontwerp
|
||||
|
||||
Geen wijziging aan upload-UI.
|
||||
|
||||
Kleine aparte UI-feature:
|
||||
|
||||
- toon een aparte actie alleen als:
|
||||
- bronpane een remote file-selectie heeft van exact 1 bestand
|
||||
- doelpane op een host/local directory staat
|
||||
- de actie roept de nieuwe backend-route aan
|
||||
- na succes:
|
||||
- refresh beide panes
|
||||
- toon lokale foutmelding bij falen
|
||||
|
||||
Voorkeur:
|
||||
|
||||
- aparte actie of expliciete source-aware branch voor "Copy remote file to host"
|
||||
- niet de bestaande upload-flow hergebruiken
|
||||
|
||||
### Agentontwerp
|
||||
|
||||
Geen nieuwe agent-endpoints nodig in deze scope.
|
||||
|
||||
De bestaande `GET /api/download` is voldoende als read-only bron voor streaming.
|
||||
|
||||
## Acceptance criteria
|
||||
|
||||
- een enkel bestand onder `/Clients/...` kan naar een host-directory worden gekopieerd
|
||||
- de destination moet een host/local directory zijn
|
||||
- mappen als remote bron worden geweigerd
|
||||
- remote -> remote wordt geweigerd
|
||||
- host -> remote wordt geweigerd
|
||||
- overwrite gebeurt niet impliciet; bestaand doelbestand geeft een nette fout
|
||||
- bestaande upload-route, upload-contract en upload-UI blijven ongewijzigd
|
||||
- bestaande lokale copy-flow blijft ongewijzigd
|
||||
- remote fouten blijven lokaal tot deze actie
|
||||
- host-write blijft onder bestaande lokale `PathGuard`-regels vallen
|
||||
- data wordt gestreamd; geen volledige file-buffer in memory
|
||||
|
||||
## Klein plan
|
||||
|
||||
1. Voeg een research-backed change request toe voor een aparte route `POST /api/files/remote-copy`.
|
||||
2. Voeg een kleine service toe die alleen remote single-file source + local destination_dir ondersteunt.
|
||||
3. Voeg een interne streaming helper toe in `RemoteFileService` voor remote bestand-inname door backend.
|
||||
4. Voeg een aparte interne host-write helper toe in `FilesystemAdapter` voor generieke stream-naar-bestand writes, zonder upload-API te wijzigen.
|
||||
5. Voeg minimale frontend wiring toe voor een aparte "Copy remote file to host"-actie.
|
||||
6. Test stapsgewijs:
|
||||
- success path remote file -> local dir
|
||||
- bestaand doelbestand
|
||||
- remote directory rejected
|
||||
- remote failure stays local
|
||||
- upload-regressie: bestaande `/api/files/upload` blijft ongewijzigd
|
||||
|
||||
## Expliciete lijst van wat buiten scope blijft
|
||||
|
||||
- remote mappen kopiëren
|
||||
- remote write-acties
|
||||
- remote -> remote
|
||||
- host -> remote
|
||||
- aanpassing van bestaande upload-routes
|
||||
- aanpassing van upload-requestcontract
|
||||
- aanpassing van upload-UI
|
||||
- brede refactor van copy/upload/task-infrastructuur
|
||||
- bookmarks/startup paths
|
||||
- remote task-runner verbreding buiten deze ene actie
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, Header
|
||||
|
||||
from backend.app.api.schemas import (
|
||||
RemoteClientHeartbeatRequest,
|
||||
RemoteClientItem,
|
||||
RemoteClientListResponse,
|
||||
RemoteClientRegisterRequest,
|
||||
)
|
||||
from backend.app.dependencies import get_remote_client_service
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
|
||||
router = APIRouter(prefix="/clients")
|
||||
|
||||
|
||||
@router.get("", response_model=RemoteClientListResponse)
async def list_clients(
    service: RemoteClientService = Depends(get_remote_client_service),
) -> RemoteClientListResponse:
    """List registered remote clients; delegates entirely to the service."""
    return service.list_clients()
|
||||
|
||||
|
||||
@router.post("/register", response_model=RemoteClientItem)
async def register_client(
    request: RemoteClientRegisterRequest,
    authorization: str | None = Header(default=None),
    service: RemoteClientService = Depends(get_remote_client_service),
) -> RemoteClientItem:
    """Register (or re-register) a remote client.

    The raw Authorization header is forwarded to the service, which is
    responsible for validating it.
    """
    return service.register_client(authorization=authorization, request=request)
|
||||
|
||||
|
||||
@router.post("/heartbeat", response_model=RemoteClientItem)
async def heartbeat(
    request: RemoteClientHeartbeatRequest,
    authorization: str | None = Header(default=None),
    service: RemoteClientService = Depends(get_remote_client_service),
) -> RemoteClientItem:
    """Record a heartbeat from a remote client.

    Authorization validation is delegated to the service, mirroring
    register_client.
    """
    return service.record_heartbeat(authorization=authorization, request=request)
|
||||
@@ -5,10 +5,11 @@ from fastapi.responses import StreamingResponse
|
||||
from starlette.background import BackgroundTask
|
||||
|
||||
from backend.app.api.schemas import ArchivePrepareRequest, DeleteRequest, FileInfoResponse, MkdirRequest, MkdirResponse, RenameRequest, RenameResponse, SaveRequest, SaveResponse, TaskCreateResponse, TaskDetailResponse, UploadResponse, ViewResponse
|
||||
from backend.app.dependencies import get_archive_download_task_service, get_delete_task_service, get_file_ops_service
|
||||
from backend.app.dependencies import get_archive_download_task_service, get_delete_task_service, get_file_ops_service, get_remote_file_service
|
||||
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
|
||||
from backend.app.services.delete_task_service import DeleteTaskService
|
||||
from backend.app.services.file_ops_service import FileOpsService
|
||||
from backend.app.services.remote_file_service import RemoteFileService
|
||||
|
||||
router = APIRouter(prefix="/files")
|
||||
|
||||
@@ -54,7 +55,10 @@ async def view(
|
||||
path: str,
|
||||
for_edit: bool = False,
|
||||
service: FileOpsService = Depends(get_file_ops_service),
|
||||
remote_service: RemoteFileService = Depends(get_remote_file_service),
|
||||
) -> ViewResponse:
|
||||
if remote_service.handles_path(path):
|
||||
return remote_service.view(path=path, for_edit=for_edit)
|
||||
return service.view(path=path, for_edit=for_edit)
|
||||
|
||||
|
||||
@@ -62,7 +66,10 @@ async def view(
|
||||
async def info(
|
||||
path: str,
|
||||
service: FileOpsService = Depends(get_file_ops_service),
|
||||
remote_service: RemoteFileService = Depends(get_remote_file_service),
|
||||
) -> FileInfoResponse:
|
||||
if remote_service.handles_path(path):
|
||||
return remote_service.info(path=path)
|
||||
return service.info(path=path)
|
||||
|
||||
|
||||
@@ -70,8 +77,9 @@ async def info(
|
||||
async def download(
|
||||
path: list[str] = Query(...),
|
||||
service: FileOpsService = Depends(get_file_ops_service),
|
||||
remote_service: RemoteFileService = Depends(get_remote_file_service),
|
||||
) -> StreamingResponse:
|
||||
prepared = service.prepare_download(paths=path)
|
||||
prepared = remote_service.prepare_download(paths=path) if any(remote_service.handles_path(item) for item in path) else service.prepare_download(paths=path)
|
||||
response = StreamingResponse(
|
||||
prepared["content"],
|
||||
headers=prepared["headers"],
|
||||
@@ -143,7 +151,15 @@ async def pdf(
|
||||
async def image(
|
||||
path: str,
|
||||
service: FileOpsService = Depends(get_file_ops_service),
|
||||
remote_service: RemoteFileService = Depends(get_remote_file_service),
|
||||
) -> StreamingResponse:
|
||||
if remote_service.handles_path(path):
|
||||
prepared = remote_service.prepare_image_stream(path=path)
|
||||
return StreamingResponse(
|
||||
prepared["content"],
|
||||
headers=prepared["headers"],
|
||||
media_type=prepared["content_type"],
|
||||
)
|
||||
prepared = service.prepare_image_stream(path=path)
|
||||
return StreamingResponse(
|
||||
prepared["content"],
|
||||
|
||||
@@ -238,3 +238,41 @@ class SearchResultItem(BaseModel):
|
||||
class SearchResponse(BaseModel):
|
||||
items: list[SearchResultItem]
|
||||
truncated: bool
|
||||
|
||||
|
||||
class RemoteClientShare(BaseModel):
    """A single directory share exposed by a remote client agent."""

    # Stable identifier used in virtual paths (/Clients/<client_id>/<key>/...).
    key: str
    # Human-readable name shown in the directory listing.
    label: str
|
||||
|
||||
|
||||
class RemoteClientRegisterRequest(BaseModel):
    """Payload a remote agent sends to register (or re-register) itself."""

    # Unique, agent-chosen identifier; re-registering the same id upserts.
    client_id: str
    display_name: str
    platform: str
    agent_version: str
    # Base URL of the agent's own HTTP API, used for remote browsing.
    endpoint: str
    # Shares offered by this client; at least one is required by the service.
    shares: list[RemoteClientShare]
|
||||
|
||||
|
||||
class RemoteClientHeartbeatRequest(BaseModel):
    """Periodic keep-alive payload from a registered remote agent."""

    client_id: str
    # Reported on every heartbeat so version upgrades are reflected promptly.
    agent_version: str
|
||||
|
||||
|
||||
class RemoteClientItem(BaseModel):
    """Full server-side view of a registered remote client."""

    client_id: str
    display_name: str
    platform: str
    agent_version: str
    # Base URL of the client's agent API.
    endpoint: str
    shares: list[RemoteClientShare]
    # ISO-8601 UTC timestamp of the last registration or heartbeat, if any.
    last_seen: str | None = None
    # "online" or "offline" (set by the repository/stale-status sweep).
    status: str
    last_error: str | None = None
    reachable_at: str | None = None
    created_at: str
    updated_at: str
|
||||
|
||||
|
||||
class RemoteClientListResponse(BaseModel):
    """Response wrapper for the remote-client listing endpoint."""

    items: list[RemoteClientItem]
|
||||
|
||||
@@ -9,6 +9,11 @@ from pathlib import Path
|
||||
class Settings:
|
||||
root_aliases: dict[str, str]
|
||||
task_db_path: str
|
||||
remote_client_registration_token: str
|
||||
remote_client_offline_timeout_seconds: int
|
||||
remote_client_agent_auth_header: str
|
||||
remote_client_agent_auth_scheme: str
|
||||
remote_client_agent_auth_token: str
|
||||
|
||||
|
||||
DEFAULT_ROOT_ALIASES = {
|
||||
@@ -40,4 +45,18 @@ def get_settings() -> Settings:
|
||||
task_db_path = os.getenv("WEBMANAGER_TASK_DB_PATH", default_task_db_path).strip()
|
||||
if not task_db_path:
|
||||
task_db_path = default_task_db_path
|
||||
return Settings(root_aliases=_load_root_aliases(), task_db_path=task_db_path)
|
||||
raw_offline_timeout = os.getenv("WEBMANAGER_REMOTE_CLIENT_OFFLINE_TIMEOUT_SECONDS", "60").strip()
|
||||
try:
|
||||
remote_client_offline_timeout_seconds = max(1, int(raw_offline_timeout))
|
||||
except ValueError:
|
||||
remote_client_offline_timeout_seconds = 60
|
||||
return Settings(
|
||||
root_aliases=_load_root_aliases(),
|
||||
task_db_path=task_db_path,
|
||||
remote_client_registration_token=os.getenv("WEBMANAGER_REMOTE_CLIENT_REGISTRATION_TOKEN", "").strip(),
|
||||
remote_client_offline_timeout_seconds=remote_client_offline_timeout_seconds,
|
||||
remote_client_agent_auth_header=os.getenv("WEBMANAGER_REMOTE_CLIENT_AGENT_AUTH_HEADER", "Authorization").strip()
|
||||
or "Authorization",
|
||||
remote_client_agent_auth_scheme=os.getenv("WEBMANAGER_REMOTE_CLIENT_AGENT_AUTH_SCHEME", "Bearer").strip() or "Bearer",
|
||||
remote_client_agent_auth_token=os.getenv("WEBMANAGER_REMOTE_CLIENT_AGENT_AUTH_TOKEN", "").strip(),
|
||||
)
|
||||
|
||||
@@ -0,0 +1,201 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class RemoteClientRepository:
    """SQLite-backed persistence for remote client registrations.

    Each public method opens its own short-lived connection (see
    ``_connection``), so the repository itself holds no connection state and
    is safe to share across request handlers.
    """

    def __init__(self, db_path: str):
        # Path to the sqlite database file; the schema is created eagerly so
        # later calls never race on table creation.
        self._db_path = db_path
        self._ensure_schema()

    def upsert_client(
        self,
        *,
        client_id: str,
        display_name: str,
        platform: str,
        agent_version: str,
        endpoint: str,
        shares: list[dict[str, str]],
        now_iso: str,
    ) -> dict:
        """Insert or refresh a client row and return its stored form.

        A re-registration resets ``last_error`` to NULL and marks the client
        "online"; ``created_at`` is preserved on conflict.
        """
        shares_json = self._encode_shares(shares)
        with self._connection() as conn:
            conn.execute(
                """
                INSERT INTO remote_clients (
                    client_id, display_name, platform, agent_version, endpoint, shares_json,
                    last_seen, status, last_error, reachable_at, created_at, updated_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(client_id) DO UPDATE SET
                    display_name = excluded.display_name,
                    platform = excluded.platform,
                    agent_version = excluded.agent_version,
                    endpoint = excluded.endpoint,
                    shares_json = excluded.shares_json,
                    last_seen = excluded.last_seen,
                    status = excluded.status,
                    last_error = NULL,
                    updated_at = excluded.updated_at
                """,
                (
                    client_id,
                    display_name,
                    platform,
                    agent_version,
                    endpoint,
                    shares_json,
                    now_iso,
                    "online",
                    None,
                    None,
                    now_iso,
                    now_iso,
                ),
            )
            # Read the row back so callers get the authoritative stored state
            # (including created_at preserved across upserts).
            row = conn.execute("SELECT * FROM remote_clients WHERE client_id = ?", (client_id,)).fetchone()
            return self._to_dict(row)

    def record_heartbeat(self, *, client_id: str, agent_version: str, now_iso: str) -> dict | None:
        """Mark a known client as online; return None for unknown client ids."""
        with self._connection() as conn:
            cursor = conn.execute(
                """
                UPDATE remote_clients
                SET agent_version = ?, last_seen = ?, status = ?, updated_at = ?
                WHERE client_id = ?
                """,
                (agent_version, now_iso, "online", now_iso, client_id),
            )
            # rowcount == 0 means the client never registered.
            if cursor.rowcount <= 0:
                return None
            row = conn.execute("SELECT * FROM remote_clients WHERE client_id = ?", (client_id,)).fetchone()
            return self._to_dict(row)

    def mark_stale_clients_offline(self, *, cutoff_iso: str, now_iso: str) -> None:
        """Flip clients whose last_seen predates ``cutoff_iso`` to offline.

        Lexicographic comparison of ISO-8601 UTC strings matches chronological
        order, so the ``last_seen < ?`` predicate is a time comparison.
        """
        with self._connection() as conn:
            conn.execute(
                """
                UPDATE remote_clients
                SET status = ?, updated_at = ?
                WHERE status != ? AND last_seen IS NOT NULL AND last_seen < ?
                """,
                ("offline", now_iso, "offline", cutoff_iso),
            )

    def list_clients(self) -> list[dict]:
        """Return all clients ordered case-insensitively by display name."""
        with self._connection() as conn:
            rows = conn.execute(
                """
                SELECT *
                FROM remote_clients
                ORDER BY LOWER(display_name) ASC, client_id ASC
                """
            ).fetchall()
            return [self._to_dict(row) for row in rows]

    def get_client(self, client_id: str) -> dict | None:
        """Fetch one client by id, or None when it does not exist."""
        with self._connection() as conn:
            row = conn.execute(
                """
                SELECT *
                FROM remote_clients
                WHERE client_id = ?
                """,
                (client_id,),
            ).fetchone()
            if row is None:
                return None
            return self._to_dict(row)

    def _ensure_schema(self) -> None:
        """Create the table and indexes if missing (idempotent)."""
        db_path = Path(self._db_path)
        # Make sure the parent directory exists for file-backed databases;
        # skip for bare filenames whose parent resolves to "" or ".".
        if db_path.parent and str(db_path.parent) not in {"", "."}:
            db_path.parent.mkdir(parents=True, exist_ok=True)
        with self._connection() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS remote_clients (
                    client_id TEXT PRIMARY KEY,
                    display_name TEXT NOT NULL,
                    platform TEXT NOT NULL,
                    agent_version TEXT NOT NULL,
                    endpoint TEXT NOT NULL,
                    shares_json TEXT NOT NULL,
                    last_seen TEXT NULL,
                    status TEXT NOT NULL,
                    last_error TEXT NULL,
                    reachable_at TEXT NULL,
                    created_at TEXT NOT NULL,
                    updated_at TEXT NOT NULL
                )
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_remote_clients_display_name
                ON remote_clients(display_name)
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_remote_clients_last_seen
                ON remote_clients(last_seen)
                """
            )

    @contextmanager
    def _connection(self):
        """Yield a connection with Row access; commit on success, rollback on error."""
        conn = sqlite3.connect(self._db_path)
        conn.row_factory = sqlite3.Row
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()

    @classmethod
    def _to_dict(cls, row: sqlite3.Row) -> dict:
        """Convert a sqlite Row into the plain-dict shape the services consume."""
        return {
            "client_id": row["client_id"],
            "display_name": row["display_name"],
            "platform": row["platform"],
            "agent_version": row["agent_version"],
            "endpoint": row["endpoint"],
            "shares": cls._decode_shares(row["shares_json"]),
            "last_seen": row["last_seen"],
            "status": row["status"],
            "last_error": row["last_error"],
            "reachable_at": row["reachable_at"],
            "created_at": row["created_at"],
            "updated_at": row["updated_at"],
        }

    @staticmethod
    def _encode_shares(shares: list[dict[str, str]]) -> str:
        """Serialize shares deterministically (sorted keys, no whitespace)."""
        return json.dumps(shares, separators=(",", ":"), sort_keys=True)

    @staticmethod
    def _decode_shares(raw: str) -> list[dict[str, str]]:
        """Parse the stored shares JSON defensively.

        Non-list payloads yield []; entries missing a non-empty key or label
        are dropped so callers never see malformed shares.
        """
        parsed = json.loads(raw or "[]")
        if not isinstance(parsed, list):
            return []
        normalized: list[dict[str, str]] = []
        for item in parsed:
            if not isinstance(item, dict):
                continue
            key = str(item.get("key", "")).strip()
            label = str(item.get("label", "")).strip()
            if key and label:
                normalized.append({"key": key, "label": label})
        return normalized

    @staticmethod
    def now_iso() -> str:
        """Current UTC time as an ISO-8601 string with a trailing 'Z'."""
        return datetime.now(tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
@@ -6,6 +6,7 @@ from pathlib import Path
|
||||
from backend.app.config import Settings, get_settings
|
||||
from backend.app.db.bookmark_repository import BookmarkRepository
|
||||
from backend.app.db.history_repository import HistoryRepository
|
||||
from backend.app.db.remote_client_repository import RemoteClientRepository
|
||||
from backend.app.db.settings_repository import SettingsRepository
|
||||
from backend.app.db.task_repository import TaskRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
@@ -19,6 +20,9 @@ from backend.app.services.duplicate_task_service import DuplicateTaskService
|
||||
from backend.app.services.file_ops_service import FileOpsService
|
||||
from backend.app.services.history_service import HistoryService
|
||||
from backend.app.services.move_task_service import MoveTaskService
|
||||
from backend.app.services.remote_browse_service import RemoteBrowseService
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
from backend.app.services.remote_file_service import RemoteFileService
|
||||
from backend.app.services.search_service import SearchService
|
||||
from backend.app.services.settings_service import SettingsService
|
||||
from backend.app.services.task_service import TaskService
|
||||
@@ -59,6 +63,12 @@ def get_settings_repository() -> SettingsRepository:
|
||||
return SettingsRepository(db_path=settings.task_db_path)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
def get_remote_client_repository() -> RemoteClientRepository:
    """Process-wide singleton repository (lru_cache caches the first instance)."""
    settings: Settings = get_settings()
    # Remote clients share the task database file.
    return RemoteClientRepository(db_path=settings.task_db_path)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_task_runner() -> TaskRunner:
|
||||
return TaskRunner(
|
||||
@@ -75,7 +85,11 @@ def get_archive_artifact_root() -> str:
|
||||
|
||||
|
||||
async def get_browse_service() -> BrowseService:
|
||||
return BrowseService(path_guard=get_path_guard(), filesystem=get_filesystem_adapter())
|
||||
return BrowseService(
|
||||
path_guard=get_path_guard(),
|
||||
filesystem=get_filesystem_adapter(),
|
||||
remote_browse_service=await get_remote_browse_service(),
|
||||
)
|
||||
|
||||
|
||||
async def get_file_ops_service() -> FileOpsService:
|
||||
@@ -155,3 +169,32 @@ async def get_search_service() -> SearchService:
|
||||
|
||||
async def get_settings_service() -> SettingsService:
|
||||
return SettingsService(repository=get_settings_repository(), path_guard=get_path_guard())
|
||||
|
||||
|
||||
async def get_remote_client_service() -> RemoteClientService:
    """FastAPI dependency: build the remote-client service from settings."""
    settings: Settings = get_settings()
    return RemoteClientService(
        repository=get_remote_client_repository(),
        registration_token=settings.remote_client_registration_token,
        offline_timeout_seconds=settings.remote_client_offline_timeout_seconds,
    )
|
||||
|
||||
|
||||
async def get_remote_browse_service() -> RemoteBrowseService:
    """FastAPI dependency: browsing of /Clients virtual paths via agents."""
    settings: Settings = get_settings()
    return RemoteBrowseService(
        remote_client_service=await get_remote_client_service(),
        agent_auth_header=settings.remote_client_agent_auth_header,
        agent_auth_scheme=settings.remote_client_agent_auth_scheme,
        agent_auth_token=settings.remote_client_agent_auth_token,
    )
||||
|
||||
|
||||
async def get_remote_file_service() -> RemoteFileService:
    """FastAPI dependency: file operations (view/info/download) on remote paths."""
    settings: Settings = get_settings()
    return RemoteFileService(
        remote_client_service=await get_remote_client_service(),
        agent_auth_header=settings.remote_client_agent_auth_header,
        agent_auth_scheme=settings.remote_client_agent_auth_scheme,
        agent_auth_token=settings.remote_client_agent_auth_token,
    )
||||
|
||||
@@ -10,6 +10,7 @@ from backend.app.api.errors import AppError
|
||||
from backend.app.api.routes_bookmarks import router as bookmarks_router
|
||||
from backend.app.api.routes_browse import router as browse_router
|
||||
from backend.app.api.routes_copy import router as copy_router
|
||||
from backend.app.api.routes_clients import router as clients_router
|
||||
from backend.app.api.routes_duplicate import router as duplicate_router
|
||||
from backend.app.api.routes_files import router as files_router
|
||||
from backend.app.api.routes_history import router as history_router
|
||||
@@ -33,6 +34,7 @@ app.mount("/ui", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
|
||||
app.include_router(browse_router, prefix="/api")
|
||||
app.include_router(files_router, prefix="/api")
|
||||
app.include_router(copy_router, prefix="/api")
|
||||
app.include_router(clients_router, prefix="/api")
|
||||
app.include_router(duplicate_router, prefix="/api")
|
||||
app.include_router(move_router, prefix="/api")
|
||||
app.include_router(search_router, prefix="/api")
|
||||
|
||||
Binary file not shown.
@@ -3,14 +3,24 @@ from __future__ import annotations
|
||||
from backend.app.api.schemas import BrowseResponse, DirectoryEntry, FileEntry
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.remote_browse_service import RemoteBrowseService
|
||||
|
||||
|
||||
class BrowseService:
|
||||
def __init__(self, path_guard: PathGuard, filesystem: FilesystemAdapter):
|
||||
def __init__(
|
||||
self,
|
||||
path_guard: PathGuard,
|
||||
filesystem: FilesystemAdapter,
|
||||
remote_browse_service: RemoteBrowseService | None = None,
|
||||
):
|
||||
self._path_guard = path_guard
|
||||
self._filesystem = filesystem
|
||||
self._remote_browse_service = remote_browse_service
|
||||
|
||||
def browse(self, path: str, show_hidden: bool) -> BrowseResponse:
|
||||
if self._remote_browse_service and self._remote_browse_service.handles_path(path):
|
||||
return self._remote_browse_service.browse(path=path, show_hidden=show_hidden)
|
||||
|
||||
if self._path_guard.is_virtual_volumes_path(path):
|
||||
directories = [
|
||||
DirectoryEntry(name=item["name"], path=item["path"], modified="")
|
||||
|
||||
@@ -0,0 +1,201 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import httpx
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import BrowseResponse, DirectoryEntry, FileEntry, RemoteClientItem
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
|
||||
|
||||
class RemoteBrowseService:
    """Directory browsing for the virtual ``/Clients`` tree.

    Path layout:
      /Clients                         -> one directory per registered client
      /Clients/<client_id>             -> one directory per share of that client
      /Clients/<client_id>/<share>/... -> listings fetched live from the agent
    """

    ROOT_PATH = "/Clients"

    def __init__(
        self,
        remote_client_service: RemoteClientService,
        agent_auth_header: str,
        agent_auth_scheme: str,
        agent_auth_token: str,
        agent_timeout_seconds: float = 2.0,
    ):
        self._remote_client_service = remote_client_service
        # Fall back to conventional defaults when header/scheme are blank.
        self._agent_auth_header = (agent_auth_header or "Authorization").strip() or "Authorization"
        self._agent_auth_scheme = (agent_auth_scheme or "Bearer").strip() or "Bearer"
        self._agent_auth_token = (agent_auth_token or "").strip()
        # Clamp to a sane minimum so a misconfigured 0 never blocks forever/instantly.
        self._agent_timeout_seconds = max(0.1, float(agent_timeout_seconds))

    @classmethod
    def handles_path(cls, path: str) -> bool:
        """True when ``path`` is /Clients itself or anything beneath it."""
        normalized = (path or "").strip()
        return normalized == cls.ROOT_PATH or normalized.startswith(f"{cls.ROOT_PATH}/")

    def browse(self, path: str, show_hidden: bool) -> BrowseResponse:
        """Dispatch by depth: root listing, client listing, or live share listing."""
        parts = self._path_parts(path)
        if not parts:
            return self._browse_clients_root()
        if len(parts) == 1:
            return self._browse_client(parts[0])
        return self._browse_remote_share(parts[0], parts[1], parts[2:], show_hidden)

    @classmethod
    def _path_parts(cls, path: str) -> list[str]:
        """Split a /Clients path into its components below the root."""
        normalized = (path or "").strip().rstrip("/")
        if normalized == cls.ROOT_PATH:
            return []
        return normalized[len(cls.ROOT_PATH) + 1 :].split("/")

    def _browse_clients_root(self) -> BrowseResponse:
        """List every registered client as a virtual directory."""
        clients = self._remote_client_service.list_clients().items
        directories = [
            DirectoryEntry(
                name=client.display_name,
                path=f"{self.ROOT_PATH}/{client.client_id}",
                # Prefer last heartbeat time; fall back to the record's update time.
                modified=client.last_seen or client.updated_at,
            )
            for client in clients
        ]
        return BrowseResponse(path=self.ROOT_PATH, directories=directories, files=[])

    def _browse_client(self, client_id: str) -> BrowseResponse:
        """List a single client's shares as virtual directories."""
        client = self._remote_client_service.get_client(client_id)
        directories = [
            DirectoryEntry(
                name=share.label,
                path=f"{self.ROOT_PATH}/{client.client_id}/{share.key}",
                modified=client.last_seen or client.updated_at,
            )
            for share in client.shares
        ]
        return BrowseResponse(path=f"{self.ROOT_PATH}/{client.client_id}", directories=directories, files=[])

    def _browse_remote_share(
        self,
        client_id: str,
        share_key: str,
        relative_parts: list[str],
        show_hidden: bool,
    ) -> BrowseResponse:
        """Fetch a live listing from the client's agent and map it to entries.

        Raises AppError 503 when the client is offline or agent auth is not
        configured, and 404 for an unknown share key.
        """
        client = self._remote_client_service.get_client(client_id)
        if client.status != "online":
            raise AppError(
                code="remote_client_unavailable",
                message=f"Remote client '{client.display_name}' is offline",
                status_code=503,
                details={"client_id": client.client_id, "status": client.status},
            )
        share = next((item for item in client.shares if item.key == share_key), None)
        if share is None:
            raise AppError(
                code="path_not_found",
                message="Remote share was not found",
                status_code=404,
                details={"client_id": client.client_id, "share_key": share_key},
            )
        if not self._agent_auth_token:
            raise AppError(
                code="remote_client_agent_auth_not_configured",
                message="Remote client agent auth token is not configured",
                status_code=503,
                details={"client_id": client.client_id},
            )

        base_path = f"{self.ROOT_PATH}/{client.client_id}/{share.key}"
        relative_path = "/".join(relative_parts)
        agent_payload = self._fetch_remote_listing(client=client, share_key=share.key, relative_path=relative_path, show_hidden=show_hidden)

        # Map the agent's entries defensively: unknown kinds, unnamed entries
        # and non-dict items are silently skipped rather than failing the listing.
        directories: list[DirectoryEntry] = []
        files: list[FileEntry] = []
        for entry in agent_payload.get("entries", []):
            if not isinstance(entry, dict):
                continue
            name = str(entry.get("name", "")).strip()
            kind = str(entry.get("kind", "")).strip()
            if not name or kind not in {"directory", "file"}:
                continue
            child_path = f"{base_path}/{name}"
            modified = str(entry.get("modified", "") or "")
            if kind == "directory":
                directories.append(DirectoryEntry(name=name, path=child_path, modified=modified))
                continue
            size = entry.get("size", 0)
            try:
                # Agents may send size as a string or omit it; never go negative.
                normalized_size = max(0, int(size))
            except (TypeError, ValueError):
                normalized_size = 0
            files.append(FileEntry(name=name, path=child_path, size=normalized_size, modified=modified))

        response_path = base_path if not relative_path else f"{base_path}/{relative_path}"
        return BrowseResponse(path=response_path, directories=directories, files=files)

    def _fetch_remote_listing(
        self,
        *,
        client: RemoteClientItem,
        share_key: str,
        relative_path: str,
        show_hidden: bool,
    ) -> dict:
        """Call the agent's /api/list endpoint and return its JSON payload.

        Maps transport and HTTP failures to AppError: 504 for timeouts, 502
        for unreachable/auth-rejected/other agent errors, 404 passthrough for
        missing remote paths.
        """
        normalized_endpoint = client.endpoint.rstrip("/")
        query = urlencode({"share": share_key, "path": relative_path, "show_hidden": str(show_hidden).lower()})
        url = f"{normalized_endpoint}/api/list?{query}"
        headers = {self._agent_auth_header: f"{self._agent_auth_scheme} {self._agent_auth_token}"}
        timeout = httpx.Timeout(self._agent_timeout_seconds, connect=self._agent_timeout_seconds)

        try:
            with httpx.Client(timeout=timeout, headers=headers) as client_http:
                response = client_http.get(url)
        except httpx.TimeoutException as exc:
            raise AppError(
                code="remote_client_timeout",
                message=f"Remote client '{client.display_name}' timed out",
                status_code=504,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            ) from exc
        except httpx.HTTPError as exc:
            raise AppError(
                code="remote_client_unreachable",
                message=f"Remote client '{client.display_name}' is unreachable",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            ) from exc

        if response.status_code == 404:
            raise AppError(
                code="path_not_found",
                message="Remote path was not found",
                status_code=404,
                details={"client_id": client.client_id, "share_key": share_key},
            )
        if response.status_code in {401, 403}:
            # Auth rejection by the agent is reported as a gateway error (502),
            # not forwarded as 401/403, since it is a server-side config issue.
            raise AppError(
                code="remote_client_forbidden",
                message=f"Remote client '{client.display_name}' rejected authentication",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            )
        if response.status_code >= 400:
            raise AppError(
                code="remote_client_error",
                message=f"Remote client '{client.display_name}' browse failed",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint, "status_code": str(response.status_code)},
            )
        try:
            payload = response.json()
        except ValueError as exc:
            raise AppError(
                code="remote_client_error",
                message=f"Remote client '{client.display_name}' returned invalid JSON",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            ) from exc
        if not isinstance(payload, dict):
            raise AppError(
                code="remote_client_error",
                message=f"Remote client '{client.display_name}' returned an invalid response",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            )
        return payload
|
||||
@@ -0,0 +1,151 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Callable
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import (
|
||||
RemoteClientHeartbeatRequest,
|
||||
RemoteClientItem,
|
||||
RemoteClientListResponse,
|
||||
RemoteClientRegisterRequest,
|
||||
)
|
||||
from backend.app.db.remote_client_repository import RemoteClientRepository
|
||||
|
||||
|
||||
class RemoteClientService:
    """Registration, heartbeat handling and status bookkeeping for remote agents.

    All public methods operate on the injected repository; timestamps come
    from an injectable ``now`` callable so tests can freeze the clock.
    """

    def __init__(
        self,
        repository: RemoteClientRepository,
        registration_token: str,
        offline_timeout_seconds: int,
        now: Callable[[], datetime] | None = None,
    ):
        self._repository = repository
        # An empty token disables registration entirely (see _require_registration_auth).
        self._registration_token = registration_token.strip()
        # Clamp so a misconfigured 0/negative timeout never marks everything offline.
        self._offline_timeout_seconds = max(1, int(offline_timeout_seconds))
        # Default clock: timezone-aware UTC now.
        self._now = now or (lambda: datetime.now(tz=timezone.utc))

    def list_clients(self) -> RemoteClientListResponse:
        """Return all known clients, after sweeping stale ones to offline."""
        self._refresh_stale_statuses()
        items = [RemoteClientItem(**row) for row in self._repository.list_clients()]
        return RemoteClientListResponse(items=items)

    def get_client(self, client_id: str) -> RemoteClientItem:
        """Fetch one client by id.

        Raises AppError 400 for a blank id and 404 when the client is unknown.
        """
        normalized_client_id = (client_id or "").strip()
        if not normalized_client_id:
            raise AppError(
                code="invalid_request",
                message="client_id is required",
                status_code=400,
                details={"client_id": client_id},
            )
        self._refresh_stale_statuses()
        item = self._repository.get_client(normalized_client_id)
        if item is None:
            raise AppError(
                code="path_not_found",
                message="Remote client was not found",
                status_code=404,
                details={"client_id": normalized_client_id},
            )
        return RemoteClientItem(**item)

    def register_client(self, authorization: str | None, request: RemoteClientRegisterRequest) -> RemoteClientItem:
        """Register (or re-register) a client after validating the bearer token."""
        self._require_registration_auth(authorization)
        payload = self._normalize_register_request(request)
        now_iso = self._to_iso(self._now())
        item = self._repository.upsert_client(now_iso=now_iso, **payload)
        return RemoteClientItem(**item)

    def record_heartbeat(self, authorization: str | None, request: RemoteClientHeartbeatRequest) -> RemoteClientItem:
        """Record a keep-alive from a registered client.

        Raises AppError 400 for missing fields and 404 for unknown clients.
        """
        self._require_registration_auth(authorization)
        client_id = (request.client_id or "").strip()
        agent_version = (request.agent_version or "").strip()
        if not client_id:
            raise AppError(
                code="invalid_request",
                message="client_id is required",
                status_code=400,
                details={"client_id": request.client_id},
            )
        if not agent_version:
            raise AppError(
                code="invalid_request",
                message="agent_version is required",
                status_code=400,
                details={"agent_version": request.agent_version},
            )
        item = self._repository.record_heartbeat(
            client_id=client_id,
            agent_version=agent_version,
            now_iso=self._to_iso(self._now()),
        )
        if item is None:
            raise AppError(
                code="path_not_found",
                message="Remote client was not found",
                status_code=404,
                details={"client_id": client_id},
            )
        return RemoteClientItem(**item)

    def _require_registration_auth(self, authorization: str | None) -> None:
        """Validate the shared registration bearer token.

        Raises AppError 503 when no token is configured and 403 on mismatch.
        """
        if not self._registration_token:
            raise AppError(
                code="remote_client_registration_disabled",
                message="Remote client registration is not configured",
                status_code=503,
            )
        import hmac  # local import keeps this security fix self-contained

        expected = f"Bearer {self._registration_token}"
        # Constant-time comparison: a plain != short-circuits on the first
        # differing character and can leak the token via response timing.
        if not hmac.compare_digest((authorization or "").strip(), expected):
            raise AppError(
                code="forbidden",
                message="Invalid remote client registration token",
                status_code=403,
            )

    def _normalize_register_request(self, request: RemoteClientRegisterRequest) -> dict:
        """Strip and validate all registration fields; raise 400 on any gap."""
        client_id = (request.client_id or "").strip()
        display_name = (request.display_name or "").strip()
        platform = (request.platform or "").strip()
        agent_version = (request.agent_version or "").strip()
        endpoint = (request.endpoint or "").strip()
        shares = [
            {"key": (item.key or "").strip(), "label": (item.label or "").strip()}
            for item in request.shares
        ]
        # Drop shares missing either field rather than failing the whole request.
        shares = [item for item in shares if item["key"] and item["label"]]

        if not client_id:
            raise AppError("invalid_request", "client_id is required", 400, {"client_id": request.client_id})
        if not display_name:
            raise AppError("invalid_request", "display_name is required", 400, {"display_name": request.display_name})
        if not platform:
            raise AppError("invalid_request", "platform is required", 400, {"platform": request.platform})
        if not agent_version:
            raise AppError("invalid_request", "agent_version is required", 400, {"agent_version": request.agent_version})
        if not endpoint:
            raise AppError("invalid_request", "endpoint is required", 400, {"endpoint": request.endpoint})
        if not shares:
            raise AppError("invalid_request", "at least one share is required", 400, {"shares": "[]"})

        return {
            "client_id": client_id,
            "display_name": display_name,
            "platform": platform,
            "agent_version": agent_version,
            "endpoint": endpoint,
            "shares": shares,
        }

    def _refresh_stale_statuses(self) -> None:
        """Mark clients silent for longer than the offline timeout as offline."""
        now = self._now()
        self._repository.mark_stale_clients_offline(
            cutoff_iso=self._to_iso(now - timedelta(seconds=self._offline_timeout_seconds)),
            now_iso=self._to_iso(now),
        )

    @staticmethod
    def _to_iso(value: datetime) -> str:
        """Normalize any aware datetime to an ISO-8601 UTC string with 'Z'."""
        return value.astimezone(timezone.utc).isoformat().replace("+00:00", "Z")
|
||||
@@ -0,0 +1,432 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import PurePosixPath
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import httpx
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.api.schemas import FileInfoResponse, RemoteClientItem, ViewResponse
|
||||
from backend.app.services.remote_browse_service import RemoteBrowseService
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
|
||||
REMOTE_TEXT_PREVIEW_MAX_BYTES = 256 * 1024
|
||||
REMOTE_AGENT_TIMEOUT_SECONDS = 2.0
|
||||
REMOTE_DOWNLOAD_READ_TIMEOUT_SECONDS = 5.0
|
||||
REMOTE_STREAM_CHUNK_BYTES = 64 * 1024
|
||||
TEXT_CONTENT_TYPES = {
|
||||
".txt": "text/plain",
|
||||
".log": "text/plain",
|
||||
".conf": "text/plain",
|
||||
".ini": "text/plain",
|
||||
".cfg": "text/plain",
|
||||
".md": "text/markdown",
|
||||
".yml": "text/yaml",
|
||||
".yaml": "text/yaml",
|
||||
".json": "application/json",
|
||||
".js": "text/javascript",
|
||||
".py": "text/x-python",
|
||||
".css": "text/css",
|
||||
".html": "text/html",
|
||||
}
|
||||
SPECIAL_TEXT_FILENAMES = {
|
||||
"dockerfile": "text/plain",
|
||||
"containerfile": "text/plain",
|
||||
}
|
||||
IMAGE_CONTENT_TYPES = {
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".png": "image/png",
|
||||
".webp": "image/webp",
|
||||
".gif": "image/gif",
|
||||
".bmp": "image/bmp",
|
||||
".avif": "image/avif",
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class RemoteResolvedPath:
|
||||
raw_path: str
|
||||
client: RemoteClientItem
|
||||
share_key: str
|
||||
relative_path: str
|
||||
name: str
|
||||
root_path: str
|
||||
|
||||
|
||||
class RemoteFileService:
    """File operations (metadata, text preview, download/image streaming)
    proxied to a remote client agent over HTTP.

    Paths are virtual: ``/Clients/<client_id>/<share_key>/<relative/path>``.
    Every agent request carries a configurable auth header/scheme/token, and
    agent failures are translated into ``AppError`` instances so callers get
    consistent API errors.
    """

    def __init__(
        self,
        remote_client_service: RemoteClientService,
        agent_auth_header: str,
        agent_auth_scheme: str,
        agent_auth_token: str,
        agent_timeout_seconds: float = REMOTE_AGENT_TIMEOUT_SECONDS,
        text_preview_max_bytes: int = REMOTE_TEXT_PREVIEW_MAX_BYTES,
        download_read_timeout_seconds: float = REMOTE_DOWNLOAD_READ_TIMEOUT_SECONDS,
        stream_chunk_bytes: int = REMOTE_STREAM_CHUNK_BYTES,
    ):
        self._remote_client_service = remote_client_service
        # Blank/whitespace-only settings fall back to conventional defaults.
        self._agent_auth_header = (agent_auth_header or "Authorization").strip() or "Authorization"
        self._agent_auth_scheme = (agent_auth_scheme or "Bearer").strip() or "Bearer"
        self._agent_auth_token = (agent_auth_token or "").strip()
        # Clamp tunables to sane minimums so misconfiguration cannot zero them out.
        self._agent_timeout_seconds = max(0.1, float(agent_timeout_seconds))
        self._text_preview_max_bytes = max(1024, int(text_preview_max_bytes))
        self._download_read_timeout_seconds = max(0.1, float(download_read_timeout_seconds))
        self._stream_chunk_bytes = max(4096, int(stream_chunk_bytes))

    def handles_path(self, path: str) -> bool:
        """Return True when *path* belongs to the virtual /Clients tree."""
        return RemoteBrowseService.handles_path(path)

    def info(self, path: str) -> FileInfoResponse:
        """Fetch file/directory metadata from the owning agent's ``/api/info``.

        The share root itself is a valid target here (``allow_share_root``).
        Raises AppError (502) when the agent reports an unknown ``kind``.
        """
        resolved = self._resolve_remote_path(path, allow_share_root=True)
        payload = self._request_json(
            client=resolved.client,
            endpoint_path="/api/info",
            params={"share": resolved.share_key, "path": resolved.relative_path},
        )
        kind = str(payload.get("kind", "")).strip()
        if kind not in {"file", "directory"}:
            raise self._invalid_agent_payload(resolved.client, "Remote file info response was invalid")

        # Prefer the agent-reported extension; fall back to the path suffix.
        extension = str(payload.get("extension", "") or "").strip() or PurePosixPath(resolved.name).suffix.lower() or None
        return FileInfoResponse(
            name=str(payload.get("name", resolved.name)).strip() or resolved.name,
            path=resolved.raw_path,
            type=kind,
            size=self._normalize_optional_int(payload.get("size")),
            modified=str(payload.get("modified", "")).strip(),
            root=resolved.root_path,
            extension=extension,
            content_type=self._normalize_optional_string(payload.get("content_type")),
            owner=self._normalize_optional_string(payload.get("owner")),
            group=self._normalize_optional_string(payload.get("group")),
            width=self._normalize_optional_int(payload.get("width")),
            height=self._normalize_optional_int(payload.get("height")),
        )

    def view(self, path: str, *, for_edit: bool = False) -> ViewResponse:
        """Fetch a bounded text preview via the agent's ``/api/read``.

        Raises:
            AppError: 409 when *for_edit* is True (remote edit unsupported),
                502 when the agent returns more text than the configured limit.
        """
        if for_edit:
            raise AppError(
                code="unsupported_type",
                message="Remote files are not supported for edit",
                status_code=409,
                details={"path": path},
            )
        resolved = self._resolve_remote_path(path)
        payload = self._request_json(
            client=resolved.client,
            endpoint_path="/api/read",
            params={
                "share": resolved.share_key,
                "path": resolved.relative_path,
                "max_bytes": str(self._text_preview_max_bytes),
            },
        )
        content = str(payload.get("content", ""))
        # Defense in depth: the agent was told the limit, but never trust it.
        if len(content.encode("utf-8")) > self._text_preview_max_bytes:
            raise self._invalid_agent_payload(resolved.client, "Remote text preview exceeded the configured limit")
        return ViewResponse(
            path=resolved.raw_path,
            name=str(payload.get("name", resolved.name)).strip() or resolved.name,
            content_type=str(payload.get("content_type", self._content_type_for_name(resolved.name) or "text/plain")).strip(),
            encoding=str(payload.get("encoding", "utf-8")).strip() or "utf-8",
            truncated=bool(payload.get("truncated", False)),
            size=max(0, int(payload.get("size", 0))),
            modified=str(payload.get("modified", "")).strip(),
            content=content,
        )

    def prepare_download(self, paths: list[str]) -> dict:
        """Open a single-file download stream from the owning agent.

        Returns a dict with ``content`` (a lazily-closing byte iterator),
        ``headers`` and ``content_type`` suitable for a streaming response.
        Raises AppError (400) for anything other than exactly one path.
        """
        if len(paths) != 1:
            raise AppError(
                code="invalid_request",
                message="Remote downloads support exactly one file per request",
                status_code=400,
            )
        resolved = self._resolve_remote_path(paths[0])
        stream = self._open_stream(
            client=resolved.client,
            endpoint_path="/api/download",
            params={"share": resolved.share_key, "path": resolved.relative_path},
        )
        # Prefer the agent's disposition header; otherwise synthesize one.
        content_disposition = stream.headers.get("content-disposition") or f'attachment; filename="{resolved.name}"'
        headers = {"Content-Disposition": content_disposition}
        if stream.headers.get("content-length"):
            headers["Content-Length"] = stream.headers["content-length"]
        return {
            "content": self._iter_remote_stream(stream),
            "headers": headers,
            "content_type": stream.headers.get("content-type", "application/octet-stream"),
        }

    def prepare_image_stream(self, path: str) -> dict:
        """Open an inline image stream for a remote file.

        The content type is derived from the file extension; unsupported
        extensions raise AppError (409).
        """
        resolved = self._resolve_remote_path(path)
        content_type = self._image_content_type_for_name(resolved.name)
        if content_type is None:
            raise AppError(
                code="unsupported_type",
                message="File type is not supported for image viewing",
                status_code=409,
                details={"path": path},
            )
        stream = self._open_stream(
            client=resolved.client,
            endpoint_path="/api/download",
            params={"share": resolved.share_key, "path": resolved.relative_path},
        )
        headers: dict[str, str] = {}
        if stream.headers.get("content-length"):
            headers["Content-Length"] = stream.headers["content-length"]
        return {
            "content": self._iter_remote_stream(stream),
            "headers": headers,
            "content_type": content_type,
        }

    def _resolve_remote_path(self, path: str, *, allow_share_root: bool = False) -> RemoteResolvedPath:
        """Parse and validate a /Clients path into client/share/relative parts.

        Raises AppError for paths outside /Clients (400), paths that do not
        reach into a share (409), unknown shares (404) and offline clients (503).
        """
        normalized = (path or "").strip().rstrip("/")
        if not self.handles_path(normalized):
            raise AppError(
                code="invalid_request",
                message="Remote path must be under /Clients",
                status_code=400,
                details={"path": path},
            )
        parts = normalized[len(RemoteBrowseService.ROOT_PATH) + 1 :].split("/") if normalized != RemoteBrowseService.ROOT_PATH else []
        # File operations need at least client/share/name; metadata lookups may
        # target the share root itself.
        min_parts = 2 if allow_share_root else 3
        if len(parts) < min_parts:
            raise AppError(
                code="type_conflict",
                message="Remote path must reference a file or directory inside a share",
                status_code=409,
                details={"path": path},
            )
        client = self._remote_client_service.get_client(parts[0])
        if client.status != "online":
            raise AppError(
                code="remote_client_unavailable",
                message=f"Remote client '{client.display_name}' is offline",
                status_code=503,
                details={"client_id": client.client_id, "status": client.status},
            )
        share_key = parts[1]
        if not any(share.key == share_key for share in client.shares):
            raise AppError(
                code="path_not_found",
                message="Remote share was not found",
                status_code=404,
                details={"client_id": client.client_id, "share_key": share_key},
            )
        relative_path = "/".join(parts[2:])
        if not relative_path and not allow_share_root:
            raise AppError(
                code="type_conflict",
                message="Remote file operation requires a path inside the share",
                status_code=409,
                details={"path": path},
            )
        name = parts[-1]
        if allow_share_root and len(parts) == 2:
            # At the share root, present the share's label instead of its key.
            share = next((item for item in client.shares if item.key == share_key), None)
            if share is not None:
                name = share.label
        return RemoteResolvedPath(
            raw_path=normalized,
            client=client,
            share_key=share_key,
            relative_path=relative_path,
            name=name,
            root_path=f"{RemoteBrowseService.ROOT_PATH}/{client.client_id}/{share_key}",
        )

    def _request_json(self, *, client: RemoteClientItem, endpoint_path: str, params: dict[str, str]) -> dict:
        """GET a JSON document from the agent, mapping transport failures to AppError."""
        url = self._build_url(client.endpoint, endpoint_path, params)
        timeout = httpx.Timeout(self._agent_timeout_seconds, connect=self._agent_timeout_seconds)
        try:
            with httpx.Client(timeout=timeout, headers=self._auth_headers()) as client_http:
                response = client_http.get(url)
        except httpx.TimeoutException as exc:
            raise self._timeout_error(client) from exc
        except httpx.HTTPError as exc:
            raise self._unreachable_error(client) from exc
        self._raise_for_agent_error(client=client, response=response)
        try:
            payload = response.json()
        except ValueError as exc:
            raise self._invalid_agent_payload(client, "Remote client returned invalid JSON") from exc
        if not isinstance(payload, dict):
            raise self._invalid_agent_payload(client, "Remote client returned an invalid response")
        return payload

    def _open_stream(self, *, client: RemoteClientItem, endpoint_path: str, params: dict[str, str]) -> httpx.Response:
        """Open a streaming GET against the agent and return the live Response.

        The httpx.Client that owns the connection is stashed in
        ``response.extensions`` so ``_iter_remote_stream`` can close both once
        the body has been consumed (or iteration is abandoned).
        """
        url = self._build_url(client.endpoint, endpoint_path, params)
        timeout = httpx.Timeout(
            connect=self._agent_timeout_seconds,
            read=self._download_read_timeout_seconds,
            write=self._agent_timeout_seconds,
            pool=self._agent_timeout_seconds,
        )
        client_http = httpx.Client(timeout=timeout, headers=self._auth_headers())
        try:
            # BUG FIX: httpx.Client.stream() is a @contextmanager, not a
            # Response factory. The previous `response = client_http.stream(...);
            # response.__enter__()` left `response` bound to the context manager,
            # so every later `response.headers` / `response.extensions` access
            # failed. send(request, stream=True) is the documented way to get
            # the Response without a `with` block.
            request = client_http.build_request("GET", url)
            response = client_http.send(request, stream=True)
        except httpx.TimeoutException as exc:
            client_http.close()
            raise self._timeout_error(client) from exc
        except httpx.HTTPError as exc:
            client_http.close()
            raise self._unreachable_error(client) from exc
        try:
            if response.status_code >= 400:
                # Streamed responses must be read before .json() works in
                # _raise_for_agent_error (httpx raises ResponseNotRead
                # otherwise); agent error bodies are small JSON documents.
                response.read()
            self._raise_for_agent_error(client=client, response=response)
        except Exception:
            response.close()
            client_http.close()
            raise
        response.extensions["remote_client_http_client"] = client_http
        return response

    def _iter_remote_stream(self, response: httpx.Response):
        """Yield body chunks, guaranteeing the response and its owning
        httpx.Client are closed when iteration ends or is abandoned."""
        client_http = response.extensions.get("remote_client_http_client")
        try:
            for chunk in response.iter_bytes(chunk_size=self._stream_chunk_bytes):
                if chunk:
                    yield chunk
        finally:
            response.close()
            if client_http is not None:
                client_http.close()

    def _raise_for_agent_error(self, *, client: RemoteClientItem, response: httpx.Response) -> None:
        """Translate agent HTTP error responses (status >= 400) into AppError.

        Known statuses (400/403/404/409) keep the agent-provided code/message
        where available; anything else becomes a 502 ``remote_client_error``
        carrying diagnostic details.
        """
        if response.status_code < 400:
            return
        code = None
        message = None
        detail_payload = None
        try:
            payload = response.json()
        except ValueError:
            payload = None
        if isinstance(payload, dict):
            detail = payload.get("detail")
            if isinstance(detail, dict):
                detail_payload = detail
                code = self._normalize_optional_string(detail.get("code"))
                message = self._normalize_optional_string(detail.get("message"))
            elif isinstance(detail, str):
                message = detail.strip() or None

        if response.status_code == 400:
            raise AppError(
                code=code or "invalid_request",
                message=message or "Remote request was rejected",
                status_code=400,
                details={"client_id": client.client_id},
            )
        if response.status_code == 403:
            agent_code = code or "forbidden"
            if agent_code == "invalid_agent_token":
                # A rejected agent token is a server-side integration fault,
                # not a user permission problem, hence 502 rather than 403.
                raise AppError(
                    code="remote_client_forbidden",
                    message=f"Remote client '{client.display_name}' rejected authentication",
                    status_code=502,
                    details={"client_id": client.client_id, "endpoint": client.endpoint},
                )
            raise AppError(
                code=agent_code,
                message=message or "Remote access was denied",
                status_code=403,
                details={"client_id": client.client_id},
            )
        if response.status_code == 404:
            raise AppError(
                code=code or "path_not_found",
                message=message or "Remote path was not found",
                status_code=404,
                details={"client_id": client.client_id},
            )
        if response.status_code == 409:
            raise AppError(
                code=code or "type_conflict",
                message=message or "Remote file operation could not be completed",
                status_code=409,
                details={"client_id": client.client_id},
            )
        raise AppError(
            code="remote_client_error",
            message=message or f"Remote client '{client.display_name}' request failed",
            status_code=502,
            details={
                "client_id": client.client_id,
                "endpoint": client.endpoint,
                "status_code": str(response.status_code),
                "agent_code": code or "",
                "agent_detail": str(detail_payload or ""),
            },
        )

    def _auth_headers(self) -> dict[str, str]:
        """Build the agent auth header, failing fast (503) when no token is configured."""
        if not self._agent_auth_token:
            raise AppError(
                code="remote_client_agent_auth_not_configured",
                message="Remote client agent auth token is not configured",
                status_code=503,
            )
        return {self._agent_auth_header: f"{self._agent_auth_scheme} {self._agent_auth_token}"}

    @staticmethod
    def _build_url(endpoint: str, endpoint_path: str, params: dict[str, str]) -> str:
        """Join the agent endpoint, path and URL-encoded query parameters."""
        return f"{endpoint.rstrip('/')}{endpoint_path}?{urlencode(params)}"

    @staticmethod
    def _timeout_error(client: RemoteClientItem) -> AppError:
        """AppError (504) for an agent that did not answer in time."""
        return AppError(
            code="remote_client_timeout",
            message=f"Remote client '{client.display_name}' timed out",
            status_code=504,
            details={"client_id": client.client_id, "endpoint": client.endpoint},
        )

    @staticmethod
    def _unreachable_error(client: RemoteClientItem) -> AppError:
        """AppError (502) for an agent that could not be reached at all."""
        return AppError(
            code="remote_client_unreachable",
            message=f"Remote client '{client.display_name}' is unreachable",
            status_code=502,
            details={"client_id": client.client_id, "endpoint": client.endpoint},
        )

    @staticmethod
    def _invalid_agent_payload(client: RemoteClientItem, message: str) -> AppError:
        """AppError (502) for an agent response that violates the expected contract."""
        return AppError(
            code="remote_client_error",
            message=message,
            status_code=502,
            details={"client_id": client.client_id, "endpoint": client.endpoint},
        )

    @staticmethod
    def _normalize_optional_string(value) -> str | None:
        """Coerce to a stripped string; empty/None collapse to None."""
        normalized = str(value).strip() if value is not None else ""
        return normalized or None

    @staticmethod
    def _normalize_optional_int(value) -> int | None:
        """Coerce to a non-negative int; None/empty/unparseable collapse to None."""
        if value is None or value == "":
            return None
        try:
            return max(0, int(value))
        except (TypeError, ValueError):
            return None

    @staticmethod
    def _content_type_for_name(name: str) -> str | None:
        """Best-effort text content type from the (lower-cased) filename."""
        special_name = SPECIAL_TEXT_FILENAMES.get((name or "").lower())
        if special_name:
            return special_name
        return TEXT_CONTENT_TYPES.get(PurePosixPath(name).suffix.lower())

    @staticmethod
    def _image_content_type_for_name(name: str) -> str | None:
        """Image content type from the filename extension, or None if unsupported."""
        return IMAGE_CONTENT_TYPES.get(PurePosixPath(name).suffix.lower())
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
@@ -11,11 +12,43 @@ import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.dependencies import get_browse_service
|
||||
from backend.app.db.remote_client_repository import RemoteClientRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
from backend.app.services.remote_browse_service import RemoteBrowseService
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
|
||||
|
||||
class _StubRemoteBrowseService(RemoteBrowseService):
    """Test double: serves canned directory listings instead of calling agents."""

    def __init__(
        self,
        remote_client_service: RemoteClientService,
        listings: dict[tuple[str, str, str], dict],
        failing_client_ids: set[str],
    ):
        # Placeholder agent credentials/timeout — the HTTP path is bypassed by
        # the _fetch_remote_listing override below.
        super().__init__(
            remote_client_service=remote_client_service,
            agent_auth_header="Authorization",
            agent_auth_scheme="Bearer",
            agent_auth_token="agent-secret",
            agent_timeout_seconds=0.25,
        )
        # Canned listings keyed by (client_id, share_key, relative_path).
        self._listings = listings
        # Clients whose fetch should simulate an unreachable agent.
        self._failing_client_ids = failing_client_ids

    def _fetch_remote_listing(self, *, client, share_key: str, relative_path: str, show_hidden: bool) -> dict:
        """Return the canned listing, or raise 502 for clients marked as failing."""
        if client.client_id in self._failing_client_ids:
            raise AppError(
                code="remote_client_unreachable",
                message=f"Remote client '{client.display_name}' is unreachable",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            )
        return self._listings[(client.client_id, share_key, relative_path)]
|
||||
|
||||
|
||||
class BrowseApiGoldenTest(unittest.TestCase):
|
||||
@@ -36,6 +69,12 @@ class BrowseApiGoldenTest(unittest.TestCase):
|
||||
file_path.write_bytes(b"abc")
|
||||
second_file = self.second_root / "archive.txt"
|
||||
second_file.write_text("z", encoding="utf-8")
|
||||
remote_root = Path(self.temp_dir.name) / "remote-downloads"
|
||||
remote_root.mkdir(parents=True, exist_ok=True)
|
||||
remote_dir = remote_root / "Series"
|
||||
remote_dir.mkdir()
|
||||
remote_file = remote_root / "episode.mkv"
|
||||
remote_file.write_bytes(b"remote")
|
||||
|
||||
hidden_dir = self.root / ".hidden_dir"
|
||||
hidden_dir.mkdir()
|
||||
@@ -43,15 +82,70 @@ class BrowseApiGoldenTest(unittest.TestCase):
|
||||
hidden_file.write_bytes(b"x")
|
||||
|
||||
mtime = 1710000000
|
||||
for path in [folder, file_path, hidden_dir, hidden_file, second_file]:
|
||||
for path in [folder, file_path, hidden_dir, hidden_file, second_file, remote_dir, remote_file]:
|
||||
Path(path).touch()
|
||||
Path(path).chmod(0o755)
|
||||
import os
|
||||
os.utime(path, (mtime, mtime))
|
||||
|
||||
repository = RemoteClientRepository(str(Path(self.temp_dir.name) / "remote-clients.db"))
|
||||
now_iso = "2026-03-26T12:00:00Z"
|
||||
repository.upsert_client(
|
||||
client_id="client-123",
|
||||
display_name="Jan MacBook",
|
||||
platform="macos",
|
||||
agent_version="1.1.0",
|
||||
endpoint="http://agent.test",
|
||||
shares=[{"key": "downloads", "label": "Downloads"}],
|
||||
now_iso=now_iso,
|
||||
)
|
||||
repository.upsert_client(
|
||||
client_id="broken-client",
|
||||
display_name="Offline iMac",
|
||||
platform="macos",
|
||||
agent_version="1.1.0",
|
||||
endpoint="http://127.0.0.1:1",
|
||||
shares=[{"key": "downloads", "label": "Downloads"}],
|
||||
now_iso=now_iso,
|
||||
)
|
||||
|
||||
service = BrowseService(
|
||||
path_guard=PathGuard({"storage1": str(self.root), "storage2": str(self.second_root)}),
|
||||
filesystem=FilesystemAdapter(),
|
||||
remote_browse_service=_StubRemoteBrowseService(
|
||||
remote_client_service=RemoteClientService(
|
||||
repository=repository,
|
||||
registration_token="secret-token",
|
||||
offline_timeout_seconds=60,
|
||||
now=lambda: datetime(2026, 3, 26, 12, 0, 0, tzinfo=timezone.utc),
|
||||
),
|
||||
listings={
|
||||
(
|
||||
"client-123",
|
||||
"downloads",
|
||||
"",
|
||||
): {
|
||||
"entries": [
|
||||
{
|
||||
"name": "Series",
|
||||
"kind": "directory",
|
||||
"size": remote_dir.stat().st_size,
|
||||
"modified": datetime.fromtimestamp(remote_dir.stat().st_mtime, tz=timezone.utc)
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z"),
|
||||
},
|
||||
{
|
||||
"name": "episode.mkv",
|
||||
"kind": "file",
|
||||
"size": remote_file.stat().st_size,
|
||||
"modified": datetime.fromtimestamp(remote_file.stat().st_mtime, tz=timezone.utc)
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z"),
|
||||
},
|
||||
]
|
||||
}
|
||||
},
|
||||
failing_client_ids={"broken-client"},
|
||||
),
|
||||
)
|
||||
async def _override_browse_service() -> BrowseService:
|
||||
return service
|
||||
@@ -151,6 +245,80 @@ class BrowseApiGoldenTest(unittest.TestCase):
|
||||
},
|
||||
)
|
||||
|
||||
def test_browse_virtual_clients_and_remote_share(self) -> None:
    """Golden path: /Clients root, one client's shares, then a share listing."""
    # The virtual /Clients root lists every registered client as a directory.
    clients_response = self._get("/Clients")
    self.assertEqual(clients_response.status_code, 200)
    self.assertEqual(
        clients_response.json(),
        {
            "path": "/Clients",
            "directories": [
                {
                    "name": "Jan MacBook",
                    "path": "/Clients/client-123",
                    "modified": "2026-03-26T12:00:00Z",
                },
                {
                    "name": "Offline iMac",
                    "path": "/Clients/broken-client",
                    "modified": "2026-03-26T12:00:00Z",
                },
            ],
            "files": [],
        },
    )

    # A client node lists its shares by label.
    shares_response = self._get("/Clients/client-123")
    self.assertEqual(shares_response.status_code, 200)
    self.assertEqual(
        shares_response.json(),
        {
            "path": "/Clients/client-123",
            "directories": [
                {
                    "name": "Downloads",
                    "path": "/Clients/client-123/downloads",
                    "modified": "2026-03-26T12:00:00Z",
                }
            ],
            "files": [],
        },
    )

    # Inside a share the stubbed remote listing is rendered verbatim.
    browse_response = self._get("/Clients/client-123/downloads")
    self.assertEqual(browse_response.status_code, 200)
    modified = datetime.fromtimestamp(1710000000, tz=timezone.utc).isoformat().replace("+00:00", "Z")
    self.assertEqual(
        browse_response.json(),
        {
            "path": "/Clients/client-123/downloads",
            "directories": [
                {
                    "name": "Series",
                    "path": "/Clients/client-123/downloads/Series",
                    "modified": modified,
                }
            ],
            "files": [
                {
                    "name": "episode.mkv",
                    "path": "/Clients/client-123/downloads/episode.mkv",
                    "size": 6,
                    "modified": modified,
                }
            ],
        },
    )
|
||||
|
||||
def test_remote_client_failure_stays_local_to_remote_subtree(self) -> None:
    """An unreachable client yields 502 for its subtree without breaking local browsing."""
    broken_response = self._get("/Clients/broken-client/downloads")
    self.assertEqual(broken_response.status_code, 502)
    self.assertEqual(broken_response.json()["error"]["code"], "remote_client_unreachable")

    # Local volumes must remain browsable even when a remote client is down.
    volumes_response = self._get("/Volumes")
    self.assertEqual(volumes_response.status_code, 200)
    self.assertEqual(volumes_response.json()["path"], "/Volumes")
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.dependencies import get_remote_client_service
|
||||
from backend.app.db.remote_client_repository import RemoteClientRepository
|
||||
from backend.app.main import app
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
|
||||
|
||||
class _Clock:
|
||||
def __init__(self, current: datetime):
|
||||
self.current = current
|
||||
|
||||
def now(self) -> datetime:
|
||||
return self.current
|
||||
|
||||
def advance(self, *, seconds: int) -> None:
|
||||
self.current += timedelta(seconds=seconds)
|
||||
|
||||
|
||||
class RemoteClientsApiGoldenTest(unittest.TestCase):
    """Golden tests for the /api/clients register/list/heartbeat endpoints."""

    def setUp(self) -> None:
        # Fresh SQLite-backed repository per test, driven by a controllable
        # clock so online/offline transitions are deterministic.
        self.temp_dir = tempfile.TemporaryDirectory()
        self.clock = _Clock(datetime(2026, 3, 26, 12, 0, 0, tzinfo=timezone.utc))
        repository = RemoteClientRepository(str(Path(self.temp_dir.name) / "remote-clients.db"))
        service = RemoteClientService(
            repository=repository,
            registration_token="secret-token",
            offline_timeout_seconds=60,
            now=self.clock.now,
        )

        async def _override_remote_client_service() -> RemoteClientService:
            return service

        app.dependency_overrides[get_remote_client_service] = _override_remote_client_service

    def tearDown(self) -> None:
        # Undo the FastAPI dependency override and remove the temp database.
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _request(self, method: str, url: str, payload: dict | None = None, token: str | None = None) -> httpx.Response:
        """Issue a GET/POST against the app in-process (ASGI transport, no sockets)."""
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            headers = {}
            if token is not None:
                headers["Authorization"] = f"Bearer {token}"
            async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
                if method == "GET":
                    return await client.get(url, headers=headers)
                return await client.post(url, json=payload, headers=headers)

        return asyncio.run(_run())

    @staticmethod
    def _register_payload() -> dict:
        """Canonical registration body reused across tests."""
        return {
            "client_id": "client-123",
            "display_name": "Jan MacBook",
            "platform": "macos",
            "agent_version": "1.1.0",
            "endpoint": "http://192.168.1.25:8765",
            "shares": [{"key": "downloads", "label": "Downloads"}],
        }

    def test_list_is_empty_by_default(self) -> None:
        """Listing clients before any registration returns an empty collection."""
        response = self._request("GET", "/api/clients")

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {"items": []})

    def test_register_then_list_then_heartbeat_and_status_timeout(self) -> None:
        """Full lifecycle: register -> list -> heartbeat -> offline after timeout."""
        register_response = self._request(
            "POST",
            "/api/clients/register",
            self._register_payload(),
            token="secret-token",
        )

        self.assertEqual(register_response.status_code, 200)
        register_body = register_response.json()
        self.assertEqual(register_body["client_id"], "client-123")
        self.assertEqual(register_body["display_name"], "Jan MacBook")
        self.assertEqual(register_body["status"], "online")
        self.assertEqual(register_body["last_seen"], "2026-03-26T12:00:00Z")
        self.assertIsNone(register_body["last_error"])
        self.assertIsNone(register_body["reachable_at"])

        list_response = self._request("GET", "/api/clients")
        self.assertEqual(list_response.status_code, 200)
        self.assertEqual(len(list_response.json()["items"]), 1)
        self.assertEqual(list_response.json()["items"][0]["status"], "online")

        # 30s is inside the 60s offline window: the heartbeat keeps the client
        # online, refreshes last_seen and updates the reported agent version.
        self.clock.advance(seconds=30)
        heartbeat_response = self._request(
            "POST",
            "/api/clients/heartbeat",
            {"client_id": "client-123", "agent_version": "1.1.1"},
            token="secret-token",
        )
        self.assertEqual(heartbeat_response.status_code, 200)
        heartbeat_body = heartbeat_response.json()
        self.assertEqual(heartbeat_body["agent_version"], "1.1.1")
        self.assertEqual(heartbeat_body["last_seen"], "2026-03-26T12:00:30Z")
        self.assertEqual(heartbeat_body["status"], "online")

        # 61s past the last heartbeat exceeds offline_timeout_seconds=60, so
        # the client is reported offline while last_seen is preserved.
        self.clock.advance(seconds=61)
        timed_out_list = self._request("GET", "/api/clients")
        self.assertEqual(timed_out_list.status_code, 200)
        timed_out_item = timed_out_list.json()["items"][0]
        self.assertEqual(timed_out_item["status"], "offline")
        self.assertEqual(timed_out_item["last_seen"], "2026-03-26T12:00:30Z")
        self.assertIsNone(timed_out_item["last_error"])
        self.assertIsNone(timed_out_item["reachable_at"])

    def test_register_rejects_invalid_token(self) -> None:
        """Registration with a wrong bearer token is rejected with 403/forbidden."""
        response = self._request(
            "POST",
            "/api/clients/register",
            self._register_payload(),
            token="wrong-token",
        )

        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.json()["error"]["code"], "forbidden")
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
||||
@@ -0,0 +1,269 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[3]))
|
||||
|
||||
from backend.app.api.errors import AppError
|
||||
from backend.app.dependencies import get_browse_service, get_remote_file_service
|
||||
from backend.app.db.remote_client_repository import RemoteClientRepository
|
||||
from backend.app.fs.filesystem_adapter import FilesystemAdapter
|
||||
from backend.app.main import app
|
||||
from backend.app.security.path_guard import PathGuard
|
||||
from backend.app.services.browse_service import BrowseService
|
||||
from backend.app.services.remote_client_service import RemoteClientService
|
||||
from backend.app.services.remote_file_service import RemoteFileService
|
||||
|
||||
|
||||
PNG_1X1 = base64.b64decode(
|
||||
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGP4z8AAAAMBAQDJ/pLvAAAAAElFTkSuQmCC"
|
||||
)
|
||||
|
||||
|
||||
class _StubRemoteFileService(RemoteFileService):
    """RemoteFileService test double driven by canned payloads instead of HTTP."""

    def __init__(
        self,
        remote_client_service: RemoteClientService,
        *,
        payloads: dict[tuple[str, str, str, str], dict],
        streams: dict[tuple[str, str, str], dict],
        failing_client_ids: set[str],
    ):
        # Placeholder agent credentials — the network paths are overridden below.
        super().__init__(
            remote_client_service=remote_client_service,
            agent_auth_header="Authorization",
            agent_auth_scheme="Bearer",
            agent_auth_token="agent-secret",
        )
        # Canned JSON responses keyed by (client_id, endpoint_path, share, path).
        self._payloads = payloads
        # Canned stream items keyed by (client_id, share_key, relative_path).
        self._streams = streams
        # Clients whose requests should simulate an unreachable agent.
        self._failing_client_ids = failing_client_ids

    def _request_json(self, *, client, endpoint_path: str, params: dict[str, str]) -> dict:
        """Return a canned payload, or raise 502 for clients marked as failing."""
        if client.client_id in self._failing_client_ids:
            raise AppError(
                code="remote_client_unreachable",
                message=f"Remote client '{client.display_name}' is unreachable",
                status_code=502,
                details={"client_id": client.client_id, "endpoint": client.endpoint},
            )
        return self._payloads[(client.client_id, endpoint_path, params["share"], params.get("path", ""))]

    def prepare_download(self, paths: list[str]) -> dict:
        """Mimic the parent's download contract using canned stream items."""
        resolved = self._resolve_remote_path(paths[0])
        item = self._stream_item(resolved.client.client_id, resolved.share_key, resolved.relative_path, resolved.name)
        return {
            "content": self._bytes_iter(item["content"]),
            "headers": {"Content-Disposition": item["headers"]["content-disposition"]},
            "content_type": item["headers"]["content-type"],
        }

    def prepare_image_stream(self, path: str) -> dict:
        """Mimic the parent's image-stream contract using canned stream items."""
        resolved = self._resolve_remote_path(path)
        item = self._stream_item(resolved.client.client_id, resolved.share_key, resolved.relative_path, resolved.name)
        return {
            "content": self._bytes_iter(item["content"]),
            "headers": {"Content-Length": item["headers"]["content-length"]},
            "content_type": item["headers"]["content-type"],
        }

    def _stream_item(self, client_id: str, share_key: str, relative_path: str, default_name: str) -> dict:
        """Look up a canned stream item, or raise 502 for failing clients."""
        if client_id in self._failing_client_ids:
            raise AppError(
                code="remote_client_unreachable",
                message=f"Remote client '{default_name}' is unreachable",
                status_code=502,
                details={"client_id": client_id},
            )
        return self._streams[(client_id, share_key, relative_path)]

    @staticmethod
    async def _bytes_iter(payload: bytes):
        # Single-chunk async generator standing in for a streamed response body.
        yield payload
|
||||
|
||||
|
||||
class RemoteFileOpsApiGoldenTest(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.temp_dir = tempfile.TemporaryDirectory()
|
||||
self.volumes_root = Path(self.temp_dir.name) / "Volumes"
|
||||
self.volumes_root.mkdir(parents=True, exist_ok=True)
|
||||
self.storage_root = self.volumes_root / "8TB"
|
||||
self.storage_root.mkdir(parents=True, exist_ok=True)
|
||||
local_file = self.storage_root / "local.txt"
|
||||
local_file.write_text("local", encoding="utf-8")
|
||||
mtime = 1710000000
|
||||
os.utime(local_file, (mtime, mtime))
|
||||
|
||||
repository = RemoteClientRepository(str(Path(self.temp_dir.name) / "remote-clients.db"))
|
||||
now_iso = "2026-03-26T12:00:00Z"
|
||||
repository.upsert_client(
|
||||
client_id="client-123",
|
||||
display_name="Jan MacBook",
|
||||
platform="macos",
|
||||
agent_version="1.1.0",
|
||||
endpoint="http://agent.test",
|
||||
shares=[{"key": "downloads", "label": "Downloads"}],
|
||||
now_iso=now_iso,
|
||||
)
|
||||
repository.upsert_client(
|
||||
client_id="broken-client",
|
||||
display_name="Offline iMac",
|
||||
platform="macos",
|
||||
agent_version="1.1.0",
|
||||
endpoint="http://broken.test",
|
||||
shares=[{"key": "downloads", "label": "Downloads"}],
|
||||
now_iso=now_iso,
|
||||
)
|
||||
remote_client_service = RemoteClientService(
|
||||
repository=repository,
|
||||
registration_token="secret-token",
|
||||
offline_timeout_seconds=60,
|
||||
now=lambda: datetime(2026, 3, 26, 12, 0, 0, tzinfo=timezone.utc),
|
||||
)
|
||||
remote_file_service = _StubRemoteFileService(
|
||||
remote_client_service,
|
||||
payloads={
|
||||
(
|
||||
"client-123",
|
||||
"/api/info",
|
||||
"downloads",
|
||||
"notes.md",
|
||||
): {
|
||||
"name": "notes.md",
|
||||
"kind": "file",
|
||||
"size": 13,
|
||||
"modified": "2026-03-26T12:00:00Z",
|
||||
"content_type": "text/markdown",
|
||||
"extension": ".md",
|
||||
"width": None,
|
||||
"height": None,
|
||||
"owner": None,
|
||||
"group": None,
|
||||
},
|
||||
(
|
||||
"client-123",
|
||||
"/api/read",
|
||||
"downloads",
|
||||
"notes.md",
|
||||
): {
|
||||
"name": "notes.md",
|
||||
"content_type": "text/markdown",
|
||||
"encoding": "utf-8",
|
||||
"truncated": False,
|
||||
"size": 13,
|
||||
"modified": "2026-03-26T12:00:00Z",
|
||||
"content": "# title\nhello",
|
||||
},
|
||||
},
|
||||
streams={
|
||||
(
|
||||
"client-123",
|
||||
"downloads",
|
||||
"notes.md",
|
||||
): {
|
||||
"headers": {
|
||||
"content-type": "text/markdown; charset=utf-8",
|
||||
"content-disposition": 'attachment; filename="notes.md"',
|
||||
"content-length": "13",
|
||||
},
|
||||
"content": b"# title\nhello",
|
||||
},
|
||||
(
|
||||
"client-123",
|
||||
"downloads",
|
||||
"pixel.png",
|
||||
): {
|
||||
"headers": {
|
||||
"content-type": "image/png",
|
||||
"content-disposition": 'attachment; filename="pixel.png"',
|
||||
"content-length": str(len(PNG_1X1)),
|
||||
},
|
||||
"content": PNG_1X1,
|
||||
},
|
||||
},
|
||||
failing_client_ids={"broken-client"},
|
||||
)
|
||||
browse_service = BrowseService(
|
||||
path_guard=PathGuard({"storage1": str(self.storage_root)}),
|
||||
filesystem=FilesystemAdapter(),
|
||||
)
|
||||
|
||||
async def _override_remote_file_service() -> RemoteFileService:
|
||||
return remote_file_service
|
||||
|
||||
async def _override_browse_service() -> BrowseService:
|
||||
return browse_service
|
||||
|
||||
app.dependency_overrides[get_remote_file_service] = _override_remote_file_service
|
||||
app.dependency_overrides[get_browse_service] = _override_browse_service
|
||||
|
||||
def tearDown(self) -> None:
|
||||
app.dependency_overrides.clear()
|
||||
self.temp_dir.cleanup()
|
||||
|
||||
def _request(self, method: str, url: str, *, params: dict | list[tuple[str, str]] | None = None) -> httpx.Response:
|
||||
async def _run() -> httpx.Response:
|
||||
transport = httpx.ASGITransport(app=app)
|
||||
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
|
||||
return await client.request(method, url, params=params)
|
||||
|
||||
return asyncio.run(_run())
|
||||
|
||||
def test_remote_info_view_image_and_download_work(self) -> None:
|
||||
info_response = self._request("GET", "/api/files/info", params={"path": "/Clients/client-123/downloads/notes.md"})
|
||||
self.assertEqual(info_response.status_code, 200)
|
||||
self.assertEqual(
|
||||
info_response.json(),
|
||||
{
|
||||
"name": "notes.md",
|
||||
"path": "/Clients/client-123/downloads/notes.md",
|
||||
"type": "file",
|
||||
"size": 13,
|
||||
"modified": "2026-03-26T12:00:00Z",
|
||||
"root": "/Clients/client-123/downloads",
|
||||
"extension": ".md",
|
||||
"content_type": "text/markdown",
|
||||
"owner": None,
|
||||
"group": None,
|
||||
"width": None,
|
||||
"height": None,
|
||||
},
|
||||
)
|
||||
|
||||
view_response = self._request("GET", "/api/files/view", params={"path": "/Clients/client-123/downloads/notes.md"})
|
||||
self.assertEqual(view_response.status_code, 200)
|
||||
self.assertEqual(view_response.json()["content"], "# title\nhello")
|
||||
self.assertEqual(view_response.json()["content_type"], "text/markdown")
|
||||
|
||||
image_response = self._request("GET", "/api/files/image", params={"path": "/Clients/client-123/downloads/pixel.png"})
|
||||
self.assertEqual(image_response.status_code, 200)
|
||||
self.assertEqual(image_response.headers.get("content-type"), "image/png")
|
||||
self.assertEqual(image_response.content, PNG_1X1)
|
||||
|
||||
download_response = self._request("GET", "/api/files/download", params=[("path", "/Clients/client-123/downloads/notes.md")])
|
||||
self.assertEqual(download_response.status_code, 200)
|
||||
self.assertEqual(download_response.content, b"# title\nhello")
|
||||
self.assertIn('attachment; filename="notes.md"', download_response.headers.get("content-disposition", ""))
|
||||
|
||||
def test_remote_failure_stays_local_and_volumes_behavior_is_unchanged(self) -> None:
|
||||
failed_response = self._request("GET", "/api/files/info", params={"path": "/Clients/broken-client/downloads/notes.md"})
|
||||
self.assertEqual(failed_response.status_code, 502)
|
||||
self.assertEqual(failed_response.json()["error"]["code"], "remote_client_unreachable")
|
||||
|
||||
volumes_response = self._request("GET", "/api/browse", params={"path": "/Volumes/8TB"})
|
||||
self.assertEqual(volumes_response.status_code, 200)
|
||||
self.assertEqual(volumes_response.json()["path"], "/Volumes/8TB")
|
||||
self.assertEqual([item["name"] for item in volumes_response.json()["files"]], ["local.txt"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -850,7 +850,7 @@ class UiSmokeGoldenTest(unittest.TestCase):
|
||||
self.assertIn('id="download-modal-close-btn"', body)
|
||||
self.assertIn('id="context-menu"', body)
|
||||
self.assertIn('id="context-menu-scope"', body)
|
||||
self.assertIn('id="context-menu-target"', body)
|
||||
self.assertNotIn('id="context-menu-target"', body)
|
||||
self.assertIn('id="context-menu-open-btn"', body)
|
||||
self.assertIn('id="context-menu-edit-btn"', body)
|
||||
self.assertIn('id="context-menu-download-btn"', body)
|
||||
@@ -1133,7 +1133,7 @@ class UiSmokeGoldenTest(unittest.TestCase):
|
||||
self.assertIn('setStatus("Preparing download...");', app_js)
|
||||
self.assertIn('setStatus("Requesting download...");', app_js)
|
||||
self.assertIn('setStatus(zipDownload ? "Preparing download..." : "Requesting download...");', app_js)
|
||||
self.assertIn('setStatus(`Download requested: ${anchor.download}`);', app_js)
|
||||
self.assertIn('setStatus(`Download requested: ${fileName}`);', app_js)
|
||||
self.assertIn('"/api/files/download/archive-prepare"', app_js)
|
||||
self.assertIn('"/api/files/duplicate"', app_js)
|
||||
self.assertIn('"/api/files/delete"', app_js)
|
||||
@@ -1181,24 +1181,26 @@ class UiSmokeGoldenTest(unittest.TestCase):
|
||||
self.assertIn('if (!row) {', app_js)
|
||||
self.assertIn('closeContextMenu();', app_js)
|
||||
self.assertIn('elements.openButton.classList.toggle("hidden", isMulti);', app_js)
|
||||
self.assertIn('const openableSingle = items.length === 1 && isOpenableSelection(items[0]);', app_js)
|
||||
self.assertIn('const openableSingle =', app_js)
|
||||
self.assertIn('items[0].kind === "directory" || isRemoteViewableSelection(items[0])', app_js)
|
||||
self.assertIn('elements.openButton.disabled = !openableSingle;', app_js)
|
||||
self.assertIn('if (item.kind === "directory") {', app_js)
|
||||
self.assertIn('return isImageSelection(item) || isVideoSelection(item);', app_js)
|
||||
self.assertIn('const editableSingle = items.length === 1 && isEditableSelection(items[0]);', app_js)
|
||||
self.assertIn('const editableSingle = items.length === 1 && !remoteSelection && isEditableSelection(items[0]);', app_js)
|
||||
self.assertIn('return [".txt", ".log", ".md", ".yml", ".yaml", ".json", ".js", ".py", ".css", ".html", ".conf"].some((suffix) => lower.endsWith(suffix));', app_js)
|
||||
self.assertIn('if (!item || item.kind !== "file") {', app_js)
|
||||
self.assertIn('elements.editButton.classList.toggle("hidden", isMulti || items.length !== 1 || items[0].kind !== "file");', app_js)
|
||||
self.assertIn('elements.editButton.classList.toggle("hidden", isMulti || items.length !== 1 || items[0].kind !== "file" || remoteSelection);', app_js)
|
||||
self.assertIn('elements.editButton.disabled = !editableSingle;', app_js)
|
||||
self.assertIn('const downloadableSelection = items.length > 0;', app_js)
|
||||
self.assertIn('const downloadableSelection = items.length === 1 && items[0].kind === "file";', app_js)
|
||||
self.assertIn('elements.downloadButton.classList.remove("hidden");', app_js)
|
||||
self.assertIn('elements.downloadButton.disabled = !downloadableSelection;', app_js)
|
||||
self.assertIn('elements.renameButton.classList.toggle("hidden", isMulti);', app_js)
|
||||
self.assertIn('elements.renameButton.classList.toggle("hidden", isMulti || remoteSelection);', app_js)
|
||||
self.assertIn('elements.duplicateButton.classList.remove("hidden");', app_js)
|
||||
self.assertIn('elements.duplicateButton.disabled = items.length === 0;', app_js)
|
||||
self.assertIn('elements.duplicateButton.disabled = remoteSelection || items.length === 0;', app_js)
|
||||
self.assertIn('elements.copyButton.classList.remove("hidden");', app_js)
|
||||
self.assertIn('elements.copyButton.disabled = items.length === 0;', app_js)
|
||||
self.assertIn('elements.copyButton.disabled = remoteSelection || items.length === 0;', app_js)
|
||||
self.assertIn('elements.moveButton.classList.remove("hidden");', app_js)
|
||||
self.assertIn('elements.moveButton.disabled = remoteSelection || items.length === 0;', app_js)
|
||||
self.assertIn('elements.propertiesButton.classList.remove("hidden");', app_js)
|
||||
self.assertIn('elements.propertiesButton.disabled = items.length === 0;', app_js)
|
||||
self.assertIn('openCurrentDirectory();', app_js)
|
||||
@@ -1207,8 +1209,8 @@ class UiSmokeGoldenTest(unittest.TestCase):
|
||||
self.assertIn('const created = await createArchiveDownloadTask(selectedPaths);', app_js)
|
||||
self.assertIn('const task = await waitForArchiveDownloadReady(created.task_id);', app_js)
|
||||
self.assertIn('startArchiveDownload(task.id, task.destination);', app_js)
|
||||
self.assertIn('const { blob, fileName } = await downloadFileRequest(selectedPaths);', app_js)
|
||||
self.assertIn('anchor.download = fileName || selected.name;', app_js)
|
||||
self.assertIn('const response = await downloadFileRequest(selectedPaths);', app_js)
|
||||
self.assertIn('anchor.download = response.fileName || selected.name;', app_js)
|
||||
self.assertIn('openRenamePopup();', app_js)
|
||||
self.assertIn('const result = await createDuplicateTask(selectedItems.map((item) => item.path));', app_js)
|
||||
self.assertIn('showActionSummary("Duplicate", 1, 0, null);', app_js)
|
||||
@@ -1233,7 +1235,7 @@ class UiSmokeGoldenTest(unittest.TestCase):
|
||||
self.assertIn('renderInfoField("Selected items", selectedItems.length);', app_js)
|
||||
self.assertIn('renderInfoField("Files", fileCount);', app_js)
|
||||
self.assertIn('renderInfoField("Directories", directoryCount);', app_js)
|
||||
self.assertIn('document.getElementById("copy-btn").disabled = !hasSelection;', app_js)
|
||||
self.assertIn('document.getElementById("copy-btn").disabled = remoteBrowse || !hasSelection;', app_js)
|
||||
self.assertNotIn('Only files are supported for copy', app_js)
|
||||
self.assertIn('document.getElementById("upload-menu-toggle").onclick = (event) => {', app_js)
|
||||
self.assertIn('document.getElementById("upload-folder-btn").onclick = openFolderPicker;', app_js)
|
||||
|
||||
+182
-37
@@ -141,6 +141,10 @@ const VALID_THEME_FAMILIES = [
|
||||
"fluent-neon",
|
||||
];
|
||||
const VALID_COLOR_MODES = ["dark", "light"];
|
||||
const VIRTUAL_SOURCES = [
|
||||
{ path: "/Volumes", label: "Volumes" },
|
||||
{ path: "/Clients", label: "Clients" },
|
||||
];
|
||||
let searchState = {
|
||||
pane: "left",
|
||||
path: "/Volumes",
|
||||
@@ -200,6 +204,56 @@ function activePaneState() {
|
||||
return paneState(state.activePane);
|
||||
}
|
||||
|
||||
function sourceRootForPath(path) {
|
||||
const normalized = (path || "").trim();
|
||||
if (normalized === "/Clients" || normalized.startsWith("/Clients/")) {
|
||||
return "/Clients";
|
||||
}
|
||||
return "/Volumes";
|
||||
}
|
||||
|
||||
function isRemoteBrowsePath(path) {
|
||||
return sourceRootForPath(path) === "/Clients";
|
||||
}
|
||||
|
||||
function syncSourceSwitchers() {
|
||||
["left", "right"].forEach((pane) => {
|
||||
const container = document.getElementById(`${pane}-source-switcher`);
|
||||
if (!container) {
|
||||
return;
|
||||
}
|
||||
const activeSource = sourceRootForPath(paneState(pane).currentPath);
|
||||
[...container.querySelectorAll("button[data-source-path]")].forEach((button) => {
|
||||
const isActive = button.dataset.sourcePath === activeSource;
|
||||
button.disabled = isActive;
|
||||
button.setAttribute("aria-pressed", isActive ? "true" : "false");
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function ensureSourceSwitchers() {
|
||||
["left", "right"].forEach((pane) => {
|
||||
const toolbar = document.querySelector(`#${pane}-pane .pane-topbar`);
|
||||
if (!toolbar || document.getElementById(`${pane}-source-switcher`)) {
|
||||
return;
|
||||
}
|
||||
const container = document.createElement("div");
|
||||
container.id = `${pane}-source-switcher`;
|
||||
container.className = "pane-source-switcher";
|
||||
VIRTUAL_SOURCES.forEach((source) => {
|
||||
const button = createButton(source.label, () => {
|
||||
setActivePane(pane);
|
||||
navigateTo(pane, source.path);
|
||||
});
|
||||
button.type = "button";
|
||||
button.dataset.sourcePath = source.path;
|
||||
container.append(button);
|
||||
});
|
||||
toolbar.prepend(container);
|
||||
});
|
||||
syncSourceSwitchers();
|
||||
}
|
||||
|
||||
function setStatus(msg) {
|
||||
document.getElementById("status").textContent = msg;
|
||||
}
|
||||
@@ -382,7 +436,6 @@ function contextMenuElements() {
|
||||
return {
|
||||
menu: document.getElementById("context-menu"),
|
||||
scope: document.getElementById("context-menu-scope"),
|
||||
target: document.getElementById("context-menu-target"),
|
||||
openButton: document.getElementById("context-menu-open-btn"),
|
||||
editButton: document.getElementById("context-menu-edit-btn"),
|
||||
downloadButton: document.getElementById("context-menu-download-btn"),
|
||||
@@ -405,6 +458,23 @@ function isOpenableSelection(item) {
|
||||
return isImageSelection(item) || isVideoSelection(item);
|
||||
}
|
||||
|
||||
function isTextPreviewSelection(item) {
|
||||
if (!item || item.kind !== "file") {
|
||||
return false;
|
||||
}
|
||||
const lower = (item.name || "").toLowerCase();
|
||||
if (lower === "dockerfile" || lower === "containerfile") {
|
||||
return true;
|
||||
}
|
||||
return [".txt", ".log", ".ini", ".cfg", ".conf", ".md", ".yml", ".yaml", ".json", ".js", ".py", ".css", ".html"].some((suffix) =>
|
||||
lower.endsWith(suffix)
|
||||
);
|
||||
}
|
||||
|
||||
function isRemoteViewableSelection(item) {
|
||||
return isImageSelection(item) || isTextPreviewSelection(item);
|
||||
}
|
||||
|
||||
function isZipDownloadSelection(items) {
|
||||
return items.length > 1 || (items.length === 1 && items[0].kind === "directory");
|
||||
}
|
||||
@@ -703,7 +773,8 @@ function closeContextMenu() {
|
||||
}
|
||||
elements.menu.classList.add("hidden");
|
||||
elements.scope.textContent = "";
|
||||
elements.target.textContent = "";
|
||||
elements.menu.style.left = "";
|
||||
elements.menu.style.top = "";
|
||||
}
|
||||
|
||||
function openContextMenu(pane, entry, event) {
|
||||
@@ -716,6 +787,7 @@ function openContextMenu(pane, entry, event) {
|
||||
const items = selectedPathsSet.has(entry.path)
|
||||
? selectedItems.map((item) => ({ ...item }))
|
||||
: [selectedEntryFromItem(entry)];
|
||||
const remoteSelection = items.some((item) => isRemoteBrowsePath(item.path));
|
||||
|
||||
contextMenuState.open = true;
|
||||
contextMenuState.pane = pane;
|
||||
@@ -723,34 +795,70 @@ function openContextMenu(pane, entry, event) {
|
||||
contextMenuState.anchorPath = entry.path;
|
||||
|
||||
const isMulti = items.length > 1;
|
||||
const openableSingle = items.length === 1 && isOpenableSelection(items[0]);
|
||||
const editableSingle = items.length === 1 && isEditableSelection(items[0]);
|
||||
const downloadableSelection = items.length > 0;
|
||||
const openableSingle =
|
||||
items.length === 1 && (remoteSelection ? items[0].kind === "directory" || isRemoteViewableSelection(items[0]) : isOpenableSelection(items[0]));
|
||||
const editableSingle = items.length === 1 && !remoteSelection && isEditableSelection(items[0]);
|
||||
const downloadableSelection = items.length === 1 && items[0].kind === "file";
|
||||
elements.scope.textContent = isMulti ? "Multi-selection" : "Single item";
|
||||
elements.target.textContent = isMulti ? `${items.length} selected items` : entry.name;
|
||||
elements.openButton.classList.toggle("hidden", isMulti);
|
||||
elements.openButton.disabled = !openableSingle;
|
||||
elements.editButton.classList.toggle("hidden", isMulti || items.length !== 1 || items[0].kind !== "file");
|
||||
elements.editButton.classList.toggle("hidden", isMulti || items.length !== 1 || items[0].kind !== "file" || remoteSelection);
|
||||
elements.editButton.disabled = !editableSingle;
|
||||
elements.downloadButton.classList.remove("hidden");
|
||||
elements.downloadButton.disabled = !downloadableSelection;
|
||||
elements.renameButton.classList.toggle("hidden", isMulti);
|
||||
elements.renameButton.classList.toggle("hidden", isMulti || remoteSelection);
|
||||
elements.duplicateButton.classList.remove("hidden");
|
||||
elements.duplicateButton.disabled = items.length === 0;
|
||||
elements.duplicateButton.disabled = remoteSelection || items.length === 0;
|
||||
elements.copyButton.classList.remove("hidden");
|
||||
elements.copyButton.disabled = items.length === 0;
|
||||
elements.copyButton.disabled = remoteSelection || items.length === 0;
|
||||
elements.moveButton.classList.remove("hidden");
|
||||
elements.moveButton.disabled = remoteSelection || items.length === 0;
|
||||
elements.deleteButton.classList.remove("hidden");
|
||||
elements.deleteButton.disabled = remoteSelection || items.length === 0;
|
||||
elements.propertiesButton.classList.remove("hidden");
|
||||
elements.propertiesButton.disabled = items.length === 0;
|
||||
|
||||
const menuWidth = 220;
|
||||
const menuHeight = 120;
|
||||
const x = Math.min(event.clientX, window.innerWidth - menuWidth - 12);
|
||||
const y = Math.min(event.clientY, window.innerHeight - menuHeight - 12);
|
||||
elements.menu.style.left = `${Math.max(8, x)}px`;
|
||||
elements.menu.style.top = `${Math.max(8, y)}px`;
|
||||
elements.menu.classList.remove("hidden");
|
||||
positionContextMenu(elements.menu, event.currentTarget, event);
|
||||
}
|
||||
|
||||
function positionContextMenu(menu, rowElement, event) {
|
||||
if (!menu) {
|
||||
return;
|
||||
}
|
||||
const paneElement = rowElement instanceof Element ? rowElement.closest(".pane") : null;
|
||||
const paneRect = paneElement ? paneElement.getBoundingClientRect() : null;
|
||||
const rowRect = rowElement instanceof Element ? rowElement.getBoundingClientRect() : null;
|
||||
const menuRect = menu.getBoundingClientRect();
|
||||
const viewportPadding = 8;
|
||||
const panePadding = 8;
|
||||
|
||||
const minLeft = paneRect ? Math.max(viewportPadding, paneRect.left + panePadding) : viewportPadding;
|
||||
const maxLeft = paneRect
|
||||
? Math.max(minLeft, Math.min(window.innerWidth - viewportPadding - menuRect.width, paneRect.right - panePadding - menuRect.width))
|
||||
: Math.max(minLeft, window.innerWidth - viewportPadding - menuRect.width);
|
||||
const preferredLeft = rowRect ? rowRect.left + 12 : event.clientX;
|
||||
const left = Math.max(minLeft, Math.min(maxLeft, preferredLeft));
|
||||
|
||||
const paneTop = paneRect ? paneRect.top + panePadding : viewportPadding;
|
||||
const paneBottom = paneRect ? paneRect.bottom - panePadding : window.innerHeight - viewportPadding;
|
||||
const rowTop = rowRect ? rowRect.top : event.clientY;
|
||||
const rowBottom = rowRect ? rowRect.bottom : event.clientY;
|
||||
const spaceBelow = paneBottom - rowBottom;
|
||||
const spaceAbove = rowTop - paneTop;
|
||||
|
||||
let top;
|
||||
if (spaceBelow >= menuRect.height || spaceBelow >= spaceAbove) {
|
||||
top = rowBottom;
|
||||
} else if (spaceAbove >= menuRect.height) {
|
||||
top = rowTop - menuRect.height;
|
||||
} else {
|
||||
top = Math.max(paneTop, Math.min(paneBottom - menuRect.height, rowBottom));
|
||||
}
|
||||
top = Math.max(paneTop, Math.min(top, paneBottom - menuRect.height));
|
||||
|
||||
menu.style.left = `${left}px`;
|
||||
menu.style.top = `${top}px`;
|
||||
}
|
||||
|
||||
function applyContextMenuSelection() {
|
||||
@@ -903,17 +1011,23 @@ async function startDownloadSelected() {
|
||||
setStatus(`Download started: ${task.destination}`);
|
||||
return;
|
||||
}
|
||||
const { blob, fileName } = await downloadFileRequest(selectedPaths);
|
||||
const url = URL.createObjectURL(blob);
|
||||
const anchor = document.createElement("a");
|
||||
anchor.href = url;
|
||||
anchor.download = fileName || selected.name;
|
||||
document.body.append(anchor);
|
||||
anchor.click();
|
||||
anchor.remove();
|
||||
URL.revokeObjectURL(url);
|
||||
markSingleFileDownloadRequested(anchor.download, selected.path);
|
||||
setStatus(`Download requested: ${anchor.download}`);
|
||||
let fileName = selected.name;
|
||||
if (isRemoteBrowsePath(selected.path)) {
|
||||
fileName = startDirectSingleFileDownload(selected.path, selected.name).fileName || selected.name;
|
||||
} else {
|
||||
const response = await downloadFileRequest(selectedPaths);
|
||||
const url = URL.createObjectURL(response.blob);
|
||||
const anchor = document.createElement("a");
|
||||
anchor.href = url;
|
||||
anchor.download = response.fileName || selected.name;
|
||||
document.body.append(anchor);
|
||||
anchor.click();
|
||||
anchor.remove();
|
||||
URL.revokeObjectURL(url);
|
||||
fileName = anchor.download || selected.name;
|
||||
}
|
||||
markSingleFileDownloadRequested(fileName, selected.path);
|
||||
setStatus(`Download requested: ${fileName}`);
|
||||
} catch (err) {
|
||||
if (zipDownload) {
|
||||
if (err.code === "download_cancelled") {
|
||||
@@ -1222,6 +1336,18 @@ async function downloadFileRequest(paths) {
|
||||
};
|
||||
}
|
||||
|
||||
function startDirectSingleFileDownload(path, fallbackName) {
|
||||
const anchor = document.createElement("a");
|
||||
anchor.href = `/api/files/download?${new URLSearchParams({ path }).toString()}`;
|
||||
anchor.download = fallbackName || "";
|
||||
document.body.append(anchor);
|
||||
anchor.click();
|
||||
anchor.remove();
|
||||
return {
|
||||
fileName: anchor.download || fallbackName || null,
|
||||
};
|
||||
}
|
||||
|
||||
async function createArchiveDownloadTask(paths) {
|
||||
return apiRequest("POST", "/api/files/download/archive-prepare", { paths });
|
||||
}
|
||||
@@ -2050,12 +2176,18 @@ function updateActionButtons() {
|
||||
const hasSelection = count > 0;
|
||||
const exactlyOne = count === 1;
|
||||
const allFiles = hasSelection && selectedItems.every((item) => item.kind === "file");
|
||||
document.getElementById("view-btn").disabled = !exactlyOne || !allFiles;
|
||||
document.getElementById("edit-btn").disabled = !exactlyOne || !allFiles || !isEditableSelection(selectedItems[0] || null);
|
||||
document.getElementById("rename-btn").disabled = !exactlyOne;
|
||||
document.getElementById("delete-btn").disabled = !hasSelection;
|
||||
document.getElementById("copy-btn").disabled = !hasSelection;
|
||||
document.getElementById("move-btn").disabled = !hasSelection;
|
||||
const remoteBrowse = isRemoteBrowsePath(activePaneState().currentPath);
|
||||
const remoteViewable = exactlyOne && isRemoteViewableSelection(selectedItems[0] || null);
|
||||
document.getElementById("view-btn").disabled = remoteBrowse ? !remoteViewable : !exactlyOne || !allFiles;
|
||||
document.getElementById("edit-btn").disabled = remoteBrowse || !exactlyOne || !allFiles || !isEditableSelection(selectedItems[0] || null);
|
||||
document.getElementById("rename-btn").disabled = remoteBrowse || !exactlyOne;
|
||||
document.getElementById("delete-btn").disabled = remoteBrowse || !hasSelection;
|
||||
document.getElementById("copy-btn").disabled = remoteBrowse || !hasSelection;
|
||||
document.getElementById("move-btn").disabled = remoteBrowse || !hasSelection;
|
||||
document.getElementById("mkdir-btn").disabled = remoteBrowse;
|
||||
document.getElementById("upload-btn").disabled = remoteBrowse;
|
||||
document.getElementById("upload-menu-toggle").disabled = remoteBrowse;
|
||||
document.getElementById("upload-folder-btn").disabled = remoteBrowse;
|
||||
}
|
||||
|
||||
function isEditableSelection(item) {
|
||||
@@ -2208,7 +2340,7 @@ function currentParentPath(path) {
|
||||
if (!normalized) {
|
||||
return null;
|
||||
}
|
||||
if (normalized === "/Volumes") {
|
||||
if (normalized === "/Volumes" || normalized === "/Clients") {
|
||||
return null;
|
||||
}
|
||||
if (normalized.startsWith("/")) {
|
||||
@@ -2287,16 +2419,17 @@ function renderBreadcrumbs(pane, path) {
|
||||
const isHostPath = normalized.startsWith("/");
|
||||
const parts = normalized.split("/").filter(Boolean);
|
||||
if (isHostPath) {
|
||||
const rootTarget = parts.length > 0 ? `/${parts[0]}` : "/Volumes";
|
||||
const rootCrumb = createButton("/", () => {
|
||||
setActivePane(pane);
|
||||
navigateTo(pane, "/Volumes");
|
||||
navigateTo(pane, rootTarget);
|
||||
});
|
||||
rootCrumb.type = "button";
|
||||
rootCrumb.onclick = (ev) => {
|
||||
ev.preventDefault();
|
||||
ev.stopPropagation();
|
||||
setActivePane(pane);
|
||||
navigateTo(pane, "/Volumes");
|
||||
navigateTo(pane, rootTarget);
|
||||
};
|
||||
nav.append(rootCrumb);
|
||||
if (parts.length > 0) {
|
||||
@@ -2619,6 +2752,7 @@ async function loadBrowsePane(pane) {
|
||||
});
|
||||
const data = await apiRequest("GET", `/api/browse?${query.toString()}`);
|
||||
model.currentPath = data.path;
|
||||
syncSourceSwitchers();
|
||||
renderBreadcrumbs(pane, data.path);
|
||||
|
||||
const visibleItems = [];
|
||||
@@ -2682,6 +2816,8 @@ function navigateTo(pane, path) {
|
||||
model.currentRowIndex = 0;
|
||||
clearSelectionAnchor(pane);
|
||||
setSelectedItem(pane, null);
|
||||
syncSourceSwitchers();
|
||||
updateActionButtons();
|
||||
loadBrowsePane(pane);
|
||||
}
|
||||
|
||||
@@ -4625,6 +4761,14 @@ function openViewer() {
|
||||
return;
|
||||
}
|
||||
const selected = selectedItems[0];
|
||||
if (isRemoteBrowsePath(selected.path)) {
|
||||
if (isImageSelection(selected)) {
|
||||
openImageViewer();
|
||||
return;
|
||||
}
|
||||
openTextViewer();
|
||||
return;
|
||||
}
|
||||
if (isImageSelection(selected)) {
|
||||
openImageViewer();
|
||||
return;
|
||||
@@ -4726,7 +4870,7 @@ function openCurrentDirectory() {
|
||||
openImageViewer();
|
||||
return;
|
||||
}
|
||||
if (isVideoSelection(item)) {
|
||||
if (!isRemoteBrowsePath(item.path) && isVideoSelection(item)) {
|
||||
openVideoViewer();
|
||||
}
|
||||
}
|
||||
@@ -5305,6 +5449,7 @@ async function init() {
|
||||
setError("actions-error", "");
|
||||
applyTheme("default", "dark");
|
||||
setActivePane("left");
|
||||
ensureSourceSwitchers();
|
||||
setupEvents();
|
||||
await loadSettings();
|
||||
applyTheme(settingsState.selectedTheme, settingsState.selectedColorMode);
|
||||
|
||||
+8
-12
@@ -907,7 +907,11 @@ button:disabled {
|
||||
|
||||
.context-menu {
|
||||
position: fixed;
|
||||
min-width: 220px;
|
||||
display: inline-flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
width: max-content;
|
||||
max-width: min(196px, calc(100vw - 24px));
|
||||
padding: 8px;
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: var(--radius-sm);
|
||||
@@ -924,24 +928,16 @@ button:disabled {
|
||||
color: var(--color-text-muted);
|
||||
}
|
||||
|
||||
.context-menu-target {
|
||||
margin-top: 4px;
|
||||
font-size: 12px;
|
||||
color: var(--color-text-primary);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.context-menu-separator {
|
||||
height: 1px;
|
||||
margin: 8px 0;
|
||||
margin: 6px 0 8px;
|
||||
background: var(--color-border);
|
||||
}
|
||||
|
||||
.context-menu button {
|
||||
width: 100%;
|
||||
width: auto;
|
||||
justify-content: flex-start;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
#upload-modal .popup-card {
|
||||
|
||||
@@ -161,7 +161,6 @@
|
||||
|
||||
<div id="context-menu" class="context-menu hidden" role="menu" aria-label="Item context menu">
|
||||
<div id="context-menu-scope" class="context-menu-scope"></div>
|
||||
<div id="context-menu-target" class="context-menu-target"></div>
|
||||
<div class="context-menu-separator"></div>
|
||||
<button id="context-menu-open-btn" type="button" role="menuitem">Open</button>
|
||||
<button id="context-menu-edit-btn" type="button" role="menuitem">Edit</button>
|
||||
|
||||
Reference in New Issue
Block a user