Phase 6 - Controlled Rename Execute API (confirmed rename based on preview) completed
This commit is contained in:
@@ -2,7 +2,8 @@ APP_HOST=0.0.0.0
|
||||
APP_PORT=8080
|
||||
|
||||
APP_DATA_DIR=/app/data
|
||||
MEDIA_ROOT=/data/media
|
||||
#MEDIA_ROOT=/data/media
|
||||
ALLOWED_MEDIA_ROOTS=/Volumes/8TB/Shared_Folders/TV_Shows,/Volumes/8TB_RAID1/Shared_Folders/Library/TV_Shows
|
||||
|
||||
TVDB_API_KEY=2c951d0c-0b7e-405b-bdb2-e250491dc69d
|
||||
TVDB_PIN=
|
||||
|
||||
@@ -157,3 +157,16 @@ def get_filename_preview(session_id: str = Query("default", min_length=1)):
|
||||
return service.build_filename_preview(normalized_session_id)
|
||||
except ValueError as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc))
|
||||
|
||||
|
||||
@router.post("/rename-execute")
def rename_execute(
    session_id: str = Query("default", min_length=1),
    confirm: bool = Query(False),
):
    """Execute a previously previewed rename for the given session.

    The service refuses to run without ``confirm=true``; that refusal (and any
    other validation failure) surfaces as an HTTP 400 with the service's message.
    """
    normalized_session_id = _normalize_session_id(session_id)
    service = SessionService()
    try:
        return service.execute_rename(normalized_session_id, confirm=confirm)
    except ValueError as exc:
        # The service signals all validation problems via ValueError.
        raise HTTPException(status_code=400, detail=str(exc))
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
@@ -401,3 +402,152 @@ class SessionService:
|
||||
"template": "{series} ({year}) - S{season:02}E{episode:02} - {title}{ext}",
|
||||
"items": previews,
|
||||
}
|
||||
|
||||
def execute_rename(self, session_id: str, confirm: bool) -> dict:
    """Preflight and execute the previewed renames for *session_id*.

    Two-pass design: first every mapping is preflighted and NOTHING is written
    unless all mappings pass; only then are the renames executed.

    Args:
        session_id: Normalized session identifier.
        confirm: Must be truthy; a safety latch against accidental execution.

    Returns:
        A dict with ``executed``/``preflight_ok`` flags, the preview ``counts``,
        and per-item results (``status`` one of ``ready``/``preflight_error``
        during preflight, ``renamed``/``rename_error``/``skipped`` after execution).

    Raises:
        ValueError: When ``confirm`` is falsy.
    """
    if not confirm:
        raise ValueError("confirm=true is required to execute rename")

    preview = self.build_filename_preview(session_id)
    allowed_roots = self._allowed_media_roots()

    # Pass 1a: resolve every source/destination pair and count destinations so
    # we can detect two sources mapping to the same target file in one batch
    # (per-item preflight alone cannot see that; os.replace would silently
    # clobber the first rename with the second).
    mappings = []
    destination_counts: dict[str, int] = {}
    for item in preview["items"]:
        source_path_str = str(item["file"].get("path") or "").strip()
        proposed_filename = item["proposed_filename"]
        source_path = Path(source_path_str)
        destination_path = (
            source_path.with_name(proposed_filename) if source_path_str else Path("")
        )
        if source_path_str:
            key = str(destination_path)
            destination_counts[key] = destination_counts.get(key, 0) + 1
        mappings.append((item, source_path_str, source_path, destination_path, proposed_filename))

    # Pass 1b: preflight each mapping.
    preflight_items = []
    preflight_error_count = 0
    for item, source_path_str, source_path, destination_path, proposed_filename in mappings:
        errors = self._preflight_errors(
            source_path=source_path,
            destination_path=destination_path,
            proposed_filename=proposed_filename,
            allowed_roots=allowed_roots,
        )
        if source_path_str and destination_counts.get(str(destination_path), 0) > 1:
            errors.append("duplicate destination path within batch")

        status = "preflight_error" if errors else "ready"
        if errors:
            preflight_error_count += 1

        preflight_items.append(
            {
                "index": item["index"],
                "episode_selection_id": item["episode_selection_id"],
                "file_selection_id": item["file_selection_id"],
                "source_path": source_path_str,
                "destination_path": str(destination_path) if source_path_str else "",
                "proposed_filename": proposed_filename,
                "status": status,
                "errors": errors,
            }
        )

    if preflight_error_count > 0:
        # All-or-nothing: a single bad mapping blocks every write.
        return {
            "session_id": session_id,
            "confirm": confirm,
            "executed": False,
            "preflight_ok": False,
            "counts": preview["counts"],
            "items": preflight_items,
        }

    # Pass 2: execute. An OSError (permissions, device gone, cross-device move)
    # must not escape as an unhandled 500 while leaving earlier renames applied
    # and unreported: record the failure, skip the rest, and report per item.
    results = []
    failed = False
    for item in preflight_items:
        if failed:
            results.append(
                {
                    **item,
                    "status": "skipped",
                    "errors": ["skipped due to earlier rename failure"],
                }
            )
            continue
        try:
            os.replace(item["source_path"], item["destination_path"])
        except OSError as exc:
            failed = True
            results.append({**item, "status": "rename_error", "errors": [str(exc)]})
            continue
        results.append({**item, "status": "renamed", "errors": []})

    return {
        "session_id": session_id,
        "confirm": confirm,
        "executed": True,
        "preflight_ok": True,
        "counts": preview["counts"],
        "items": results,
    }
|
||||
|
||||
def _allowed_media_roots(self) -> list[Path]:
    """Resolve the directories under which renames are permitted.

    Precedence: ``ALLOWED_MEDIA_ROOTS`` (comma separated) over ``MEDIA_ROOT``
    over the built-in default locations. Entries that cannot be resolved are
    dropped rather than aborting the whole lookup.
    """
    env_roots = os.getenv("ALLOWED_MEDIA_ROOTS", "").strip()
    if env_roots:
        candidates = [part.strip() for part in env_roots.split(",") if part.strip()]
    else:
        single_root = os.getenv("MEDIA_ROOT", "").strip()
        if single_root:
            candidates = [single_root]
        else:
            # Last-resort defaults; kept for parity with the deployment env file.
            candidates = [
                "/Volumes/8TB/Shared_Folders/TV_Shows",
                "/Volumes/8TB_RAID1/Shared_Folders/Library/TV_Shows",
            ]

    resolved: list[Path] = []
    for raw_path in candidates:
        try:
            resolved.append(Path(raw_path).resolve())
        except Exception:
            continue
    return resolved
|
||||
|
||||
def _is_within_allowed_roots(self, path: Path, allowed_roots: list[Path]) -> bool:
    """Return True when *path* resolves to a location inside any allowed root.

    An unresolvable path is treated as outside every root (fail closed).
    """
    try:
        resolved = path.resolve()
    except Exception:
        return False

    def _inside(root: Path) -> bool:
        # relative_to raises ValueError when `resolved` is not under `root`.
        try:
            resolved.relative_to(root)
        except ValueError:
            return False
        return True

    return any(_inside(root) for root in allowed_roots)
|
||||
|
||||
def _preflight_errors(
    self,
    source_path: Path,
    destination_path: Path,
    proposed_filename: str,
    allowed_roots: list[Path],
) -> list[str]:
    """Collect every reason a single rename mapping is unsafe to execute.

    Returns an empty list when the mapping passes all checks; otherwise one
    human-readable message per failed check.
    """
    # A missing source makes every other check meaningless.
    if not str(source_path):
        return ["source path missing"]

    problems: list[str] = []

    if ".." in source_path.parts:
        problems.append("source path traversal is not allowed")
    proposed = Path(proposed_filename)
    if ".." in proposed.parts:
        problems.append("destination filename traversal is not allowed")
    if proposed.name != proposed_filename:
        problems.append("destination filename must not contain path separators")

    # Both endpoints must live inside a configured media root.
    for label, candidate in (("source", source_path), ("destination", destination_path)):
        if not self._is_within_allowed_roots(candidate, allowed_roots):
            problems.append(f"{label} path is outside allowed media roots")

    if not source_path.exists():
        problems.append("source file does not exist")
    elif not source_path.is_file():
        problems.append("source path is not a file")

    if source_path == destination_path:
        problems.append("source and destination paths are equal")

    # Never overwrite; never create parent directories implicitly.
    if destination_path.exists():
        problems.append("destination file already exists")
    if not destination_path.parent.exists():
        problems.append("destination parent directory does not exist")

    return problems
|
||||
|
||||
Binary file not shown.
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"token": null,
|
||||
"token_type": "Bearer",
|
||||
"issued_at": null,
|
||||
"expires_at": null,
|
||||
"expires_at_unix": null,
|
||||
"renew_after": null,
|
||||
"renew_after_unix": null,
|
||||
"last_login_attempt_at": null,
|
||||
"last_login_success_at": null,
|
||||
"last_login_status": null,
|
||||
"last_login_error": null,
|
||||
"jwt_payload": {}
|
||||
}
|
||||
+14
-10
@@ -1,14 +1,18 @@
|
||||
{
|
||||
"token": null,
|
||||
"token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJhZ2UiOiIiLCJhcGlrZXkiOiIyYzk1MWQwYy0wYjdlLTQwNWItYmRiMi1lMjUwNDkxZGM2OWQiLCJjb21tdW5pdHlfc3VwcG9ydGVkIjpmYWxzZSwiZXhwIjoxNzc1NTIwNjgyLCJnZW5kZXIiOiIiLCJoaXRzX3Blcl9kYXkiOjEwMDAwMDAwMCwiaGl0c19wZXJfbW9udGgiOjEwMDAwMDAwMCwiaWQiOiIyOTYyODU5IiwiaXNfbW9kIjpmYWxzZSwiaXNfc3lzdGVtX2tleSI6ZmFsc2UsImlzX3RydXN0ZWQiOmZhbHNlLCJwaW4iOm51bGwsInJvbGVzIjpbXSwidGVuYW50IjoidHZkYiIsInV1aWQiOiIifQ.iwXEe7EYpxh8VzO8UPa2lfdPHvjJaQvl1P1TtF6BqZjaiQiCzmiY_jHZkQIzYHvNF0cViNWO-ZmyAIScocvVA7C2DroOBU0xATha9rcfHO1L3Aqzq6ne6nq-efsjvoaJ2bsm73_mpebXEeURa5F9_kNoto1ghIDYpMDH64Q_RisP5cJZFi7580OR9W3ubTq65UZxH8M4cyVefShsWDXI8q5ujU7Iuddm_vZ6bAOd4g36zppyQ3EragsOOZQ2IzCERlphJLlOfr0RN2AYMDusjk-b-ycZNyOBSzrmCatIcy1NzFC8qE2sy8pLD9iwuh_hujcZIA0ZRxo8XPQQH-zWqY7WY5HVI1W0KdUtbgaWosa9jZmhcIL3q2XkxrRTbObAY636VXxCSSYMb765jcHXX0YWIoiGH1wuCHMrXsUqnRd1eGYrZxuKIUTVYZ6bKWyruCJEmF_EOU01zDWTMVkmRFJCVo8cMqxYi9fUc1yaeS3aNA7djeYu2Gd3FQLrK50z-8N_i1ZgKEWQJcw6EWn4RICzVePTct1WiUhZJzxoNabNB4EElWoLPFVPSElV_G_oA0jIDr6_lxM4HbYljdf7jYCj0BwhqXWZB5u5RGR_73qm43L2-EI3N6smf565HtIMgtkigAPfhIFwMKeAnGMM8sbL2dJGbwBXBQZnpX-7uQA",
|
||||
"token_type": "Bearer",
|
||||
"issued_at": null,
|
||||
"expires_at": null,
|
||||
"expires_at_unix": null,
|
||||
"renew_after": null,
|
||||
"renew_after_unix": null,
|
||||
"last_login_attempt_at": null,
|
||||
"last_login_success_at": null,
|
||||
"last_login_status": null,
|
||||
"expires_at": "2026-04-07T00:11:22+00:00",
|
||||
"expires_at_unix": 1775520682,
|
||||
"renew_after": "2026-04-04T00:11:22+00:00",
|
||||
"renew_after_unix": 1775261482,
|
||||
"last_login_attempt_at": "2026-03-07T14:11:22+00:00",
|
||||
"last_login_success_at": "2026-03-07T14:11:22+00:00",
|
||||
"last_login_status": "ok",
|
||||
"last_login_error": null,
|
||||
"jwt_payload": {}
|
||||
}
|
||||
"jwt_payload": {
|
||||
"exp": 1775520682,
|
||||
"iat": null
|
||||
},
|
||||
"token_source": "cached"
|
||||
}
|
||||
Executable
+216
@@ -0,0 +1,216 @@
|
||||
#!/usr/bin/env bash
# Feature tests for the controlled rename-execute API (phase 6).
#   1. confirm=true executes the renames
#   2. missing confirm fails with HTTP 400 and writes nothing
#   3. a preflight failure (destination conflict) blocks ALL writes
set -euo pipefail

# --- Resolve BASE_URL: explicit env var wins, otherwise probe known hosts. ---
if [ -z "${BASE_URL:-}" ]; then
  if curl --silent --fail http://127.0.0.1:8085/api/health >/dev/null 2>&1; then
    BASE_URL="http://127.0.0.1:8085"
  elif curl --silent --fail http://host.containers.internal:8085/api/health >/dev/null 2>&1; then
    BASE_URL="http://host.containers.internal:8085"
  else
    echo "ERROR: could not determine BASE_URL. Tried 127.0.0.1 and host.containers.internal." >&2
    exit 1
  fi
fi

# --- Pick a writable allowed media root unless the caller supplied one. ---
if [ -z "${TEST_MEDIA_ROOT:-}" ]; then
  for candidate in \
    "/Volumes/8TB/Shared_Folders/TV_Shows" \
    "/Volumes/8TB_RAID1/Shared_Folders/Library/TV_Shows"
  do
    if [ -d "$candidate" ] && [ -w "$candidate" ]; then
      TEST_MEDIA_ROOT="$candidate"
      break
    fi
  done
fi

if [ -z "${TEST_MEDIA_ROOT:-}" ]; then
  echo "ERROR: no writable allowed media root found. Set TEST_MEDIA_ROOT." >&2
  exit 1
fi

TEST_DIR="${TEST_MEDIA_ROOT}/_rename_mvp_phase6_$(date +%s)_$$"
mkdir -p "${TEST_DIR}"

TMP_DIR="$(mktemp -d)"
# FIX: the old trap removed only TMP_DIR, so every run left a _rename_mvp_*
# directory behind inside the real media library. Clean up both.
cleanup() {
  rm -rf "$TMP_DIR" "$TEST_DIR"
}
trap cleanup EXIT

SESSION_ID_1="rename-exec-ok-$(date +%s)-$$"
SESSION_ID_2="rename-exec-no-confirm-$(date +%s)-$$"
SESSION_ID_3="rename-exec-preflight-$(date +%s)-$$"

# Remove any selected episodes/files left over in the given session.
clear_session() {
  local sid="$1"
  curl --fail --silent --show-error -X DELETE \
    "${BASE_URL}/api/session/selected-episodes?session_id=${sid}" \
    >/dev/null
  curl --fail --silent --show-error -X DELETE \
    "${BASE_URL}/api/session/selected-files?session_id=${sid}" \
    >/dev/null
}

# Seed a session with two fixed episodes and the two given media files.
add_payloads() {
  local sid="$1"
  local file1="$2"
  local file2="$3"

  cat > "${TMP_DIR}/episodes_${sid}.json" <<'JSON'
{
  "items": [
    {
      "id": 1,
      "series": "Elsbeth",
      "year": "2024",
      "season_number": 1,
      "episode_number": 1,
      "title": "Pilot"
    },
    {
      "id": 2,
      "series": "Elsbeth",
      "year": "2024",
      "season_number": 1,
      "episode_number": 2,
      "title": "Second Episode"
    }
  ]
}
JSON

  cat > "${TMP_DIR}/files_${sid}.json" <<JSON
{
  "items": [
    {
      "path": "${file1}",
      "name": "$(basename "${file1}")"
    },
    {
      "path": "${file2}",
      "name": "$(basename "${file2}")"
    }
  ]
}
JSON

  curl --fail --silent --show-error \
    -X POST "${BASE_URL}/api/session/selected-episodes?session_id=${sid}" \
    -H "Content-Type: application/json" \
    --data @"${TMP_DIR}/episodes_${sid}.json" \
    >/dev/null

  curl --fail --silent --show-error \
    -X POST "${BASE_URL}/api/session/selected-files?session_id=${sid}" \
    -H "Content-Type: application/json" \
    --data @"${TMP_DIR}/files_${sid}.json" \
    >/dev/null
}

echo "== Feature test 1: confirmed execute performs renames =="
SRC1="${TEST_DIR}/src1.mkv"
SRC2="${TEST_DIR}/src2.mp4"
printf "a" > "${SRC1}"
printf "b" > "${SRC2}"

clear_session "${SESSION_ID_1}"
add_payloads "${SESSION_ID_1}" "${SRC1}" "${SRC2}"

curl --fail --silent --show-error \
  -X POST "${BASE_URL}/api/session/rename-execute?session_id=${SESSION_ID_1}&confirm=true" \
  -o "${TMP_DIR}/rename_ok.json"

cat "${TMP_DIR}/rename_ok.json"

python3 - "${TMP_DIR}/rename_ok.json" <<'PY'
import json
import sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
assert data.get("executed") is True, "execute should be true"
assert data.get("preflight_ok") is True, "preflight should be true"
assert len(data.get("items", [])) == 2, "expected 2 item results"
assert all(item.get("status") == "renamed" for item in data["items"]), "all items should be renamed"
print("confirmed rename validation passed")
PY

# Verify the filesystem matches the API's claim: sources gone, targets present.
DST1="${TEST_DIR}/Elsbeth (2024) - S01E01 - Pilot.mkv"
DST2="${TEST_DIR}/Elsbeth (2024) - S01E02 - Second Episode.mp4"
test ! -f "${SRC1}"
test ! -f "${SRC2}"
test -f "${DST1}"
test -f "${DST2}"

echo
echo "== Feature test 2: missing confirm fails safely without writes =="
SRC3="${TEST_DIR}/src3.mkv"
SRC4="${TEST_DIR}/src4.mp4"
printf "c" > "${SRC3}"
printf "d" > "${SRC4}"

clear_session "${SESSION_ID_2}"
add_payloads "${SESSION_ID_2}" "${SRC3}" "${SRC4}"

# No --fail here: the 400 response body and status code are what we assert on.
curl --silent --show-error \
  -o "${TMP_DIR}/rename_no_confirm.json" \
  -w "%{http_code}" \
  -X POST "${BASE_URL}/api/session/rename-execute?session_id=${SESSION_ID_2}" \
  > "${TMP_DIR}/rename_no_confirm.status"

cat "${TMP_DIR}/rename_no_confirm.json"

python3 - "${TMP_DIR}/rename_no_confirm.status" "${TMP_DIR}/rename_no_confirm.json" <<'PY'
import json
import sys
from pathlib import Path

status = Path(sys.argv[1]).read_text(encoding="utf-8").strip()
data = json.loads(Path(sys.argv[2]).read_text(encoding="utf-8"))
assert status == "400", f"expected HTTP 400, got {status}"
assert "detail" in data, "error response missing detail"
assert "confirm=true" in data["detail"], "detail should mention confirm=true requirement"
print("missing confirm validation passed")
PY

test -f "${SRC3}"
test -f "${SRC4}"

echo
echo "== Feature test 3: preflight failure blocks all writes =="
SRC5="${TEST_DIR}/src5.mkv"
SRC6="${TEST_DIR}/src6.mp4"
printf "e" > "${SRC5}"
printf "f" > "${SRC6}"

# Force destination conflict for first mapping.
CONFLICT1="${TEST_DIR}/Elsbeth (2024) - S01E01 - Pilot.mkv"
printf "existing" > "${CONFLICT1}"

clear_session "${SESSION_ID_3}"
add_payloads "${SESSION_ID_3}" "${SRC5}" "${SRC6}"

curl --fail --silent --show-error \
  -X POST "${BASE_URL}/api/session/rename-execute?session_id=${SESSION_ID_3}&confirm=true" \
  -o "${TMP_DIR}/rename_preflight_fail.json"

cat "${TMP_DIR}/rename_preflight_fail.json"

python3 - "${TMP_DIR}/rename_preflight_fail.json" <<'PY'
import json
import sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
assert data.get("executed") is False, "execute should be false on preflight failure"
assert data.get("preflight_ok") is False, "preflight should be false"
items = data.get("items", [])
assert len(items) == 2, "expected 2 item statuses"
assert any("destination file already exists" in " ".join(i.get("errors", [])) for i in items), \
    "expected destination conflict error"
print("preflight failure validation passed")
PY

test -f "${SRC5}"
test -f "${SRC6}"

echo
echo "All rename execute feature tests passed."
|
||||
Reference in New Issue
Block a user