fase 7 afgerond

This commit is contained in:
kodi
2026-03-07 15:50:58 +01:00
parent cfd18d7535
commit 1ed4d5cf52
4 changed files with 487 additions and 2 deletions
+23
View File
@@ -170,3 +170,26 @@ def rename_execute(
return service.execute_rename(normalized_session_id, confirm=confirm)
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc))
@router.get("/rename-log")
def get_rename_log(
    session_id: str = Query("default", min_length=1),
    limit: int = Query(20, ge=1, le=200),
):
    """List the most recent rename runs for a session.

    The session id is normalized before lookup; ``limit`` (1-200) caps
    how many runs the service layer returns.
    """
    sid = _normalize_session_id(session_id)
    runs = SessionService().list_rename_runs(sid, limit=limit)
    return {"session_id": sid, "items": runs}
@router.get("/rename-log/{run_id}")
def get_rename_log_run(run_id: int):
    """Return the full detail of a single rename run, or 404 if unknown."""
    record = SessionService().get_rename_run(run_id)
    if record is not None:
        return record
    raise HTTPException(status_code=404, detail="rename run not found")
+253 -2
View File
@@ -1,6 +1,8 @@
import json
import os
import sqlite3
import time
from datetime import datetime, timezone
from pathlib import Path
from app.config import APP_DATA_DIR
@@ -63,6 +65,50 @@ class SessionService:
ON selected_files(session_id)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS rename_runs (
run_id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
created_at TEXT NOT NULL,
confirm INTEGER NOT NULL,
executed INTEGER NOT NULL,
preflight_ok INTEGER NOT NULL,
episodes_count INTEGER NOT NULL,
files_count INTEGER NOT NULL,
duration_ms INTEGER NOT NULL
)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_rename_runs_session
ON rename_runs(session_id)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS rename_run_items (
id INTEGER PRIMARY KEY AUTOINCREMENT,
run_id INTEGER NOT NULL,
item_index INTEGER NOT NULL,
episode_selection_id INTEGER,
file_selection_id INTEGER,
source_path TEXT NOT NULL,
destination_path TEXT NOT NULL,
proposed_filename TEXT NOT NULL,
status TEXT NOT NULL,
errors_json TEXT NOT NULL,
FOREIGN KEY(run_id) REFERENCES rename_runs(run_id)
)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_rename_run_items_run
ON rename_run_items(run_id, item_index)
"""
)
def list_selected_episodes(self, session_id: str) -> list[dict]:
with self._connect() as conn:
@@ -407,6 +453,7 @@ class SessionService:
if not confirm:
raise ValueError("confirm=true is required to execute rename")
started_at = time.perf_counter()
preview = self.build_filename_preview(session_id)
allowed_roots = self._allowed_media_roots()
preflight_items = []
@@ -444,7 +491,7 @@ class SessionService:
)
if preflight_errors > 0:
return {
result = {
"session_id": session_id,
"confirm": confirm,
"executed": False,
@@ -452,6 +499,12 @@ class SessionService:
"counts": preview["counts"],
"items": preflight_items,
}
self._log_rename_run(
session_id=session_id,
result=result,
duration_ms=int((time.perf_counter() - started_at) * 1000),
)
return result
results = []
for item in preflight_items:
@@ -466,7 +519,7 @@ class SessionService:
}
)
return {
result = {
"session_id": session_id,
"confirm": confirm,
"executed": True,
@@ -474,6 +527,204 @@ class SessionService:
"counts": preview["counts"],
"items": results,
}
self._log_rename_run(
session_id=session_id,
result=result,
duration_ms=int((time.perf_counter() - started_at) * 1000),
)
return result
def _log_rename_run(self, session_id: str, result: dict, duration_ms: int) -> None:
    """Persist an audit record for one rename attempt.

    Writes one header row into ``rename_runs`` and one row per result
    item into ``rename_run_items``, all on a single connection so the
    context manager commits them together.
    """
    stamp = datetime.now(timezone.utc).isoformat()
    totals = result.get("counts", {})
    with self._connect() as conn:
        cur = conn.execute(
            """
            INSERT INTO rename_runs (
                session_id, created_at, confirm, executed, preflight_ok,
                episodes_count, files_count, duration_ms
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                session_id,
                stamp,
                # booleans are stored as 0/1 integers
                int(bool(result.get("confirm"))),
                int(bool(result.get("executed"))),
                int(bool(result.get("preflight_ok"))),
                int(totals.get("episodes") or 0),
                int(totals.get("files") or 0),
                int(duration_ms),
            ),
        )
        new_run_id = int(cur.lastrowid)
        # Build all item tuples up front, then insert them in one batch.
        item_rows = [
            (
                new_run_id,
                int(entry.get("index") or 0),
                entry.get("episode_selection_id"),
                entry.get("file_selection_id"),
                str(entry.get("source_path") or ""),
                str(entry.get("destination_path") or ""),
                str(entry.get("proposed_filename") or ""),
                str(entry.get("status") or ""),
                json.dumps(entry.get("errors") or [], ensure_ascii=True),
            )
            for entry in result.get("items", [])
        ]
        conn.executemany(
            """
            INSERT INTO rename_run_items (
                run_id, item_index, episode_selection_id, file_selection_id,
                source_path, destination_path, proposed_filename, status,
                errors_json
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            item_rows,
        )
def list_rename_runs(self, session_id: str, limit: int = 20) -> list[dict]:
    """Return up to ``limit`` rename runs for a session, newest first.

    Each run dict carries the header fields plus ``items_count``, the
    number of per-file rows logged for that run.
    """
    with self._connect() as conn:
        run_rows = conn.execute(
            """
            SELECT run_id, session_id, created_at, confirm, executed,
                   preflight_ok, episodes_count, files_count, duration_ms
            FROM rename_runs
            WHERE session_id = ?
            ORDER BY run_id DESC
            LIMIT ?
            """,
            (session_id, limit),
        ).fetchall()
        # Item counts for the same page of runs, keyed by run_id.
        count_rows = conn.execute(
            """
            SELECT run_id, COUNT(*) AS item_count
            FROM rename_run_items
            WHERE run_id IN (
                SELECT run_id
                FROM rename_runs
                WHERE session_id = ?
                ORDER BY run_id DESC
                LIMIT ?
            )
            GROUP BY run_id
            """,
            (session_id, limit),
        ).fetchall()
    items_per_run = {int(r["run_id"]): int(r["item_count"]) for r in count_rows}
    return [
        {
            "run_id": int(r["run_id"]),
            "session_id": r["session_id"],
            "created_at": r["created_at"],
            "confirm": bool(r["confirm"]),
            "executed": bool(r["executed"]),
            "preflight_ok": bool(r["preflight_ok"]),
            "counts": {
                "episodes": int(r["episodes_count"]),
                "files": int(r["files_count"]),
            },
            "duration_ms": int(r["duration_ms"]),
            "items_count": items_per_run.get(int(r["run_id"]), 0),
        }
        for r in run_rows
    ]
def get_rename_run(self, run_id: int) -> dict | None:
    """Return a single rename run with all of its item rows.

    Returns ``None`` when no run with ``run_id`` exists.
    """
    with self._connect() as conn:
        header = conn.execute(
            """
            SELECT run_id, session_id, created_at, confirm, executed,
                   preflight_ok, episodes_count, files_count, duration_ms
            FROM rename_runs
            WHERE run_id = ?
            """,
            (run_id,),
        ).fetchone()
        if header is None:
            return None
        detail_rows = conn.execute(
            """
            SELECT item_index, episode_selection_id, file_selection_id,
                   source_path, destination_path, proposed_filename,
                   status, errors_json
            FROM rename_run_items
            WHERE run_id = ?
            ORDER BY item_index ASC
            """,
            (run_id,),
        ).fetchall()
    return {
        "run_id": int(header["run_id"]),
        "session_id": header["session_id"],
        "created_at": header["created_at"],
        "confirm": bool(header["confirm"]),
        "executed": bool(header["executed"]),
        "preflight_ok": bool(header["preflight_ok"]),
        "counts": {
            "episodes": int(header["episodes_count"]),
            "files": int(header["files_count"]),
        },
        "duration_ms": int(header["duration_ms"]),
        "items": [
            {
                "index": int(row["item_index"]),
                "episode_selection_id": row["episode_selection_id"],
                "file_selection_id": row["file_selection_id"],
                "source_path": row["source_path"],
                "destination_path": row["destination_path"],
                "proposed_filename": row["proposed_filename"],
                "status": row["status"],
                # errors were stored as a JSON array by _log_rename_run
                "errors": json.loads(row["errors_json"]),
            }
            for row in detail_rows
        ],
    }
def _allowed_media_roots(self) -> list[Path]:
raw = os.getenv("ALLOWED_MEDIA_ROOTS", "").strip()
Binary file not shown.
+211
View File
@@ -0,0 +1,211 @@
#!/usr/bin/env bash
# Feature tests for the rename-run logging endpoints (phase 7).
set -euo pipefail

# Resolve BASE_URL: honour an explicit env override, otherwise probe the
# local host first and then the container-to-host alias.
if [ -z "${BASE_URL:-}" ]; then
  if curl --silent --fail http://127.0.0.1:8085/api/health >/dev/null 2>&1; then
    BASE_URL="http://127.0.0.1:8085"
  elif curl --silent --fail http://host.containers.internal:8085/api/health >/dev/null 2>&1; then
    BASE_URL="http://host.containers.internal:8085"
  else
    echo "ERROR: could not determine BASE_URL. Tried 127.0.0.1 and host.containers.internal." >&2
    exit 1
  fi
fi

# Pick a writable allowed media root unless the caller supplied one.
if [ -z "${TEST_MEDIA_ROOT:-}" ]; then
  for candidate in \
    "/Volumes/8TB/Shared_Folders/TV_Shows" \
    "/Volumes/8TB_RAID1/Shared_Folders/Library/TV_Shows"
  do
    if [ -d "$candidate" ] && [ -w "$candidate" ]; then
      TEST_MEDIA_ROOT="$candidate"
      break
    fi
  done
fi
if [ -z "${TEST_MEDIA_ROOT:-}" ]; then
  echo "ERROR: no writable allowed media root found. Set TEST_MEDIA_ROOT." >&2
  exit 1
fi

# Unique scratch directory inside the media root plus a local tmp dir.
TEST_DIR="${TEST_MEDIA_ROOT}/_rename_mvp_phase7_$(date +%s)_$$"
mkdir -p "${TEST_DIR}"
TMP_DIR="$(mktemp -d)"
# FIX: also remove TEST_DIR on exit. Previously only TMP_DIR was cleaned
# up, so every run leaked a scratch directory (including the renamed test
# files) inside the real media library root.
trap 'rm -rf "$TMP_DIR" "$TEST_DIR"' EXIT
clear_session() {
  # Delete any stored episode and file selections for the given session.
  local sid="$1"
  local endpoint
  for endpoint in "selected-episodes" "selected-files"; do
    curl --fail --silent --show-error -X DELETE \
      "${BASE_URL}/api/session/${endpoint}?session_id=${sid}" \
      >/dev/null
  done
}
add_payloads() {
# Seed one session with two episode selections and two file selections.
# $1 = session id, $2/$3 = absolute paths of the two source media files.
local sid="$1"
local file1="$2"
local file2="$3"
# Quoted heredoc ('JSON'): the episode payload is fully static text.
cat > "${TMP_DIR}/episodes_${sid}.json" <<'JSON'
{
"items": [
{
"id": 1,
"series": "Elsbeth",
"year": "2024",
"season_number": 1,
"episode_number": 1,
"title": "Pilot"
},
{
"id": 2,
"series": "Elsbeth",
"year": "2024",
"season_number": 1,
"episode_number": 2,
"title": "Second Episode"
}
]
}
JSON
# Unquoted heredoc: the file payload interpolates the source paths and
# their basenames at generation time.
cat > "${TMP_DIR}/files_${sid}.json" <<JSON
{
"items": [
{
"path": "${file1}",
"name": "$(basename "${file1}")"
},
{
"path": "${file2}",
"name": "$(basename "${file2}")"
}
]
}
JSON
# Push both payloads into the session store.
curl --fail --silent --show-error \
-X POST "${BASE_URL}/api/session/selected-episodes?session_id=${sid}" \
-H "Content-Type: application/json" \
--data @"${TMP_DIR}/episodes_${sid}.json" \
>/dev/null
curl --fail --silent --show-error \
-X POST "${BASE_URL}/api/session/selected-files?session_id=${sid}" \
-H "Content-Type: application/json" \
--data @"${TMP_DIR}/files_${sid}.json" \
>/dev/null
}
echo "== Feature test 1: successful rename run is logged with duration =="
# Fresh session id plus two tiny source files that rename cleanly.
SESSION_OK="rename-log-ok-$(date +%s)-$$"
SRC1="${TEST_DIR}/ok_src1.mkv"
SRC2="${TEST_DIR}/ok_src2.mp4"
printf "a" > "${SRC1}"
printf "b" > "${SRC2}"
clear_session "${SESSION_OK}"
add_payloads "${SESSION_OK}" "${SRC1}" "${SRC2}"
# Execute the rename with confirm=true, then fetch the per-session log.
curl --fail --silent --show-error \
-X POST "${BASE_URL}/api/session/rename-execute?session_id=${SESSION_OK}&confirm=true" \
-o "${TMP_DIR}/rename_ok.json"
curl --fail --silent --show-error \
"${BASE_URL}/api/session/rename-log?session_id=${SESSION_OK}" \
-o "${TMP_DIR}/log_ok.json"
cat "${TMP_DIR}/log_ok.json"
# Validate the newest run entry; the heredoc prints its run_id on success.
python3 - "${TMP_DIR}/log_ok.json" > "${TMP_DIR}/run_id_ok.txt" <<'PY'
import json
import sys
from pathlib import Path
data = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
items = data.get("items", [])
assert len(items) >= 1, "expected at least one logged run"
run = items[0]
assert run.get("executed") is True, "expected executed=true"
assert run.get("preflight_ok") is True, "expected preflight_ok=true"
assert isinstance(run.get("duration_ms"), int), "duration_ms must be int"
assert run["duration_ms"] >= 0, "duration_ms must be >= 0"
assert run.get("items_count") == 2, "expected 2 item logs"
print(run["run_id"])
PY
# NOTE(review): RUN_ID_OK is captured but never used later in the script;
# the detail check in test 3 uses RUN_ID_FAIL instead. Kept for debugging.
RUN_ID_OK="$(cat "${TMP_DIR}/run_id_ok.txt")"
echo
echo "== Feature test 2: preflight-failed run is also logged =="
# New session with two source files; a pre-existing destination file makes
# the first mapping fail preflight.
SESSION_FAIL="rename-log-fail-$(date +%s)-$$"
SRC3="${TEST_DIR}/fail_src1.mkv"
SRC4="${TEST_DIR}/fail_src2.mp4"
printf "c" > "${SRC3}"
printf "d" > "${SRC4}"
# Force destination conflict for first mapping.
CONFLICT1="${TEST_DIR}/Elsbeth (2024) - S01E01 - Pilot.mkv"
printf "existing" > "${CONFLICT1}"
clear_session "${SESSION_FAIL}"
add_payloads "${SESSION_FAIL}" "${SRC3}" "${SRC4}"
# The rename call should return executed=false but still log a run.
curl --fail --silent --show-error \
-X POST "${BASE_URL}/api/session/rename-execute?session_id=${SESSION_FAIL}&confirm=true" \
-o "${TMP_DIR}/rename_fail.json"
curl --fail --silent --show-error \
"${BASE_URL}/api/session/rename-log?session_id=${SESSION_FAIL}" \
-o "${TMP_DIR}/log_fail.json"
cat "${TMP_DIR}/log_fail.json"
# Validate the failed run entry and capture its run_id for test 3.
python3 - "${TMP_DIR}/log_fail.json" > "${TMP_DIR}/run_id_fail.txt" <<'PY'
import json
import sys
from pathlib import Path
data = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
items = data.get("items", [])
assert len(items) >= 1, "expected at least one logged run"
run = items[0]
assert run.get("executed") is False, "expected executed=false"
assert run.get("preflight_ok") is False, "expected preflight_ok=false"
assert isinstance(run.get("duration_ms"), int), "duration_ms must be int"
assert run["duration_ms"] >= 0, "duration_ms must be >= 0"
print(run["run_id"])
PY
RUN_ID_FAIL="$(cat "${TMP_DIR}/run_id_fail.txt")"
echo
echo "== Feature test 3: run detail endpoint returns item statuses and errors =="
# Fetch the detail view of the failed run recorded in test 2.
curl --fail --silent --show-error \
"${BASE_URL}/api/session/rename-log/${RUN_ID_FAIL}" \
-o "${TMP_DIR}/run_detail_fail.json"
cat "${TMP_DIR}/run_detail_fail.json"
# Validate the detail payload: both items logged, with the destination
# conflict surfaced in at least one item's errors list.
python3 - "${TMP_DIR}/run_detail_fail.json" <<'PY'
import json
import sys
from pathlib import Path
data = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
assert data.get("executed") is False, "detail should show executed=false"
assert data.get("preflight_ok") is False, "detail should show preflight_ok=false"
assert isinstance(data.get("duration_ms"), int), "detail duration_ms must be int"
items = data.get("items", [])
assert len(items) == 2, "expected 2 item details"
assert any("destination file already exists" in " ".join(i.get("errors", [])) for i in items), \
"expected destination conflict error in item details"
print("rename-log detail validation passed")
PY
echo
echo "All rename log feature tests passed."