import json
import os
import sqlite3
from contextlib import closing
from pathlib import Path

from app.config import APP_DATA_DIR

class SessionService:
    """SQLite-backed store for a session's ordered episode/file selections.

    Two identically-shaped tables hold the selections:

    * ``selected_episodes`` -- episodes picked from a metadata source
    * ``selected_files``    -- media files picked from disk

    Rows carry a dense, 0-based ``position`` that is UNIQUE per session,
    plus an opaque JSON payload.  On top of these two lists the service
    builds episode<->file mapping previews, proposes target filenames,
    and executes the actual on-disk renames after a per-item preflight.
    """

    # Trusted class-level constants interpolated into SQL below.  They are
    # never user input, so f-string SQL on the table name is safe; all
    # values still go through `?` placeholders.
    _EPISODE_TABLE = "selected_episodes"
    _FILE_TABLE = "selected_files"

    def __init__(self) -> None:
        self._db_path = Path(APP_DATA_DIR) / "session_state.sqlite3"
        self._db_path.parent.mkdir(parents=True, exist_ok=True)
        self._init_db()

    # ------------------------------------------------------------------
    # Low-level storage helpers (episodes and files share one schema)
    # ------------------------------------------------------------------

    def _connect(self) -> sqlite3.Connection:
        """Open a connection that yields dict-style rows."""
        conn = sqlite3.connect(self._db_path)
        conn.row_factory = sqlite3.Row
        return conn

    def _init_db(self) -> None:
        """Create both selection tables and their indexes (idempotent).

        ``closing(...)`` actually closes the connection: sqlite3's own
        context manager only commits/rolls back the transaction, so the
        former bare ``with self._connect() as conn:`` pattern leaked a
        connection per call.
        """
        with closing(self._connect()) as conn, conn:
            for table in (self._EPISODE_TABLE, self._FILE_TABLE):
                conn.execute(
                    f"""
                    CREATE TABLE IF NOT EXISTS {table} (
                        selection_id INTEGER PRIMARY KEY AUTOINCREMENT,
                        session_id TEXT NOT NULL,
                        position INTEGER NOT NULL,
                        payload_json TEXT NOT NULL
                    )
                    """
                )
                # The UNIQUE index is the ordering invariant guard.
                conn.execute(
                    f"""
                    CREATE UNIQUE INDEX IF NOT EXISTS idx_{table}_session_position
                    ON {table}(session_id, position)
                    """
                )
                conn.execute(
                    f"""
                    CREATE INDEX IF NOT EXISTS idx_{table}_session
                    ON {table}(session_id)
                    """
                )

    def _list_selected(self, table: str, payload_key: str, session_id: str) -> list[dict]:
        """Return the session's rows ordered by position.

        The decoded JSON payload is exposed under *payload_key*
        ("episode" or "file") so each public wrapper keeps its shape.
        """
        with closing(self._connect()) as conn, conn:
            rows = conn.execute(
                f"""
                SELECT selection_id, position, payload_json
                FROM {table}
                WHERE session_id = ?
                ORDER BY position ASC
                """,
                (session_id,),
            ).fetchall()

        return [
            {
                "selection_id": row["selection_id"],
                "position": row["position"],
                payload_key: json.loads(row["payload_json"]),
            }
            for row in rows
        ]

    def _append_selected(self, table: str, session_id: str, items: list[dict]) -> None:
        """Insert *items* after the session's current maximum position."""
        with closing(self._connect()) as conn, conn:
            row = conn.execute(
                f"""
                SELECT COALESCE(MAX(position), -1) AS max_position
                FROM {table}
                WHERE session_id = ?
                """,
                (session_id,),
            ).fetchone()
            next_position = int(row["max_position"]) + 1

            for offset, item in enumerate(items):
                conn.execute(
                    f"""
                    INSERT INTO {table} (session_id, position, payload_json)
                    VALUES (?, ?, ?)
                    """,
                    (session_id, next_position + offset, json.dumps(item, ensure_ascii=True)),
                )

    def _clear_selected(self, table: str, session_id: str) -> None:
        """Delete every row belonging to *session_id*."""
        with closing(self._connect()) as conn, conn:
            conn.execute(f"DELETE FROM {table} WHERE session_id = ?", (session_id,))

    def _delete_selected(self, table: str, session_id: str, selection_id: int) -> None:
        """Delete one row; unknown ids are silently ignored (as before)."""
        with closing(self._connect()) as conn, conn:
            conn.execute(
                f"""
                DELETE FROM {table}
                WHERE session_id = ? AND selection_id = ?
                """,
                (session_id, selection_id),
            )

    def _compact_selected(self, table: str, payload_key: str, session_id: str) -> list[dict]:
        """Close position gaps left by deletions and return the fresh list.

        Single-phase updates are safe here: relative order is unchanged,
        so every row only moves *down* into an already-freed slot and the
        UNIQUE (session_id, position) index never sees a conflict.
        """
        items = self._list_selected(table, payload_key, session_id)
        with closing(self._connect()) as conn, conn:
            for position, item in enumerate(items):
                if item["position"] == position:
                    continue
                conn.execute(
                    f"""
                    UPDATE {table}
                    SET position = ?
                    WHERE session_id = ? AND selection_id = ?
                    """,
                    (position, session_id, item["selection_id"]),
                )
        return self._list_selected(table, payload_key, session_id)

    def _reorder_selected(
        self,
        table: str,
        payload_key: str,
        session_id: str,
        from_index: int,
        to_index: int,
    ) -> list[dict]:
        """Move the item at *from_index* to *to_index* and persist densely.

        Raises:
            ValueError: if either index falls outside the current list.
        """
        current_items = self._list_selected(table, payload_key, session_id)

        if from_index < 0 or from_index >= len(current_items):
            raise ValueError("from_index out of range")
        if to_index < 0 or to_index >= len(current_items):
            raise ValueError("to_index out of range")
        if from_index == to_index:
            return current_items

        moved = current_items.pop(from_index)
        current_items.insert(to_index, moved)

        with closing(self._connect()) as conn, conn:
            # Two-phase update avoids transient UNIQUE conflicts on
            # (session_id, position): park every row at a distinct
            # negative position first, then write the final 0-based ones.
            for stored_position in (lambda pos: -(pos + 1), lambda pos: pos):
                for position, item in enumerate(current_items):
                    conn.execute(
                        f"""
                        UPDATE {table}
                        SET position = ?
                        WHERE session_id = ? AND selection_id = ?
                        """,
                        (stored_position(position), session_id, item["selection_id"]),
                    )

        return self._list_selected(table, payload_key, session_id)

    # ------------------------------------------------------------------
    # Episode selection API
    # ------------------------------------------------------------------

    def list_selected_episodes(self, session_id: str) -> list[dict]:
        """Ordered episode selections for *session_id*."""
        return self._list_selected(self._EPISODE_TABLE, "episode", session_id)

    def add_selected_episodes(self, session_id: str, items: list[dict]) -> list[dict]:
        """Append *items* to the session's episode list; return the new list."""
        if items:
            self._append_selected(self._EPISODE_TABLE, session_id, items)
        return self.list_selected_episodes(session_id)

    def clear_selected_episodes(self, session_id: str) -> None:
        """Drop all episode selections for *session_id*."""
        self._clear_selected(self._EPISODE_TABLE, session_id)

    def remove_selected_episode(self, session_id: str, selection_id: int) -> list[dict]:
        """Remove one episode selection and re-pack positions."""
        self._delete_selected(self._EPISODE_TABLE, session_id, selection_id)
        return self._compact_positions(session_id)

    def reorder_selected_episodes(
        self,
        session_id: str,
        from_index: int,
        to_index: int,
    ) -> list[dict]:
        """Move an episode selection within the session's list."""
        return self._reorder_selected(
            self._EPISODE_TABLE, "episode", session_id, from_index, to_index
        )

    def _compact_positions(self, session_id: str) -> list[dict]:
        """Backward-compatible alias for episode-table compaction."""
        return self._compact_selected(self._EPISODE_TABLE, "episode", session_id)

    # ------------------------------------------------------------------
    # File selection API
    # ------------------------------------------------------------------

    def list_selected_files(self, session_id: str) -> list[dict]:
        """Ordered file selections for *session_id*."""
        return self._list_selected(self._FILE_TABLE, "file", session_id)

    def add_selected_files(self, session_id: str, items: list[dict]) -> list[dict]:
        """Append *items* to the session's file list; return the new list."""
        if items:
            self._append_selected(self._FILE_TABLE, session_id, items)
        return self.list_selected_files(session_id)

    def clear_selected_files(self, session_id: str) -> None:
        """Drop all file selections for *session_id*."""
        self._clear_selected(self._FILE_TABLE, session_id)

    def remove_selected_file(self, session_id: str, selection_id: int) -> list[dict]:
        """Remove one file selection and re-pack positions."""
        self._delete_selected(self._FILE_TABLE, session_id, selection_id)
        return self._compact_file_positions(session_id)

    def reorder_selected_files(
        self,
        session_id: str,
        from_index: int,
        to_index: int,
    ) -> list[dict]:
        """Move a file selection within the session's list."""
        return self._reorder_selected(
            self._FILE_TABLE, "file", session_id, from_index, to_index
        )

    def _compact_file_positions(self, session_id: str) -> list[dict]:
        """Backward-compatible alias for file-table compaction."""
        return self._compact_selected(self._FILE_TABLE, "file", session_id)

    # ------------------------------------------------------------------
    # Previews and rename execution
    # ------------------------------------------------------------------

    def build_mapping_preview(self, session_id: str) -> dict:
        """Pair episode i with file i by list position.

        Raises:
            ValueError: when the two lists have different lengths.
        """
        episodes = self.list_selected_episodes(session_id)
        files = self.list_selected_files(session_id)

        if len(episodes) != len(files):
            raise ValueError(
                "Selected episodes and selected files count mismatch: "
                f"{len(episodes)} episodes vs {len(files)} files"
            )

        mappings = [
            {
                "index": index,
                "episode_selection_id": episode_item["selection_id"],
                "file_selection_id": file_item["selection_id"],
                "episode": episode_item["episode"],
                "file": file_item["file"],
            }
            for index, (episode_item, file_item) in enumerate(zip(episodes, files))
        ]

        return {
            "session_id": session_id,
            "counts": {
                "episodes": len(episodes),
                "files": len(files),
            },
            "mappings": mappings,
        }

    @staticmethod
    def _coerce_int(value, default: int = 0) -> int:
        """Best-effort int conversion; *default* on None/garbage input."""
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    def build_filename_preview(self, session_id: str) -> dict:
        """Attach a proposed "{series} ({year}) - SxxEyy - {title}{ext}"
        filename to every episode/file mapping.

        Falls back to placeholders ("Unknown Series", "0000", "Untitled",
        0) when the episode payload lacks the expected keys.
        NOTE(review): series/title are used verbatim; a path separator in
        either is later rejected by preflight rather than sanitized here.
        """
        mapping_preview = self.build_mapping_preview(session_id)
        previews = []

        for item in mapping_preview["mappings"]:
            episode = item["episode"]
            file_payload = item["file"]

            series = (
                episode.get("series")
                or episode.get("series_name")
                or episode.get("show")
                or "Unknown Series"
            )
            year = episode.get("year") or "0000"
            title = episode.get("title") or "Untitled"

            season_number = self._coerce_int(
                episode.get("season_number") or episode.get("season") or 0
            )
            episode_number = self._coerce_int(
                episode.get("episode_number") or episode.get("number") or 0
            )

            # Extension is inherited from the source file's name/path.
            source_name = file_payload.get("name") or file_payload.get("path") or ""
            ext = Path(source_name).suffix

            proposed_filename = (
                f"{series} ({year}) - S{season_number:02}E{episode_number:02} - {title}{ext}"
            )

            previews.append(
                {
                    "index": item["index"],
                    "episode_selection_id": item["episode_selection_id"],
                    "file_selection_id": item["file_selection_id"],
                    "episode": episode,
                    "file": file_payload,
                    "proposed_filename": proposed_filename,
                }
            )

        return {
            "session_id": mapping_preview["session_id"],
            "counts": mapping_preview["counts"],
            "template": "{series} ({year}) - S{season:02}E{episode:02} - {title}{ext}",
            "items": previews,
        }

    def execute_rename(self, session_id: str, confirm: bool) -> dict:
        """Preflight every mapped rename, then execute all of them.

        If any item fails preflight, nothing is renamed and the report has
        ``executed=False``.  Renames stay within the source's directory
        (``Path.with_name``), so ``os.replace`` never crosses filesystems.

        Raises:
            ValueError: when *confirm* is falsy.
            OSError: if a rename fails at execution time (items renamed
                before the failure stay renamed).
        """
        if not confirm:
            raise ValueError("confirm=true is required to execute rename")

        preview = self.build_filename_preview(session_id)
        allowed_roots = self._allowed_media_roots()
        preflight_items = []
        preflight_errors = 0

        for item in preview["items"]:
            source_path_str = str(item["file"].get("path") or "").strip()
            proposed_filename = item["proposed_filename"]
            source_path = Path(source_path_str)

            destination_path = source_path.with_name(proposed_filename) if source_path_str else Path("")
            errors = self._preflight_errors(
                source_path=source_path,
                destination_path=destination_path,
                proposed_filename=proposed_filename,
                allowed_roots=allowed_roots,
            )

            if errors:
                status = "preflight_error"
                preflight_errors += 1
            else:
                status = "ready"

            preflight_items.append(
                {
                    "index": item["index"],
                    "episode_selection_id": item["episode_selection_id"],
                    "file_selection_id": item["file_selection_id"],
                    "source_path": source_path_str,
                    "destination_path": str(destination_path) if source_path_str else "",
                    "proposed_filename": proposed_filename,
                    "status": status,
                    "errors": errors,
                }
            )

        if preflight_errors > 0:
            # All-or-nothing: refuse to rename anything on any error.
            return {
                "session_id": session_id,
                "confirm": confirm,
                "executed": False,
                "preflight_ok": False,
                "counts": preview["counts"],
                "items": preflight_items,
            }

        results = []
        for item in preflight_items:
            # NOTE(review): preflight-to-rename is not atomic; a file
            # appearing at the destination in between would be replaced.
            os.replace(item["source_path"], item["destination_path"])
            results.append({**item, "status": "renamed", "errors": []})

        return {
            "session_id": session_id,
            "confirm": confirm,
            "executed": True,
            "preflight_ok": True,
            "counts": preview["counts"],
            "items": results,
        }

    # ------------------------------------------------------------------
    # Path safety helpers
    # ------------------------------------------------------------------

    def _allowed_media_roots(self) -> list[Path]:
        """Resolve the directories renames are confined to.

        Precedence: ALLOWED_MEDIA_ROOTS (comma-separated) > MEDIA_ROOT >
        hard-coded defaults.  Unresolvable candidates are skipped
        (best-effort by design).
        """
        raw = os.getenv("ALLOWED_MEDIA_ROOTS", "").strip()
        if raw:
            candidates = [p.strip() for p in raw.split(",") if p.strip()]
        else:
            media_root = os.getenv("MEDIA_ROOT", "").strip()
            if media_root:
                candidates = [media_root]
            else:
                candidates = [
                    "/Volumes/8TB/Shared_Folders/TV_Shows",
                    "/Volumes/8TB_RAID1/Shared_Folders/Library/TV_Shows",
                ]

        roots: list[Path] = []
        for candidate in candidates:
            try:
                roots.append(Path(candidate).resolve())
            except Exception:
                continue
        return roots

    def _is_within_allowed_roots(self, path: Path, allowed_roots: list[Path]) -> bool:
        """True when the resolved *path* sits under any allowed root."""
        try:
            resolved = path.resolve()
        except Exception:
            return False

        for root in allowed_roots:
            try:
                resolved.relative_to(root)
                return True
            except ValueError:
                continue
        return False

    def _preflight_errors(
        self,
        source_path: Path,
        destination_path: Path,
        proposed_filename: str,
        allowed_roots: list[Path],
    ) -> list[str]:
        """Collect every reason the rename cannot proceed.

        Returns an empty list when the item is ready to rename.
        """
        errors: list[str] = []

        # BUG FIX: the old guard was `if not str(source_path)`, which can
        # never fire because str(Path("")) == "." (truthy).  An empty path
        # has no parts at all, so test that instead.
        if not source_path.parts:
            errors.append("source path missing")
            return errors

        if ".." in source_path.parts:
            errors.append("source path traversal is not allowed")
        if ".." in Path(proposed_filename).parts:
            errors.append("destination filename traversal is not allowed")
        if Path(proposed_filename).name != proposed_filename:
            errors.append("destination filename must not contain path separators")

        if not self._is_within_allowed_roots(source_path, allowed_roots):
            errors.append("source path is outside allowed media roots")
        if not self._is_within_allowed_roots(destination_path, allowed_roots):
            errors.append("destination path is outside allowed media roots")

        if not source_path.exists():
            errors.append("source file does not exist")
        if source_path.exists() and not source_path.is_file():
            errors.append("source path is not a file")

        if source_path == destination_path:
            errors.append("source and destination paths are equal")

        if destination_path.exists():
            errors.append("destination file already exists")

        if not destination_path.parent.exists():
            errors.append("destination parent directory does not exist")

        return errors