"""Golden tests for the history API: verifies that file operations (mkdir,
rename, copy, move, duplicate, download) record the expected history entries."""
from __future__ import annotations

import asyncio
import sys
import tempfile
import threading
import time
import unittest
from pathlib import Path

import httpx

sys.path.insert(0, str(Path(__file__).resolve().parents[3]))

from backend.app.dependencies import (
    get_archive_download_task_service,
    get_copy_task_service,
    get_duplicate_task_service,
    get_file_ops_service,
    get_history_service,
    get_move_task_service,
    get_task_service,
)
from backend.app.db.history_repository import HistoryRepository
from backend.app.db.task_repository import TaskRepository
from backend.app.fs.filesystem_adapter import FilesystemAdapter
from backend.app.main import app
from backend.app.security.path_guard import PathGuard
from backend.app.services.archive_download_task_service import ArchiveDownloadTaskService
from backend.app.services.copy_task_service import CopyTaskService
from backend.app.services.duplicate_task_service import DuplicateTaskService
from backend.app.services.file_ops_service import FileOpsService
from backend.app.services.history_service import HistoryService
from backend.app.services.move_task_service import MoveTaskService
from backend.app.services.task_service import TaskService
from backend.app.tasks_runner import TaskRunner
|
class FailingCopyFilesystemAdapter(FilesystemAdapter):
    """Fault-injecting adapter: every copy attempt raises ``OSError``.

    Used by tests that need a move/copy task to end in the ``failed`` state.
    """

    def copy_file(self, source: str, destination: str, on_progress=None) -> None:
        """Always fail, regardless of *source*/*destination*."""
        raise OSError('forced copy failure')
|
|
|
|
|
|
class BlockingArchiveBuildFileOpsService(FileOpsService):
    """FileOpsService variant that pauses mid-archive-build.

    Writes a partial entry to the zip, signals *entered*, then blocks until
    *release* is set — giving the test a window to cancel the task while the
    archive is being built.
    """

    def __init__(self, *args, entered: threading.Event, release: threading.Event, **kwargs):
        # entered: set by this service once the archive build has started.
        # release: set by the test to let the build continue.
        super().__init__(*args, **kwargs)
        self._entered = entered
        self._release = release

    def _write_download_target_to_zip(self, archive, resolved_target, on_each_item=None) -> None:
        """Write a sentinel entry, pause for the test, then defer to the real build."""
        # Sentinel entry proves the archive was partially written before the pause.
        archive.writestr("partial.txt", b"partial")
        # Signal the test that the build is in progress...
        self._entered.set()
        # ...and wait (bounded, so a broken test cannot hang forever) for permission to continue.
        self._release.wait(timeout=2.0)
        # Invoke the per-item callback once so cancellation checks can fire
        # before the real build runs.
        if on_each_item:
            on_each_item()
        super()._write_download_target_to_zip(archive, resolved_target, on_each_item=on_each_item)
|
|
|
|
|
|
class HistoryApiGoldenTest(unittest.TestCase):
    """Golden tests asserting the exact history entries the API records.

    Each test drives the FastAPI app through an in-process httpx client
    (no real server), backed by temporary storage roots and a temporary
    SQLite task/history database.
    """

    def setUp(self) -> None:
        """Create temp storage roots, repositories, and wire up the services."""
        self.temp_dir = tempfile.TemporaryDirectory()
        self.root1 = Path(self.temp_dir.name) / 'root1'
        self.root2 = Path(self.temp_dir.name) / 'root2'
        self.root1.mkdir(parents=True, exist_ok=True)
        self.root2.mkdir(parents=True, exist_ok=True)
        db_path = str(Path(self.temp_dir.name) / 'tasks.db')
        # Archive artifacts (prepared zips) are written under the temp dir too.
        self.artifact_root = Path(self.temp_dir.name) / "archive_tmp"
        self.task_repo = TaskRepository(db_path)
        self.history_repo = HistoryRepository(db_path)
        # Expose the two roots under the logical names used in request paths.
        self.path_guard = PathGuard({'storage1': str(self.root1), 'storage2': str(self.root2)})
        self._set_services(FilesystemAdapter())

    def tearDown(self) -> None:
        """Remove dependency overrides and delete all temp state."""
        app.dependency_overrides.clear()
        self.temp_dir.cleanup()

    def _set_services(self, filesystem: FilesystemAdapter, file_ops_service: FileOpsService | None = None) -> None:
        """(Re)build all services around *filesystem* and install FastAPI overrides.

        Tests call this again with a fault-injecting adapter or a custom
        ``file_ops_service`` to alter behavior mid-test.
        """
        runner = TaskRunner(repository=self.task_repo, filesystem=filesystem, history_repository=self.history_repo)
        file_ops_service = file_ops_service or FileOpsService(path_guard=self.path_guard, filesystem=filesystem, history_repository=self.history_repo)
        archive_service = ArchiveDownloadTaskService(
            path_guard=self.path_guard,
            repository=self.task_repo,
            runner=runner,
            history_repository=self.history_repo,
            file_ops_service=file_ops_service,
            artifact_root=self.artifact_root,
        )
        copy_service = CopyTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        duplicate_service = DuplicateTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        move_service = MoveTaskService(path_guard=self.path_guard, repository=self.task_repo, runner=runner, history_repository=self.history_repo)
        task_service = TaskService(repository=self.task_repo)
        history_service = HistoryService(repository=self.history_repo)

        # Async override factories returning the instances built above; FastAPI
        # awaits these instead of the real dependency providers.
        async def _override_file_ops_service() -> FileOpsService:
            return file_ops_service

        async def _override_archive_service() -> ArchiveDownloadTaskService:
            return archive_service

        async def _override_copy_service() -> CopyTaskService:
            return copy_service

        async def _override_duplicate_service() -> DuplicateTaskService:
            return duplicate_service

        async def _override_move_service() -> MoveTaskService:
            return move_service

        async def _override_task_service() -> TaskService:
            return task_service

        async def _override_history_service() -> HistoryService:
            return history_service

        app.dependency_overrides[get_file_ops_service] = _override_file_ops_service
        app.dependency_overrides[get_archive_download_task_service] = _override_archive_service
        app.dependency_overrides[get_copy_task_service] = _override_copy_service
        app.dependency_overrides[get_duplicate_task_service] = _override_duplicate_service
        app.dependency_overrides[get_move_task_service] = _override_move_service
        app.dependency_overrides[get_task_service] = _override_task_service
        app.dependency_overrides[get_history_service] = _override_history_service

    def _request(self, method: str, url: str, payload: dict | None = None) -> httpx.Response:
        """Issue a GET or POST against the ASGI app; runs its own event loop."""
        async def _run() -> httpx.Response:
            transport = httpx.ASGITransport(app=app)
            async with httpx.AsyncClient(transport=transport, base_url='http://testserver') as client:
                if method == 'GET':
                    return await client.get(url)
                # Anything that is not GET is treated as a JSON POST.
                return await client.post(url, json=payload)

        return asyncio.run(_run())

    def _wait_task(self, task_id: str, timeout_s: float = 2.0) -> dict:
        """Poll the task endpoint until it reaches a terminal state, or fail."""
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            response = self._request('GET', f'/api/tasks/{task_id}')
            body = response.json()
            if body['status'] in {'completed', 'failed', 'ready', 'cancelled'}:
                return body
            time.sleep(0.02)
        self.fail('task did not reach terminal state in time')

    def test_get_history_empty_list(self) -> None:
        """A fresh database yields an empty history list."""
        response = self._request('GET', '/api/history')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {'items': []})

    def test_get_history_list_shape_and_sorting(self) -> None:
        """History is returned newest-first with the full golden item shape."""
        self.history_repo.insert_entry_for_testing(
            {
                'id': 'old', 'operation': 'mkdir', 'status': 'completed', 'source': None, 'destination': None,
                'path': 'storage1/old', 'error_code': None, 'error_message': None,
                'created_at': '2026-03-10T10:00:00Z', 'finished_at': '2026-03-10T10:00:00Z',
            }
        )
        self.history_repo.insert_entry_for_testing(
            {
                'id': 'new', 'operation': 'move', 'status': 'queued', 'source': 'storage1/a.txt', 'destination': 'storage1/b.txt',
                'path': None, 'error_code': None, 'error_message': None,
                'created_at': '2026-03-10T10:01:00Z', 'finished_at': None,
            }
        )

        response = self._request('GET', '/api/history')
        self.assertEqual(response.status_code, 200)
        # 'new' (later created_at) must come before 'old'.
        self.assertEqual(response.json(), {
            'items': [
                {
                    'id': 'new', 'operation': 'move', 'status': 'queued', 'source': 'storage1/a.txt', 'destination': 'storage1/b.txt',
                    'path': None, 'error_code': None, 'error_message': None,
                    'created_at': '2026-03-10T10:01:00Z', 'finished_at': None,
                },
                {
                    'id': 'old', 'operation': 'mkdir', 'status': 'completed', 'source': None, 'destination': None,
                    'path': 'storage1/old', 'error_code': None, 'error_message': None,
                    'created_at': '2026-03-10T10:00:00Z', 'finished_at': '2026-03-10T10:00:00Z',
                },
            ]
        })

    def test_mkdir_success_history_item(self) -> None:
        """Successful mkdir records a completed history entry with the new path."""
        response = self._request('POST', '/api/files/mkdir', {'parent_path': 'storage1', 'name': 'newdir'})
        self.assertEqual(response.status_code, 200)

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'mkdir')
        self.assertEqual(history[0]['status'], 'completed')
        self.assertEqual(history[0]['path'], 'storage1/newdir')

    def test_rename_failure_history_item(self) -> None:
        """Renaming a missing file records a failed entry with path_not_found."""
        response = self._request('POST', '/api/files/rename', {'path': 'storage1/missing.txt', 'new_name': 'renamed.txt'})
        self.assertEqual(response.status_code, 404)

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'rename')
        self.assertEqual(history[0]['status'], 'failed')
        self.assertEqual(history[0]['error_code'], 'path_not_found')

    def test_copy_completed_history_item(self) -> None:
        """An async copy task records source/destination on completion."""
        src = self.root1 / 'source.txt'
        src.write_text('hello', encoding='utf-8')
        response = self._request('POST', '/api/files/copy', {'source': 'storage1/source.txt', 'destination': 'storage1/copied.txt'})
        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'copy')
        self.assertEqual(history[0]['status'], 'completed')
        self.assertEqual(history[0]['source'], 'storage1/source.txt')
        self.assertEqual(history[0]['destination'], 'storage1/copied.txt')

    def test_move_failed_history_item(self) -> None:
        """A cross-root move whose copy step fails records io_error."""
        src = self.root1 / 'source.txt'
        src.write_text('hello', encoding='utf-8')
        # Swap in an adapter whose copy_file always raises.
        self._set_services(FailingCopyFilesystemAdapter())

        response = self._request('POST', '/api/files/move', {'source': 'storage1/source.txt', 'destination': 'storage2/moved.txt'})
        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'move')
        self.assertEqual(history[0]['status'], 'failed')
        self.assertEqual(history[0]['error_code'], 'io_error')

    def test_duplicate_completed_history_item(self) -> None:
        """Duplicating a file records the generated ' copy' destination name."""
        (self.root1 / 'report.txt').write_text('hello', encoding='utf-8')

        response = self._request('POST', '/api/files/duplicate', {'paths': ['storage1/report.txt']})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])

        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'duplicate')
        self.assertEqual(history[0]['status'], 'completed')
        self.assertEqual(history[0]['source'], 'storage1/report.txt')
        self.assertEqual(history[0]['destination'], 'storage1/report copy.txt')

    def test_single_file_download_writes_ready_history_item(self) -> None:
        """A direct file download records a ready 'single_file' entry."""
        (self.root1 / 'report.txt').write_text('hello download', encoding='utf-8')

        response = self._request('GET', '/api/files/download?path=storage1/report.txt')

        self.assertEqual(response.status_code, 200)
        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'download')
        self.assertEqual(history[0]['status'], 'ready')
        self.assertEqual(history[0]['source'], 'single_file')
        self.assertEqual(history[0]['path'], 'storage1/report.txt')
        self.assertEqual(history[0]['destination'], 'report.txt')
        self.assertEqual(history[0]['error_code'], None)
        self.assertEqual(history[0]['error_message'], None)

    def test_single_directory_zip_download_writes_ready_history_item(self) -> None:
        """Zipping one directory records 'single_directory_zip' with <dir>.zip."""
        (self.root1 / 'docs').mkdir()
        (self.root1 / 'docs' / 'a.txt').write_text('A', encoding='utf-8')

        response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])
        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'download')
        self.assertEqual(history[0]['status'], 'ready')
        self.assertEqual(history[0]['source'], 'single_directory_zip')
        self.assertEqual(history[0]['path'], 'storage1/docs')
        self.assertEqual(history[0]['destination'], 'docs.zip')

    def test_multi_mixed_zip_download_writes_ready_history_item(self) -> None:
        """Zipping mixed targets records 'multi_zip' with a timestamped name."""
        (self.root1 / 'readme.txt').write_text('R', encoding='utf-8')
        (self.root1 / 'photos').mkdir()
        (self.root1 / 'photos' / 'img.txt').write_text('P', encoding='utf-8')

        response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/readme.txt', 'storage1/photos']})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])
        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'download')
        self.assertEqual(history[0]['status'], 'ready')
        self.assertEqual(history[0]['source'], 'multi_zip')
        self.assertEqual(history[0]['path'], 'storage1/readme.txt, storage1/photos')
        # Destination name embeds a date-time stamp: kodidownload-YYYYMMDD-HHMMSS.zip.
        self.assertRegex(history[0]['destination'], r'^kodidownload-\d{8}-\d{6}\.zip$')

    def test_download_preflight_failure_writes_preflight_failed_history_item(self) -> None:
        """A symlink inside the target makes the zip preflight fail."""
        # NOTE(review): symlink creation may be unavailable on some Windows
        # configurations — confirm this test is skipped/guarded there.
        target = self.root1 / 'real.txt'
        target.write_text('x', encoding='utf-8')
        (self.root1 / 'docs').mkdir()
        (self.root1 / 'docs' / 'link.txt').symlink_to(target)

        response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})

        self.assertEqual(response.status_code, 202)
        self._wait_task(response.json()['task_id'])
        history = self._request('GET', '/api/history').json()['items']
        self.assertEqual(history[0]['operation'], 'download')
        self.assertEqual(history[0]['status'], 'failed')
        self.assertEqual(history[0]['source'], 'single_directory_zip')
        self.assertEqual(history[0]['path'], 'storage1/docs')
        self.assertEqual(history[0]['destination'], 'docs.zip')
        self.assertEqual(history[0]['error_code'], 'download_preflight_failed')
        self.assertEqual(history[0]['error_message'], 'Zip download preflight failed')

    def test_download_cancellation_writes_cancelled_history_item(self) -> None:
        """Cancelling mid-build records a cancelled entry without error fields."""
        entered = threading.Event()
        release = threading.Event()
        # Service that blocks during the archive build until 'release' is set,
        # creating a deterministic window in which to cancel the task.
        file_ops_service = BlockingArchiveBuildFileOpsService(
            path_guard=self.path_guard,
            filesystem=FilesystemAdapter(),
            history_repository=self.history_repo,
            entered=entered,
            release=release,
        )
        self._set_services(FilesystemAdapter(), file_ops_service=file_ops_service)
        (self.root1 / 'docs').mkdir()
        (self.root1 / 'docs' / 'a.txt').write_text('A', encoding='utf-8')

        response = self._request('POST', '/api/files/download/archive-prepare', {'paths': ['storage1/docs']})

        self.assertEqual(response.status_code, 202)
        # Wait until the build is actually in progress before cancelling.
        self.assertTrue(entered.wait(timeout=2.0))
        cancel = self._request('POST', f"/api/files/download/archive/{response.json()['task_id']}/cancel")
        release.set()
        self._wait_task(response.json()['task_id'])
        history = self._request('GET', '/api/history').json()['items']

        self.assertEqual(cancel.status_code, 200)
        self.assertEqual(history[0]['operation'], 'download')
        self.assertEqual(history[0]['status'], 'cancelled')
        self.assertEqual(history[0]['source'], 'single_directory_zip')
        self.assertEqual(history[0]['path'], 'storage1/docs')
        self.assertEqual(history[0]['destination'], 'docs.zip')
        self.assertEqual(history[0]['error_code'], None)
        self.assertEqual(history[0]['error_message'], None)

    def test_download_history_uses_server_certain_statuses_only(self) -> None:
        """Download history uses server-verifiable statuses, never client-side ones."""
        (self.root1 / 'report.txt').write_text('hello download', encoding='utf-8')

        response = self._request('GET', '/api/files/download?path=storage1/report.txt')

        self.assertEqual(response.status_code, 200)
        history = self._request('GET', '/api/history').json()['items']
        self.assertIn(history[0]['status'], {'requested', 'ready', 'preflight_failed', 'failed', 'cancelled'})
        self.assertNotIn(history[0]['status'], {'completed', 'downloaded', 'saved'})