itcloud/backend/src/app/api/v1/batch.py

"""Batch operations API routes."""
import os
from pathlib import Path
from urllib.parse import quote
from fastapi import APIRouter, BackgroundTasks, status
from fastapi.responses import FileResponse
from app.api.dependencies import CurrentUser, DatabaseSession, S3ClientDep
from app.api.schemas import (
BatchDeleteRequest,
BatchDeleteResponse,
BatchDownloadRequest,
BatchMoveRequest,
BatchMoveResponse,
)
from app.services.batch_operations_service import BatchOperationsService
router = APIRouter(prefix="/batch", tags=["batch"])


def make_content_disposition(filename: str) -> str:
    """
    Create a Content-Disposition header value with proper encoding for non-ASCII filenames.

    Uses RFC 5987/2231 encoding to support UTF-8 filenames.

    Args:
        filename: Original filename (may contain non-ASCII characters)

    Returns:
        Properly formatted Content-Disposition header value
    """
    # ASCII-safe fallback (non-ASCII characters are replaced with "?")
    ascii_filename = filename.encode("ascii", errors="replace").decode("ascii")
    # UTF-8 percent-encoded filename (RFC 5987)
    utf8_filename = quote(filename.encode("utf-8"))
    # Send both parameters for maximum client compatibility
    return f"attachment; filename=\"{ascii_filename}\"; filename*=UTF-8''{utf8_filename}"


@router.post("/delete", response_model=BatchDeleteResponse)
async def batch_delete(
    request: BatchDeleteRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
    """
    Delete multiple assets.

    Args:
        request: Batch delete request
        current_user: Current authenticated user
        session: Database session
        s3_client: S3 client

    Returns:
        Deletion statistics
    """
    batch_service = BatchOperationsService(session, s3_client)
    result = await batch_service.delete_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
    )
    return result
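
# Illustrative request (an assumption based only on the fields this handler
# reads from BatchDeleteRequest; the response shape is defined by
# BatchDeleteResponse, which is not shown in this module):
#   POST .../batch/delete
#   {"asset_ids": ["<asset-id-1>", "<asset-id-2>"]}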


@router.post("/move", response_model=BatchMoveResponse)
async def batch_move(
    request: BatchMoveRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
    """
    Move multiple assets to a folder.

    Args:
        request: Batch move request
        current_user: Current authenticated user
        session: Database session
        s3_client: S3 client

    Returns:
        Move statistics
    """
    batch_service = BatchOperationsService(session, s3_client)
    result = await batch_service.move_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
        target_folder_id=request.folder_id,
    )
    return result
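
# Illustrative request (an assumption based on the fields read from
# BatchMoveRequest; whether folder_id may be null to move assets to the root
# is determined by the schema, which is not shown here):
#   POST .../batch/move
#   {"asset_ids": ["<asset-id-1>"], "folder_id": "<target-folder-id>"}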


@router.post("/download")
async def batch_download(
    request: BatchDownloadRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
    """
    Download multiple assets as a ZIP archive using streaming.

    Uses a temp file and FileResponse to avoid loading the entire ZIP into memory.
    The temp file is automatically cleaned up after the response is sent.

    Args:
        request: Batch download request
        current_user: Current authenticated user
        session: Database session
        s3_client: S3 client
        background_tasks: Background tasks for cleanup

    Returns:
        ZIP file response
    """
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
    )

    # Schedule temp file cleanup after the response is sent
    def cleanup_temp_file():
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except Exception:
            pass

    background_tasks.add_task(cleanup_temp_file)

    # Return the file using a streaming FileResponse
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
        headers={
            "Content-Disposition": make_content_disposition(filename),
        },
    )
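
# Note on ordering (standard Starlette/FastAPI behavior): FileResponse streams
# the file from disk in chunks, and BackgroundTasks run only after the response
# has been fully sent, so the temp ZIP is not removed while the client is still
# downloading it.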


@router.get("/folders/{folder_id}/download")
async def download_folder(
    folder_id: str,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
    """
    Download all assets in a folder as a ZIP archive using streaming.

    Uses a temp file and FileResponse to avoid loading the entire ZIP into memory.
    The temp file is automatically cleaned up after the response is sent.

    Args:
        folder_id: Folder ID
        current_user: Current authenticated user
        session: Database session
        s3_client: S3 client
        background_tasks: Background tasks for cleanup

    Returns:
        ZIP file response
    """
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_folder(
        user_id=current_user.id,
        folder_id=folder_id,
    )

    # Schedule temp file cleanup after the response is sent
    def cleanup_temp_file():
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except Exception:
            pass

    background_tasks.add_task(cleanup_temp_file)

    # Return the file using a streaming FileResponse
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
        headers={
            "Content-Disposition": make_content_disposition(filename),
        },
    )
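
# Illustrative wiring (an assumption, not shown in this file): the v1 API
# package would typically include this router on its aggregate router, e.g.
#   from app.api.v1 import batch
#   api_router.include_router(batch.router)
# after which these endpoints resolve under the configured v1 prefix plus "/batch".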