- Docker Deployment Fixes: - Switch base images to docker.m.daocloud.io to resolve registry 401 errors - Add Postgres and Redis services to docker-compose.traefik.yml - Fix frontend build: replace missing icons (Globe->Location, Chart->TrendCharts) - Fix frontend build: resolve pnpm CI/TTY issues and frozen lockfile errors - Add missing backend dependencies (sqlalchemy, psycopg2, redis-py, celery, docker-py) in pixi.toml - Ensure database tables are created on startup (lifespan event) - Backend Internationalization (i18n): - Add backend/app/core/i18n.py for locale handling - Update API endpoints (jobs, tasks, uploads, results) to return localized messages - Support 'Accept-Language' header (en/zh) - Documentation: - Update DOCKER_DEPLOYMENT.md with new architecture and troubleshooting - Update AGENTS.md with latest stack details and deployment steps - Update @fix_plan.md status Co-Authored-By: Claude <noreply@anthropic.com>
78 lines
2.3 KiB
Python
78 lines
2.3 KiB
Python
"""结果下载 API"""
|
||
from fastapi import APIRouter, HTTPException, Response, Depends
|
||
from fastapi.responses import FileResponse
|
||
from sqlalchemy.orm import Session
|
||
from pathlib import Path
|
||
import tarfile
|
||
import io
|
||
import shutil
|
||
|
||
from ...database import get_db
|
||
from ...models.job import Job, JobStatus
|
||
from ...config import settings
|
||
from ...core.i18n import I18n, get_i18n
|
||
|
||
# Router for result-download / job-deletion endpoints; mounted by the app.
router = APIRouter()

# Root directory holding per-job output folders (one subdir per job_id).
RESULTS_DIR = Path(settings.RESULTS_DIR)
|
||
|
||
|
||
@router.get("/{job_id}/download")
async def download_results(job_id: str, db: Session = Depends(get_db), i18n: I18n = Depends(get_i18n)):
    """Package a completed job's result directory as a .tar.gz download.

    Responds 404 (localized) when the job or its result directory is
    missing, 400 when the job has not reached COMPLETED status.
    """
    job = db.query(Job).filter(Job.id == job_id).first()
    if job is None:
        raise HTTPException(status_code=404, detail=i18n.t("job_not_found"))

    if job.status != JobStatus.COMPLETED:
        raise HTTPException(
            status_code=400,
            detail=i18n.t("job_not_completed", status=job.status)
        )

    output_dir = RESULTS_DIR / job_id
    if not output_dir.exists():
        raise HTTPException(status_code=404, detail=i18n.t("results_not_found"))

    # Build the gzipped tarball entirely in memory.
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode="w:gz") as archive:
        for entry in output_dir.rglob("*"):
            if entry.is_file():
                # Store paths relative to the job directory so the archive
                # unpacks without the server-side path prefix.
                archive.add(entry, arcname=entry.relative_to(output_dir))

    buffer.seek(0)
    payload = buffer.read()

    return Response(
        content=payload,
        media_type="application/gzip",
        headers={
            "Content-Disposition": f"attachment; filename=bttoxin_{job_id}.tar.gz"
        }
    )
|
||
|
||
|
||
@router.delete("/{job_id}")
async def delete_job(job_id: str, db: Session = Depends(get_db), i18n: I18n = Depends(get_i18n)):
    """Remove a job: its uploaded inputs, its result files, and its DB row.

    Responds 404 (localized) when no job with *job_id* exists.
    """
    job = db.query(Job).filter(Job.id == job_id).first()
    if job is None:
        raise HTTPException(status_code=404, detail=i18n.t("job_not_found"))

    # Purge on-disk artifacts: uploaded inputs first, then generated results.
    for directory in (Path(settings.UPLOAD_DIR) / job_id, RESULTS_DIR / job_id):
        if directory.exists():
            shutil.rmtree(directory)

    # Finally drop the database record.
    db.delete(job)
    db.commit()

    return {"message": i18n.t("job_deleted", job_id=job_id)}
|