Files
bttoxin-pipeline/backend/app/api/v1/tasks.py
zly 9835b6e341 feat(deploy): fix docker deployment and add backend i18n
- Docker Deployment Fixes:
  - Switch base images to docker.m.daocloud.io to resolve registry 401 errors
  - Add Postgres and Redis services to docker-compose.traefik.yml
  - Fix frontend build: replace missing icons (Globe->Location, Chart->TrendCharts)
  - Fix frontend build: resolve pnpm CI/TTY issues and frozen lockfile errors
  - Add missing backend dependencies (sqlalchemy, psycopg2, redis-py, celery, docker-py) in pixi.toml
  - Ensure database tables are created on startup (lifespan event)

- Backend Internationalization (i18n):
  - Add backend/app/core/i18n.py for locale handling
  - Update API endpoints (jobs, tasks, uploads, results) to return localized messages
  - Support 'Accept-Language' header (en/zh)

- Documentation:
  - Update DOCKER_DEPLOYMENT.md with new architecture and troubleshooting
  - Update AGENTS.md with latest stack details and deployment steps
  - Update @fix_plan.md status

Co-Authored-By: Claude <noreply@anthropic.com>
2026-01-14 12:38:54 +08:00

72 lines
2.3 KiB
Python

"""任务管理 API - 兼容 /api/v1/tasks 路径"""
from pathlib import Path
from typing import List, Optional

from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy.orm import Session

from ...config import settings
from ...core.i18n import I18n, get_i18n
from ...database import get_db
from ...models.job import Job, JobStatus
from ...schemas.job import JobResponse
# Router for the task-management endpoints; the application mounts it
# under /api/v1/tasks (compatibility path for the frontend).
router = APIRouter()
class TaskCreateRequest(BaseModel):
    """Request payload for creating a task."""
    # List of file names (presumably previously uploaded — TODO confirm
    # against the upload endpoint).
    files: List[str]
    # Sequence type; defaults to "nucl" (nucleotide).
    sequence_type: str = "nucl"
    # Minimum identity threshold for matches.
    min_identity: float = 0.8
    # Minimum coverage threshold for matches.
    min_coverage: float = 0.6
    # Whether hits from unknown families are accepted.
    allow_unknown_families: bool = False
    # Whether an index hit is required for a result to count.
    require_index_hit: bool = True
class QueuePosition(BaseModel):
    """Queue-position info returned by the /{task_id}/queue endpoint."""
    # 1-based position of the job among still-waiting jobs.
    position: int
    # Rough ETA in minutes; None when no estimate is available.
    # Fix: the original declared `int = None`, which is a type error
    # (pydantic v2 rejects None for a plain `int` field). Optional[int]
    # keeps the same default and is backward-compatible for callers.
    estimated_wait_minutes: Optional[int] = None
@router.post("/", response_model=JobResponse)
async def create_task(request: TaskCreateRequest, db: Session = Depends(get_db), i18n: I18n = Depends(get_i18n)):
"""创建新任务(兼容前端)"""
# 暂时复用 jobs 逻辑
# TODO: 实现完整的文件上传和处理
raise HTTPException(status_code=501, detail=i18n.t("use_create_endpoint"))
@router.get("/{task_id}", response_model=JobResponse)
async def get_task(task_id: str, db: Session = Depends(get_db), i18n: I18n = Depends(get_i18n)):
"""获取任务状态"""
job = db.query(Job).filter(Job.id == task_id).first()
if not job:
raise HTTPException(status_code=404, detail=i18n.t("task_not_found"))
return job
@router.get("/{task_id}/queue")
async def get_queue_position(task_id: str, db: Session = Depends(get_db), i18n: I18n = Depends(get_i18n)):
"""获取排队位置"""
job = db.query(Job).filter(Job.id == task_id).first()
if not job:
raise HTTPException(status_code=404, detail=i18n.t("task_not_found"))
if job.status not in [JobStatus.PENDING, JobStatus.QUEUED]:
return {"position": 0, "message": i18n.t("task_not_in_queue")}
# 计算排队位置
ahead_jobs = db.query(Job).filter(
Job.id != task_id,
Job.status.in_([JobStatus.PENDING, JobStatus.QUEUED]),
Job.created_at < job.created_at
).count()
position = ahead_jobs + 1
# 假设每个任务约5分钟
estimated_wait = position * 5
return QueuePosition(position=position, estimated_wait_minutes=estimated_wait)