first add
This commit is contained in:
88
backend/app/workers/pipeline.py
Normal file
88
backend/app/workers/pipeline.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from sqlmodel import Session, select
|
||||
|
||||
from . import __init__ # noqa: F401
|
||||
from ..core.celery_app import celery_app
|
||||
from ..core.database import SessionLocal
|
||||
from ..models.job import Job, Step, StepStatus, JobStatus
|
||||
from .steps.run_digger import run_bttoxin_digger
|
||||
|
||||
|
||||
def _with_session():
    """Open a fresh database session for pipeline work.

    Callers use the returned session as a context manager so it is
    closed when the task body finishes.
    """
    session = SessionLocal()
    return session
|
||||
|
||||
|
||||
def _update_job(session: Session, job: Job, **changes) -> None:
    """Apply *changes* as attribute updates on *job* and persist them.

    Commits immediately and refreshes *job* so the caller observes the
    database-backed state after the write.
    """
    for field, value in changes.items():
        setattr(job, field, value)
    session.add(job)
    session.commit()
    session.refresh(job)
|
||||
|
||||
|
||||
def _create_or_get_step(session: Session, job_id: str, step_name: str, order: int) -> Step:
    """Fetch the Step row for (job_id, step_name), creating it if absent.

    A newly created step is persisted immediately and starts in PENDING
    status with the given *order*.
    """
    query = select(Step).where(Step.job_id == job_id, Step.step_name == step_name)
    existing = session.exec(query).first()
    if existing is not None:
        return existing

    step = Step(
        job_id=job_id,
        step_name=step_name,
        step_order=order,
        status=StepStatus.PENDING,
    )
    session.add(step)
    session.commit()
    session.refresh(step)
    return step
|
||||
|
||||
|
||||
def _save_step(session: Session, step: Step) -> None:
    """Persist *step* and reload its database-backed state."""
    session.add(step)
    session.commit()
    session.refresh(step)


@celery_app.task(name="pipeline.orchestrate")
def orchestrate_pipeline(job_id: str, config: dict) -> dict:
    """Orchestrate the processing pipeline for a job.

    Marks the job RUNNING, executes the single pipeline step
    ("run_digger" via ``run_bttoxin_digger``), and records per-step and
    job-level status, timestamps, and results.

    Args:
        job_id: Primary key of the Job row to process.
        config: Pipeline configuration. Currently unused; reserved for
            future steps so the task signature stays stable.

    Returns:
        A summary dict. On success: ``{"job_id", "ok": True, "step",
        "result"}``. On failure: ``{"job_id", "ok": False, "error"}``.
        When the job row is missing: ``{"error": "job not found",
        "job_id": ...}``.
    """
    with _with_session() as session:
        job = session.get(Job, job_id)
        if job is None:
            return {"error": "job not found", "job_id": job_id}

        _update_job(
            session,
            job,
            status=JobStatus.RUNNING,
            started_at=datetime.utcnow(),
            current_step="run_digger",
            progress=0,
        )

        # Step 1: run_digger
        step = _create_or_get_step(session, job_id, "run_digger", 1)
        step.status = StepStatus.RUNNING
        step.started_at = datetime.utcnow()
        _save_step(session, step)

        try:
            # Fall back to the current directory when the job has no
            # workspace recorded, so Path() never receives None.
            ws_root = Path(job.workspace_path or ".")
            result = run_bttoxin_digger(
                workspace_root=ws_root,
                threads=job.threads,
                sequence_type=job.sequence_type,
                scaf_suffix=job.scaf_suffix,
                update_db=job.update_db,
            )

            step.status = StepStatus.COMPLETED
            step.completed_at = datetime.utcnow()
            step.result_data = {"results_dir": result["results_dir"]}
            _save_step(session, step)

            _update_job(
                session,
                job,
                progress=100,
                status=JobStatus.COMPLETED,
                completed_at=datetime.utcnow(),
            )
            return {"job_id": job_id, "ok": True, "step": "run_digger", "result": result}

        except Exception as exc:  # top-level catch only: record the failure
            step.status = StepStatus.FAILED
            step.error_message = str(exc)
            step.completed_at = datetime.utcnow()
            _save_step(session, step)

            # Fix: also stamp completed_at on failure so failed jobs get a
            # terminal timestamp (previously only set on the success path).
            _update_job(
                session,
                job,
                status=JobStatus.FAILED,
                error_message=str(exc),
                completed_at=datetime.utcnow(),
            )
            return {"job_id": job_id, "ok": False, "error": str(exc)}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user