first add

This commit is contained in:
2025-10-13 21:05:00 +08:00
parent c7744836e9
commit d71163df00
29 changed files with 144656 additions and 37 deletions

View File

@@ -0,0 +1,2 @@

View File

@@ -0,0 +1,88 @@
from datetime import datetime
from pathlib import Path
from sqlmodel import Session, select
from . import __init__ # noqa: F401
from ..core.celery_app import celery_app
from ..core.database import SessionLocal
from ..models.job import Job, Step, StepStatus, JobStatus
from .steps.run_digger import run_bttoxin_digger
def _with_session():
    """Open a fresh database session; intended for use as a context manager."""
    session = SessionLocal()
    return session
def _update_job(session: Session, job: Job, **changes) -> None:
    """Apply keyword *changes* as attribute updates on *job* and persist them.

    Commits immediately and refreshes *job* so the caller observes the
    database-confirmed state.
    """
    for field, value in changes.items():
        setattr(job, field, value)
    session.add(job)
    session.commit()
    session.refresh(job)
def _create_or_get_step(session: Session, job_id: str, step_name: str, order: int) -> Step:
    """Return the Step row for (job_id, step_name), creating a PENDING one if absent."""
    query = select(Step).where(Step.job_id == job_id, Step.step_name == step_name)
    existing = session.exec(query).first()
    if existing is not None:
        return existing
    created = Step(
        job_id=job_id,
        step_name=step_name,
        step_order=order,
        status=StepStatus.PENDING,
    )
    session.add(created)
    session.commit()
    session.refresh(created)
    return created
@celery_app.task(name="pipeline.orchestrate")
def orchestrate_pipeline(job_id: str, config: dict) -> dict:
    """Run the analysis pipeline for *job_id* and record progress in the DB.

    Currently the pipeline is a single step ("run_digger"). The job and
    step rows are moved through RUNNING and then COMPLETED/FAILED, with
    timestamps stamped on every terminal transition.

    Args:
        job_id: Primary key of the Job row to execute.
        config: Reserved for future per-run options; currently unused.

    Returns:
        A result dict: ``{"job_id", "ok", ...}`` on success/failure, or
        ``{"error": "job not found", "job_id": ...}`` when the job row
        does not exist.
    """
    with _with_session() as session:
        job = session.get(Job, job_id)
        if job is None:
            return {"error": "job not found", "job_id": job_id}
        _update_job(
            session,
            job,
            status=JobStatus.RUNNING,
            started_at=datetime.utcnow(),
            current_step="run_digger",
            progress=0,
        )
        # Step 1: run_digger
        step = _create_or_get_step(session, job_id, "run_digger", 1)
        step.status = StepStatus.RUNNING
        step.started_at = datetime.utcnow()
        session.add(step)
        session.commit()
        session.refresh(step)
        try:
            ws_root = Path(job.workspace_path or ".")
            result = run_bttoxin_digger(
                workspace_root=ws_root,
                threads=job.threads,
                sequence_type=job.sequence_type,
                scaf_suffix=job.scaf_suffix,
                update_db=job.update_db,
            )
            step.status = StepStatus.COMPLETED
            step.completed_at = datetime.utcnow()
            step.result_data = {"results_dir": result["results_dir"]}
            session.add(step)
            session.commit()
            session.refresh(step)
            _update_job(session, job, progress=100, status=JobStatus.COMPLETED, completed_at=datetime.utcnow())
            return {"job_id": job_id, "ok": True, "step": "run_digger", "result": result}
        except Exception as exc:  # top-level catch only: record the failure
            step.status = StepStatus.FAILED
            step.error_message = str(exc)
            step.completed_at = datetime.utcnow()
            session.add(step)
            session.commit()
            # Mirror the success path: a failed job is also terminal, so
            # stamp completed_at here too (previously only set on success).
            _update_job(
                session,
                job,
                status=JobStatus.FAILED,
                error_message=str(exc),
                completed_at=datetime.utcnow(),
            )
            return {"job_id": job_id, "ok": False, "error": str(exc)}

View File

@@ -0,0 +1,46 @@
from __future__ import annotations
from pathlib import Path
from typing import Dict
from ...services.docker_service import DockerRunner
def run_bttoxin_digger(workspace_root: Path, threads: int, sequence_type: str, scaf_suffix: str, update_db: bool) -> Dict:
    """Execute BtToxin_Digger inside a container against *workspace_root*.

    Inputs are read from ``<workspace_root>/inputs`` and results written to
    ``<workspace_root>/results`` (created if missing); the workspace is
    mounted into the container at ``/work``.

    Returns a dict with the assembled command line, the captured container
    logs, and the results directory path.
    """
    docker = DockerRunner()
    input_path = workspace_root / "inputs"
    output_path = workspace_root / "results"
    output_path.mkdir(parents=True, exist_ok=True)

    # Example command: adjust to match the actual BtToxin_Digger CLI.
    cli = [
        "BtToxin_Digger",
        "--threads", str(threads),
        "--seq_type", sequence_type,
        "--scaf_suffix", scaf_suffix,
        "--input", str(input_path),
        "--outdir", str(output_path),
    ]
    if update_db:
        cli.append("--update_db")

    captured_logs = docker.run(
        image=None,
        command=cli,
        workdir=Path("/work"),
        mounts={workspace_root: "/work"},
        env=None,
        platform=None,
        detach=False,
        remove=True,
    )
    return {
        "command": cli,
        "logs": captured_logs,
        "results_dir": str(output_path),
    }