Initial commit: BtToxin Pipeline project structure

This commit is contained in:
2025-10-13 19:22:56 +08:00
commit c7744836e9
37 changed files with 1146 additions and 0 deletions

21
.env.example Normal file
View File

@@ -0,0 +1,21 @@
# Application
APP_NAME=BtToxin Pipeline
DEBUG=false
# Database
DATABASE_URL=postgresql://postgres:password@localhost:5432/bttoxin
# Redis
REDIS_URL=redis://localhost:6379/0
# S3/MinIO
S3_ENDPOINT=http://localhost:9000
S3_ACCESS_KEY=minioadmin
S3_SECRET_KEY=minioadmin
S3_BUCKET=bttoxin-results
# Docker
DOCKER_IMAGE=quay.io/biocontainers/bttoxin_digger:1.0.10--hdfd78af_0
# NCBI
NCBI_EMAIL=your_email@example.com

29
.gitignore vendored Normal file
View File

@@ -0,0 +1,29 @@
# Python
__pycache__/
*.py[cod]
*.so
.Python
venv/
*.egg-info/
# Node
node_modules/
dist/
# System
.DS_Store
# Environment
.env
*.log
# Data
uploads/
results/
logs/
tests/test_data/genomes/*.fna
# IDE
.vscode/
.idea/
*.swp

15
.woodpecker/test.yml Normal file
View File

@@ -0,0 +1,15 @@
# CI pipeline: runs on every push and pull request.
when:
  event: [push, pull_request]

steps:
  # Sanity-check that the analysis image is usable before testing.
  prepare:
    image: bttoxin-digger:latest
    commands:
      - echo "Preparing environment..."
      - BtToxin_Digger --version
  # Backend test suite.
  test:
    image: python:3.10
    commands:
      - pip install pytest httpx
      - pytest tests/

21
Makefile Normal file
View File

@@ -0,0 +1,21 @@
.PHONY: help dev build test clean

# Print the available targets.
help:
	@echo "BtToxin Pipeline Commands:"
	@echo " make dev - Start development environment"
	@echo " make build - Build Docker images"
	@echo " make test - Run tests"
	@echo " make clean - Clean up"

# Bring up the full stack in the background.
dev:
	docker compose -f config/docker-compose.yml up -d

build:
	docker compose -f config/docker-compose.yml build

test:
	cd backend && pytest

# Tear down containers/volumes and remove generated data.
clean:
	docker compose -f config/docker-compose.yml down -v
	rm -rf uploads/ results/ logs/

51
README.md Normal file
View File

@@ -0,0 +1,51 @@
# BtToxin Pipeline
Automated Bacillus thuringiensis toxin mining system with CI/CD integration.
## Quick Start
### Prerequisites
- Docker / Podman
- Python 3.10+
- Node.js 18+
### Development Setup
```bash
# 1. Clone and setup
git clone <your-repo>
cd bttoxin-pipeline
# 2. Install dependencies
cd backend && pip install -r requirements.txt
cd ../frontend && npm install
# 3. Start services
docker compose -f config/docker-compose.yml up -d
# 4. Run backend
cd backend
uvicorn app.main:app --reload
# 5. Run frontend
cd frontend
npm run dev
```
## Architecture
Frontend (Vue 3) → Backend (FastAPI) → Celery worker → Docker (BtToxin_Digger), backed by PostgreSQL + Redis for state and queuing, with S3/MinIO for result storage.
## Documentation
- [API Documentation](docs/api.md)
- [Deployment Guide](docs/deployment.md)
- [Usage Guide](docs/usage.md)
## License
MIT License

2
backend/app/__init__.py Normal file
View File

@@ -0,0 +1,2 @@
"""BtToxin Pipeline Backend Application"""
__version__ = "1.0.0"

View File

View File

@@ -0,0 +1,98 @@
"""任务管理 API"""
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
from sqlalchemy.orm import Session
from typing import List
from pathlib import Path
import uuid
import shutil
from ...database import get_db
from ...models.job import Job, JobStatus
from ...schemas.job import JobResponse
from ...workers.tasks import run_bttoxin_analysis
from ...config import settings
router = APIRouter()
UPLOAD_DIR = Path(settings.UPLOAD_DIR)
RESULTS_DIR = Path(settings.RESULTS_DIR)
UPLOAD_DIR.mkdir(exist_ok=True)
RESULTS_DIR.mkdir(exist_ok=True)
@router.post("/create", response_model=JobResponse)
async def create_job(
files: List[UploadFile] = File(...),
sequence_type: str = "nucl",
scaf_suffix: str = ".fna",
threads: int = 4,
db: Session = Depends(get_db)
):
"""创建新任务"""
job_id = str(uuid.uuid4())
job_input_dir = UPLOAD_DIR / job_id
job_output_dir = RESULTS_DIR / job_id
job_input_dir.mkdir(parents=True)
job_output_dir.mkdir(parents=True)
uploaded_files = []
for file in files:
file_path = job_input_dir / file.filename
with open(file_path, "wb") as buffer:
shutil.copyfileobj(file.file, buffer)
uploaded_files.append(file.filename)
job = Job(
id=job_id,
status=JobStatus.PENDING,
input_files=uploaded_files,
sequence_type=sequence_type,
scaf_suffix=scaf_suffix,
threads=threads
)
db.add(job)
db.commit()
db.refresh(job)
task = run_bttoxin_analysis.delay(
job_id=job_id,
input_dir=str(job_input_dir),
output_dir=str(job_output_dir),
sequence_type=sequence_type,
scaf_suffix=scaf_suffix,
threads=threads
)
job.celery_task_id = task.id
db.commit()
return job
@router.get("/{job_id}", response_model=JobResponse)
async def get_job(job_id: str, db: Session = Depends(get_db)):
"""获取任务详情"""
job = db.query(Job).filter(Job.id == job_id).first()
if not job:
raise HTTPException(status_code=404, detail="Job not found")
return job
@router.get("/{job_id}/progress")
async def get_job_progress(job_id: str, db: Session = Depends(get_db)):
"""获取任务进度"""
job = db.query(Job).filter(Job.id == job_id).first()
if not job:
raise HTTPException(status_code=404, detail="Job not found")
if job.celery_task_id:
from ...core.celery_app import celery_app
task = celery_app.AsyncResult(job.celery_task_id)
return {
'job_id': job_id,
'status': job.status,
'celery_state': task.state,
'progress': task.info if task.state == 'PROGRESS' else None
}
return {'job_id': job_id, 'status': job.status}

View File

@@ -0,0 +1,8 @@
"""结果查询 API"""
from fastapi import APIRouter
router = APIRouter()
@router.get("/")
async def results_info():
return {"message": "Results endpoint"}

View File

@@ -0,0 +1,8 @@
"""文件上传 API"""
from fastapi import APIRouter
router = APIRouter()
@router.get("/")
async def upload_info():
return {"message": "Upload endpoint"}

47
backend/app/config.py Normal file
View File

@@ -0,0 +1,47 @@
"""应用配置"""
from pydantic_settings import BaseSettings
from typing import Optional
class Settings(BaseSettings):
"""应用设置"""
# 应用基础配置
APP_NAME: str = "BtToxin Pipeline"
APP_VERSION: str = "1.0.0"
DEBUG: bool = False
# API 配置
API_V1_STR: str = "/api/v1"
# 数据库
DATABASE_URL: str = "postgresql://postgres:password@localhost:5432/bttoxin"
# Redis
REDIS_URL: str = "redis://localhost:6379/0"
# S3/MinIO
S3_ENDPOINT: Optional[str] = None
S3_ACCESS_KEY: str = ""
S3_SECRET_KEY: str = ""
S3_BUCKET: str = "bttoxin-results"
S3_REGION: str = "us-east-1"
# Docker
DOCKER_IMAGE: str = "quay.io/biocontainers/bttoxin_digger:1.0.10--hdfd78af_0"
# 文件路径
UPLOAD_DIR: str = "uploads"
RESULTS_DIR: str = "results"
# Celery
CELERY_BROKER_URL: str = "redis://localhost:6379/0"
CELERY_RESULT_BACKEND: str = "redis://localhost:6379/0"
# CORS
CORS_ORIGINS: list = ["http://localhost:3000", "http://localhost:5173"]
class Config:
env_file = ".env"
case_sensitive = True
settings = Settings()

View File

@@ -0,0 +1,21 @@
"""Celery 配置"""
from celery import Celery
from ..config import settings
celery_app = Celery(
"bttoxin_worker",
broker=settings.CELERY_BROKER_URL,
backend=settings.CELERY_RESULT_BACKEND,
include=['app.workers.tasks']
)
celery_app.conf.update(
task_serializer='json',
accept_content=['json'],
result_serializer='json',
timezone='UTC',
enable_utc=True,
task_track_started=True,
task_time_limit=7200,
worker_prefetch_multiplier=1,
)

View File

@@ -0,0 +1,72 @@
"""Docker 客户端管理"""
import docker
from typing import Dict, Any
from pathlib import Path
import logging
logger = logging.getLogger(__name__)
class DockerManager:
"""Docker 容器管理器"""
def __init__(self, image: str = None):
from ..config import settings
self.client = docker.from_env()
self.image = image or settings.DOCKER_IMAGE
def ensure_image(self) -> bool:
"""确保镜像存在"""
try:
self.client.images.get(self.image)
return True
except docker.errors.ImageNotFound:
logger.info(f"Pulling image {self.image}...")
self.client.images.pull(self.image)
return True
def run_bttoxin_digger(
self,
input_dir: Path,
output_dir: Path,
sequence_type: str = "nucl",
scaf_suffix: str = ".fna",
threads: int = 4
) -> Dict[str, Any]:
"""运行 BtToxin_Digger"""
self.ensure_image()
volumes = {
str(input_dir.absolute()): {'bind': '/data', 'mode': 'ro'},
str(output_dir.absolute()): {'bind': '/results', 'mode': 'rw'}
}
command = [
"/usr/local/env-execute", "BtToxin_Digger",
"--SeqPath", "/data",
"--SequenceType", sequence_type,
"--Scaf_suffix", scaf_suffix,
"--threads", str(threads)
]
try:
container = self.client.containers.run(
self.image,
command=command,
volumes=volumes,
platform="linux/amd64",
detach=True,
remove=False
)
result = container.wait()
logs = container.logs().decode('utf-8')
container.remove()
return {
'success': result['StatusCode'] == 0,
'logs': logs,
'exit_code': result['StatusCode']
}
except Exception as e:
logger.error(f"Error: {e}")
return {'success': False, 'error': str(e)}

24
backend/app/database.py Normal file
View File

@@ -0,0 +1,24 @@
"""数据库连接"""
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from .config import settings
engine = create_engine(
settings.DATABASE_URL,
pool_pre_ping=True,
echo=settings.DEBUG
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
def get_db():
    """FastAPI dependency: yield a DB session, always closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()

49
backend/app/main.py Normal file
View File

@@ -0,0 +1,49 @@
"""FastAPI 主应用"""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
from .config import settings
from .api.v1 import jobs, upload, results
@asynccontextmanager
async def lifespan(app: FastAPI):
"""应用生命周期管理"""
# 启动时
print("🚀 Starting BtToxin Pipeline API...")
yield
# 关闭时
print("👋 Shutting down BtToxin Pipeline API...")
# Application instance; `lifespan` hooks startup/shutdown logging.
app = FastAPI(
title=settings.APP_NAME,
version=settings.APP_VERSION,
description="Automated Bacillus thuringiensis toxin mining pipeline",
lifespan=lifespan
)
# CORS: allow the dev frontends listed in settings to call the API.
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Routers, all mounted under the versioned API prefix.
app.include_router(jobs.router, prefix=f"{settings.API_V1_STR}/jobs", tags=["jobs"])
app.include_router(upload.router, prefix=f"{settings.API_V1_STR}/upload", tags=["upload"])
app.include_router(results.router, prefix=f"{settings.API_V1_STR}/results", tags=["results"])
@app.get("/")
async def root():
return {
"name": settings.APP_NAME,
"version": settings.APP_VERSION,
"status": "healthy"
}
@app.get("/health")
async def health():
return {"status": "ok"}

32
backend/app/models/job.py Normal file
View File

@@ -0,0 +1,32 @@
"""任务模型"""
from sqlalchemy import Column, String, Integer, DateTime, JSON, Enum, Text
from sqlalchemy.sql import func
import enum
from ..database import Base
class JobStatus(str, enum.Enum):
    """Lifecycle states of an analysis job (stored as strings in the DB)."""
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
class Job(Base):
    """Persistent record of one BtToxin_Digger analysis run."""

    __tablename__ = "jobs"

    # Identity and Celery linkage
    id = Column(String, primary_key=True, index=True)
    celery_task_id = Column(String, nullable=True)
    status = Column(Enum(JobStatus), default=JobStatus.PENDING)

    # Analysis parameters as submitted by the client
    input_files = Column(JSON)
    sequence_type = Column(String, default="nucl")
    scaf_suffix = Column(String, default=".fna")
    threads = Column(Integer, default=4)

    # Outcome
    result_url = Column(String, nullable=True)
    logs = Column(Text, nullable=True)
    error_message = Column(Text, nullable=True)

    # Timestamps (created/updated are DB-side defaults)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
    completed_at = Column(DateTime(timezone=True), nullable=True)

View File

@@ -0,0 +1,31 @@
"""任务 Schema"""
from pydantic import BaseModel
from typing import Optional, List
from datetime import datetime
from enum import Enum
class JobStatus(str, Enum):
    """Job lifecycle states mirrored from the ORM model for API responses."""
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
class JobCreate(BaseModel):
    """Request payload describing an analysis job to create."""
    input_files: List[str]
    sequence_type: str = "nucl"
    scaf_suffix: str = ".fna"
    threads: int = 4
class JobResponse(BaseModel):
    """API representation of a job, built from the ORM object."""
    id: str
    status: JobStatus
    input_files: List[str]
    sequence_type: str
    threads: int
    result_url: Optional[str] = None
    error_message: Optional[str] = None
    created_at: datetime
    completed_at: Optional[datetime] = None

    class Config:
        # Allow construction directly from SQLAlchemy model instances.
        from_attributes = True

View File

@@ -0,0 +1,64 @@
"""Celery 任务"""
from celery import Task
from pathlib import Path
import shutil
import logging
from ..core.celery_app import celery_app
from ..core.docker_client import DockerManager
from ..database import SessionLocal
from ..models.job import Job, JobStatus
logger = logging.getLogger(__name__)
@celery_app.task(bind=True)
def run_bttoxin_analysis(
self,
job_id: str,
input_dir: str,
output_dir: str,
sequence_type: str = "nucl",
scaf_suffix: str = ".fna",
threads: int = 4
):
"""执行分析任务"""
db = SessionLocal()
try:
job = db.query(Job).filter(Job.id == job_id).first()
job.status = JobStatus.RUNNING
db.commit()
self.update_state(
state='PROGRESS',
meta={'current': 20, 'total': 100, 'status': 'Running analysis...'}
)
docker_manager = DockerManager()
result = docker_manager.run_bttoxin_digger(
input_dir=Path(input_dir),
output_dir=Path(output_dir),
sequence_type=sequence_type,
scaf_suffix=scaf_suffix,
threads=threads
)
if result['success']:
job.status = JobStatus.COMPLETED
job.logs = result.get('logs', '')
else:
job.status = JobStatus.FAILED
job.error_message = result.get('error', 'Analysis failed')
db.commit()
return {'job_id': job_id, 'status': job.status}
except Exception as e:
logger.error(f"Task failed: {e}")
job.status = JobStatus.FAILED
job.error_message = str(e)
db.commit()
raise
finally:
db.close()

38
backend/requirements.txt Normal file
View File

@@ -0,0 +1,38 @@
# Web 框架
fastapi==0.115.5
uvicorn[standard]==0.32.1
python-multipart==0.0.20
# 任务队列
celery==5.4.0
redis==5.2.1
flower==2.0.1
# 容器管理
docker==7.1.0
# 数据库
sqlalchemy==2.0.36
alembic==1.14.0
psycopg2-binary==2.9.10
# 对象存储
boto3==1.35.78
minio==7.2.11
# 数据处理
biopython==1.84
pandas==2.2.3
# 工具
pydantic==2.10.4
pydantic-settings==2.6.1
python-dotenv==1.0.1
aiofiles==24.1.0
# 监控
prometheus-client==0.21.0
# 测试
pytest==8.3.4
httpx==0.28.1

75
config/docker-compose.yml Normal file
View File

@@ -0,0 +1,75 @@
version: '3.8'

services:
  postgres:
    image: postgres:15
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: password
      POSTGRES_DB: bttoxin
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"

  minio:
    image: minio/minio
    command: server /data --console-address ":9001"
    environment:
      MINIO_ROOT_USER: minioadmin
      MINIO_ROOT_PASSWORD: minioadmin
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio_data:/data

  backend:
    build:
      context: ..
      dockerfile: docker/worker/Dockerfile
    command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
    ports:
      - "8000:8000"
    volumes:
      - ../backend:/app
      # Docker socket mounted so analysis containers can be launched.
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      DATABASE_URL: postgresql://postgres:password@postgres:5432/bttoxin
      REDIS_URL: redis://redis:6379/0
    depends_on:
      - postgres
      - redis

  celery:
    build:
      context: ..
      dockerfile: docker/worker/Dockerfile
    command: celery -A app.core.celery_app worker --loglevel=info
    volumes:
      - ../backend:/app
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      DATABASE_URL: postgresql://postgres:password@postgres:5432/bttoxin
      REDIS_URL: redis://redis:6379/0
    depends_on:
      - postgres
      - redis

  frontend:
    build:
      context: ..
      dockerfile: docker/frontend/Dockerfile
    ports:
      - "3000:80"
    depends_on:
      - backend

volumes:
  postgres_data:
  minio_data:

46
docker/digger/Dockerfile Normal file
View File

@@ -0,0 +1,46 @@
# BtToxin_Digger Docker 镜像
FROM continuumio/miniconda3:4.10.3
LABEL maintainer="your-email@example.com" \
description="BtToxin_Digger for Pipeline" \
version="1.0.10"
ENV LANG=C.UTF-8 \
PATH=/opt/conda/envs/bttoxin/bin:$PATH \
CONDA_DEFAULT_ENV=bttoxin \
PYTHONUNBUFFERED=1
# 配置 conda
RUN conda config --add channels defaults && \
conda config --add channels bioconda && \
conda config --add channels conda-forge && \
conda config --set channel_priority flexible && \
conda install -n base -c conda-forge mamba -y
# 安装 BtToxin_Digger
RUN mamba install -y \
python=3.7.10 \
perl=5.26.2 && \
mamba install -y \
bttoxin_digger=1.0.10 && \
conda clean -afy
# 安装额外工具
RUN apt-get update && apt-get install -y \
curl wget jq git && \
rm -rf /var/lib/apt/lists/*
# 创建工作目录
RUN mkdir -p /workspace/input /workspace/output /workspace/logs
WORKDIR /workspace
# 复制入口脚本
COPY entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/entrypoint.sh
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
CMD BtToxin_Digger --version || exit 1
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
CMD ["--help"]

84
docker/digger/entrypoint.sh Executable file
View File

@@ -0,0 +1,84 @@
#!/bin/bash
# Entry point for the BtToxin_Digger container.
# Runs an explicit BtToxin_Digger command when arguments are given,
# otherwise a full analysis driven by environment variables.
# pipefail: without it the `| tee` pipeline masked the tool's exit status.
set -euo pipefail

GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

log_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Verify the tool is installed and log component versions.
init() {
    log_info "Initializing BtToxin_Digger environment..."
    if ! BtToxin_Digger --version &>/dev/null; then
        log_error "BtToxin_Digger not found!"
        exit 1
    fi
    log_info "BtToxin_Digger $(BtToxin_Digger --version)"
    log_info "Python $(python --version)"
    log_info "Perl $(perl --version | head -2 | tail -1)"
}

# Best-effort database refresh; a failure is non-fatal.
update_db() {
    log_info "Updating BtToxin_Digger database..."
    if BtToxin_Digger --update-db; then
        log_info "Database updated successfully"
    else
        log_warn "Database update failed, using existing database"
    fi
}

run_analysis() {
    log_info "Starting toxin mining analysis..."
    log_info "Input: $INPUT_PATH"
    log_info "Type: $SEQUENCE_TYPE"
    log_info "Threads: $THREADS"
    # Test the pipeline directly: under `set -e` the old `[ $? -eq 0 ]`
    # check after the pipeline was unreachable on failure, and without
    # pipefail `tee`'s exit status hid the tool's.
    if BtToxin_Digger \
        --SeqPath "$INPUT_PATH" \
        --SequenceType "$SEQUENCE_TYPE" \
        --Scaf_suffix "$SCAF_SUFFIX" \
        --threads "$THREADS" \
        2>&1 | tee /workspace/logs/digger.log; then
        log_info "Analysis completed successfully"
    else
        log_error "Analysis failed!"
        exit 1
    fi
}

main() {
    INPUT_PATH="${INPUT_PATH:-/workspace/input}"
    SEQUENCE_TYPE="${SEQUENCE_TYPE:-nucl}"
    SCAF_SUFFIX="${SCAF_SUFFIX:-.fna}"
    THREADS="${THREADS:-4}"
    UPDATE_DB="${UPDATE_DB:-false}"
    init
    if [ "$UPDATE_DB" = "true" ]; then
        update_db
    fi
    if [ $# -gt 0 ]; then
        # Pass-through mode: run the tool with the given arguments.
        exec BtToxin_Digger "$@"
    else
        run_analysis
    fi
}

main "$@"

View File

@@ -0,0 +1,18 @@
# Stage 1: build the Vue application bundle
FROM node:18-alpine as builder
WORKDIR /app
# Copy manifests first so dependency install is layer-cached
COPY frontend/package*.json ./
RUN npm ci
COPY frontend/ .
RUN npm run build
# Stage 2: serve the static bundle with nginx
FROM nginx:alpine
COPY --from=builder /app/dist /usr/share/nginx/html
COPY docker/frontend/nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]

View File

@@ -0,0 +1,24 @@
server {
listen 80;
server_name localhost;
root /usr/share/nginx/html;
index index.html;
# SPA fallback: unknown paths serve index.html so Vue Router resolves them.
location / {
try_files $uri $uri/ /index.html;
}
# Proxy API calls to the FastAPI backend container.
location /api {
proxy_pass http://backend:8000;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
# WebSocket upgrade pass-through to the backend.
location /ws {
proxy_pass http://backend:8000;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
}

25
docker/worker/Dockerfile Normal file
View File

@@ -0,0 +1,25 @@
FROM python:3.10-slim
WORKDIR /app
# System build/runtime dependencies
RUN apt-get update && apt-get install -y \
gcc \
curl \
&& rm -rf /var/lib/apt/lists/*
# Docker CLI: the worker launches BtToxin_Digger containers through the
# host docker.sock mounted by docker-compose.
RUN curl -fsSL https://get.docker.com -o get-docker.sh && \
sh get-docker.sh && \
rm get-docker.sh
# Dependency manifest first, to maximize layer caching
COPY backend/requirements.txt .
# Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Application code
COPY backend/ .
CMD ["celery", "-A", "app.core.celery_app", "worker", "--loglevel=info"]

12
frontend/index.html Normal file
View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>BtToxin Pipeline</title>
</head>
<body>
<!-- Vue application mount point (bootstrapped by src/main.js) -->
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>
</html>

22
frontend/package.json Normal file
View File

@@ -0,0 +1,22 @@
{
"name": "bttoxin-frontend",
"version": "1.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"vue": "^3.5.13",
"vue-router": "^4.5.0",
"pinia": "^2.3.0",
"axios": "^1.7.9",
"naive-ui": "^2.40.1",
"@vicons/ionicons5": "^0.12.0"
},
"devDependencies": {
"@vitejs/plugin-vue": "^5.2.1",
"vite": "^6.0.5"
}
}

41
frontend/src/App.vue Normal file
View File

@@ -0,0 +1,41 @@
<template>
<!-- App shell: dark-themed header with nav menu, routed content, footer -->
<n-config-provider :theme="darkTheme">
<n-layout>
<n-layout-header bordered>
<n-space align="center" justify="space-between" style="padding: 16px">
<n-h2>BtToxin Pipeline</n-h2>
<n-menu mode="horizontal" :options="menuOptions" />
</n-space>
</n-layout-header>
<n-layout-content style="padding: 24px">
<router-view />
</n-layout-content>
<n-layout-footer bordered style="padding: 16px; text-align: center">
BtToxin Pipeline © 2025
</n-layout-footer>
</n-layout>
</n-config-provider>
</template>
<script setup>
import { darkTheme } from 'naive-ui'
import { h } from 'vue'
import { RouterLink } from 'vue-router'
// Top navigation entries; each label renders a RouterLink so menu clicks
// navigate through Vue Router instead of full page loads.
const menuOptions = [
{
label: () => h(RouterLink, { to: '/' }, { default: () => 'Home' }),
key: 'home'
},
{
label: () => h(RouterLink, { to: '/upload' }, { default: () => 'Upload' }),
key: 'upload'
},
{
label: () => h(RouterLink, { to: '/jobs' }, { default: () => 'Jobs' }),
key: 'jobs'
}
]
</script>

14
frontend/src/main.js Normal file
View File

@@ -0,0 +1,14 @@
// Application bootstrap: wire up state management, routing and the UI kit.
import { createApp } from 'vue'
import { createPinia } from 'pinia'
import naive from 'naive-ui'
import App from './App.vue'
import router from './router'

createApp(App)
  .use(createPinia())
  .use(router)
  .use(naive)
  .mount('#app')

31
frontend/src/router.js Normal file
View File

@@ -0,0 +1,31 @@
import { createRouter, createWebHistory } from 'vue-router'

// Route table; views are lazy-loaded so each page ships as its own chunk.
const router = createRouter({
  history: createWebHistory(),
  routes: [
    { path: '/', name: 'Home', component: () => import('./views/Home.vue') },
    { path: '/upload', name: 'Upload', component: () => import('./views/Upload.vue') },
    { path: '/jobs', name: 'Jobs', component: () => import('./views/Jobs.vue') },
    { path: '/jobs/:id', name: 'JobDetail', component: () => import('./views/JobDetail.vue') }
  ]
})

export default router

View File

@@ -0,0 +1,22 @@
import axios from 'axios'

// Shared axios instance targeting the versioned backend API.
const api = axios.create({ baseURL: '/api/v1', timeout: 30000 })

const jobsApi = {
  // POST a multipart form containing genome files and parameters.
  createJob (formData) {
    const config = { headers: { 'Content-Type': 'multipart/form-data' } }
    return api.post('/jobs/create', formData, config)
  },
  getJob (jobId) {
    return api.get(`/jobs/${jobId}`)
  },
  getJobProgress (jobId) {
    return api.get(`/jobs/${jobId}/progress`)
  }
}

export default jobsApi

View File

@@ -0,0 +1,7 @@
<!-- Landing page: static welcome card. -->
<template>
<n-space vertical size="large">
<n-card title="Welcome to BtToxin Pipeline">
<p>Automated Bacillus thuringiensis toxin mining system</p>
</n-card>
</n-space>
</template>

View File

@@ -0,0 +1,5 @@
<!-- Job detail view: currently only echoes the :id route parameter. -->
<template>
<n-card title="Job Details">
<p>Job ID: {{ $route.params.id }}</p>
</n-card>
</template>

View File

@@ -0,0 +1,5 @@
<!-- Job list view: placeholder empty state until list fetching is wired up. -->
<template>
<n-card title="Job List">
<n-empty description="No jobs yet" />
</n-card>
</template>

View File

@@ -0,0 +1,7 @@
<!-- Upload view: file picker placeholder; submission not yet wired to the API. -->
<template>
<n-card title="Upload Genome Files">
<n-upload multiple>
<n-button>Select Files</n-button>
</n-upload>
</n-card>
</template>

21
frontend/vite.config.js Normal file
View File

@@ -0,0 +1,21 @@
import { defineConfig } from 'vite'
import vue from '@vitejs/plugin-vue'
import { fileURLToPath, URL } from 'node:url'
// Vite config: Vue SFC support, '@' alias to src/, and a dev-server proxy
// so /api requests reach the FastAPI backend without CORS issues.
export default defineConfig({
plugins: [vue()],
resolve: {
alias: {
'@': fileURLToPath(new URL('./src', import.meta.url))
}
},
server: {
port: 3000,
proxy: {
'/api': {
target: 'http://localhost:8000',
changeOrigin: true
}
}
}
})

56
scripts/download_bpprc_data.py Executable file
View File

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""Download test genome data from BPPRC/NCBI."""
import os
import argparse
from pathlib import Path
from Bio import Entrez, SeqIO
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Placeholder contact address; overwritten by the required --email flag in main().
Entrez.email = "your_email@example.com"
# Reference Bt strains with known toxin content, keyed name -> NCBI accession.
TEST_GENOMES = {
'Bacillus_thuringiensis_HD-73': 'NZ_CP004069.1',
'Bacillus_thuringiensis_YBT-1520': 'NZ_CP003889.1',
'Bacillus_thuringiensis_BMB171': 'NC_014171.1',
}
def download_genome(accession, output_file):
    """Fetch one genome from NCBI as FASTA and write it to *output_file*.

    Returns True on success, False on any failure (which is logged).
    """
    try:
        logger.info(f"Downloading {accession}...")
        handle = Entrez.efetch(
            db="nucleotide",
            id=accession,
            rettype="fasta",
            retmode="text"
        )
        try:
            with open(output_file, 'w') as f:
                f.write(handle.read())
        finally:
            # The original leaked the network handle if the write raised.
            handle.close()
        logger.info(f"✓ Downloaded: {output_file}")
        return True
    except Exception as e:
        logger.error(f"✗ Failed: {e}")
        return False
def main():
    """CLI entry point: download every test genome into --output-dir."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--output-dir', default='tests/test_data/genomes')
    parser.add_argument('--email', required=True)
    args = parser.parse_args()

    # NCBI requires a contact address on every Entrez request.
    Entrez.email = args.email

    target = Path(args.output_dir)
    target.mkdir(parents=True, exist_ok=True)
    for name, accession in TEST_GENOMES.items():
        download_genome(accession, target / f"{name}.fna")


if __name__ == '__main__':
    main()