first add

This commit is contained in:
2025-10-13 21:05:00 +08:00
parent c7744836e9
commit d71163df00
29 changed files with 144656 additions and 37 deletions

View File

@@ -1,3 +1,18 @@
from celery import Celery
from .config import settings
# Celery application instance for the BtToxin pipeline.  Broker and result
# backend come from the settings helpers, which fall back to the Redis URL.
celery_app = Celery(
    "bttoxin",
    broker=settings.get_celery_broker_url(),
    backend=settings.get_celery_result_backend(),
)

# Worker behaviour tweaks.
celery_app.conf.update(
    {
        "task_track_started": True,        # report STARTED, not just PENDING
        "worker_prefetch_multiplier": 1,   # fetch one task at a time per worker
    }
)
"""Celery 配置"""
from celery import Celery
from ..config import settings

View File

@@ -0,0 +1,96 @@
"""应用配置"""
from typing import Optional
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application settings, loaded from environment variables / a .env file."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )

    # ============== Application basics ==============
    APP_NAME: str = "BtToxin Pipeline"
    APP_VERSION: str = "1.0.0"
    DEBUG: bool = False

    # API configuration
    API_V1_PREFIX: str = "/api/v1"

    # ============== Database ==============
    POSTGRES_HOST: str = "localhost"
    POSTGRES_PORT: int = 5432
    POSTGRES_USER: str = "bttoxin"
    POSTGRES_PASSWORD: str = "bttoxin_password"
    POSTGRES_DB: str = "bttoxin_db"

    @property
    def DATABASE_URL(self) -> str:
        """PostgreSQL connection URL built from the POSTGRES_* fields.

        NOTE(review): credentials are interpolated verbatim — a password
        containing URL-reserved characters (e.g. '@' or '/') would need
        percent-encoding; confirm deployment passwords are URL-safe.
        """
        return (
            f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}"
            f"@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}"
        )

    # ============== Redis ==============
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0
    REDIS_PASSWORD: Optional[str] = None

    @property
    def REDIS_URL(self) -> str:
        """Redis connection URL; includes the password only when one is set."""
        if self.REDIS_PASSWORD:
            return f"redis://:{self.REDIS_PASSWORD}@{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}"
        return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}"

    # ============== Celery ==============
    # Explicit overrides; None means "fall back to REDIS_URL".
    CELERY_BROKER_URL: Optional[str] = None
    CELERY_RESULT_BACKEND: Optional[str] = None

    def get_celery_broker_url(self) -> str:
        """Return CELERY_BROKER_URL, defaulting to the Redis URL."""
        return self.CELERY_BROKER_URL or self.REDIS_URL

    def get_celery_result_backend(self) -> str:
        """Return CELERY_RESULT_BACKEND, defaulting to the Redis URL."""
        return self.CELERY_RESULT_BACKEND or self.REDIS_URL

    # ============== Workspace ==============
    WORKSPACE_BASE_PATH: str = "/data/jobs"
    TEMP_BASE_PATH: str = "/data/temp"
    MAX_UPLOAD_SIZE_MB: int = 500

    # ============== Docker ==============
    DOCKER_IMAGE: str = "quay.io/biocontainers/bttoxin_digger:1.0.10--hdfd78af_0"
    DOCKER_PLATFORM: str = "linux/amd64"

    # ============== S3 ==============
    S3_ENDPOINT: Optional[str] = None
    S3_ACCESS_KEY: Optional[str] = None
    S3_SECRET_KEY: Optional[str] = None
    S3_BUCKET: str = "bttoxin-results"
    S3_REGION: str = "us-east-1"

    # ============== Task execution ==============
    DEFAULT_THREADS: int = 4
    MAX_THREADS: int = 16
    TASK_TIMEOUT_SECONDS: int = 7200  # 2 hours
    JOB_RETENTION_DAYS: int = 30

    # ============== CORS ==============
    # Fix: the annotation was a bare `list`, which leaves the element type
    # unvalidated; `list[str]` lets pydantic validate env-supplied origins.
    CORS_ORIGINS: list[str] = ["http://localhost:3000", "http://localhost:5173"]

    # ============== Logging ==============
    LOG_LEVEL: str = "INFO"
    LOG_FORMAT: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Module-level singleton configuration instance read by the rest of the app.
settings = Settings()

View File

@@ -0,0 +1,50 @@
"""SQLModel 数据库配置"""
from typing import Generator
from sqlmodel import Session, create_engine
from sqlalchemy.orm import sessionmaker
from .config import settings
# Create the database engine shared by the whole application.
engine = create_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,   # log emitted SQL when DEBUG is enabled
    pool_pre_ping=True,    # validate pooled connections before handing them out
    pool_size=10,
    max_overflow=20,
)
# Session factory producing sqlmodel Session objects bound to the engine.
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
    class_=Session,
)
def get_session() -> Generator[Session, None, None]:
    """Yield a database session (dependency-injection entry point).

    The session is always closed once the caller is done with it.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
def init_db() -> None:
    """Initialize the database by creating every registered table."""
    from sqlmodel import SQLModel

    # Importing the models registers their tables on SQLModel.metadata.
    from ..models.job import Job, Step, JobLog  # noqa: F401

    metadata = SQLModel.metadata
    metadata.create_all(engine)
    print("✓ Database initialized")
def drop_db() -> None:
    """Drop every table. Development use only — destroys all data."""
    from sqlmodel import SQLModel

    metadata = SQLModel.metadata
    metadata.drop_all(engine)
    print("✓ Database dropped")

View File

@@ -0,0 +1,14 @@
import logging
import sys
def setup_logging(level: str = "INFO", fmt: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s") -> None:
    """Configure the root logger to write to stdout.

    Idempotent: a stream handler is attached only when the root logger has
    none yet; the level is (re)applied on every call.
    """
    root_logger = logging.getLogger()
    if not root_logger.handlers:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(logging.Formatter(fmt))
        root_logger.addHandler(stream_handler)
    root_logger.setLevel(level)