// Express routes: analysis-job upload/status lifecycle plus media data browse/download APIs.
const express = require('express');
|
||
const router = express.Router();
|
||
const multer = require('multer');
|
||
const { nanoid } = require('nanoid');
|
||
const path = require('path');
|
||
const fs = require('fs');
|
||
|
||
// Import separated modules
|
||
const mediaModel = require('../utils/mediaModel');
|
||
const jobService = require('../utils/job');
|
||
const { pool } = require('../config/db');
|
||
|
||
// --- Multer Config ---
// Each job's uploads are written to <project root>/results/<jobId>/uploads,
// where jobId was assigned by the /upload pre-middleware (req.jobId).
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const jobId = req.jobId;
    // Use the results folder at the project root, not routes/results.
    const dir = path.join(__dirname, '..', 'results', jobId, 'uploads');
    try {
      fs.mkdirSync(dir, { recursive: true });
      console.log(`[Upload] Created directory: ${dir}`);
      console.log(`[Upload] Job ID: ${jobId}, File: ${file.originalname}`);
    } catch (error) {
      console.error(`[Upload] Failed to create directory ${dir}:`, error);
      return cb(error);
    }
    cb(null, dir);
  },
  filename: (req, file, cb) => {
    // file.originalname is client-controlled; path.basename strips any
    // directory components so names like "../../evil" cannot escape the
    // job's upload directory.
    const safeName = path.basename(file.originalname);
    console.log(`[Upload] Saving file: ${safeName}`);
    cb(null, safeName);
  }
});
// Accept up to 30 files under the multipart field name "files".
const upload = multer({ storage }).array('files', 30);
|
||
|
||
// --- Task API Routes ---
|
||
|
||
// POST /upload — accept an uploaded file batch and start an analysis job.
// A short pre-middleware mints the job id BEFORE multer runs, so the
// storage engine can place files under results/<jobId>/uploads.
// Responds with { success, analysis_id } on success.
router.post('/upload', (req, res, next) => {
  req.jobId = nanoid(12);
  console.log(`[Upload] New upload request, Job ID: ${req.jobId}`);
  next();
}, upload, async (req, res) => {
  try {
    const uploaded = req.files || [];
    console.log(`[Upload] Files received: ${uploaded.length}`);
    console.log(`[Upload] Analysis type: ${req.body?.analysis_type || 'nutrition'}`);
    console.log(`[Upload] Email: ${req.body?.email || 'none'}`);

    if (uploaded.length === 0) {
      console.error('[Upload] No files received');
      return res.status(400).json({ success: false, error: 'No files' });
    }

    // Collect the on-disk metadata the job service needs for each file.
    const fileData = uploaded.map(({ path: filePath, originalname, size, mimetype }) => {
      console.log(`[Upload] File saved: ${filePath}, Size: ${size} bytes`);
      return {
        filePath,
        fileName: originalname,
        fileSize: size,
        mimeType: mimetype
      };
    });

    console.log(`[Upload] Creating job with ID: ${req.jobId}`);
    const job = jobService.createJob(
      req.jobId,
      req.body?.analysis_type || 'nutrition',
      req.body?.email,
      fileData
    );

    console.log(`[Upload] Job created successfully: ${job.id}`);
    res.json({ success: true, analysis_id: job.id, message: 'Job started' });
  } catch (e) {
    console.error('[Upload] Error:', e);
    res.status(500).json({ success: false, error: e.message });
  }
});
|
||
|
||
// GET /status/:id — poll a job's progress. The result payload is attached
// only once the job completes; the error only once it fails.
router.get('/status/:id', (req, res) => {
  const job = jobService.getJob(req.params.id);
  if (!job) {
    return res.status(404).json({ success: false, error: 'Job not found' });
  }

  const { status, progress, eta_seconds } = job;
  res.json({
    success: true,
    status,
    progress,
    eta_seconds,
    result: status === 'completed' ? job.result : undefined,
    error: status === 'failed' ? job.error : undefined
  });
});
|
||
|
||
// POST /stop/:id — ask the job service to cancel a running job.
router.post('/stop/:id', async (req, res) => {
  const { id } = req.params;
  try {
    await jobService.stopJob(id);
    res.json({ success: true, message: 'Job stopped' });
  } catch (e) {
    res.status(500).json({ success: false, error: e.message });
  }
});
|
||
|
||
// GET /download/:id — stream a completed job's CSV result as an attachment.
// 400 until the job has completed and produced a result.
router.get('/download/:id', (req, res) => {
  const job = jobService.getJob(req.params.id);
  if (!job || job.status !== 'completed' || !job.result) {
    return res.status(400).json({ error: 'Not ready' });
  }
  // Guard against the result file having been cleaned up after completion;
  // without this, res.download surfaces an opaque stream error.
  if (!fs.existsSync(job.result.result_file)) {
    return res.status(404).json({ error: 'Result file not found' });
  }
  res.download(job.result.result_file, `analysis_${req.params.id}.csv`);
});
|
||
|
||
// POST /send-results-email — re-send the result email for a finished job.
// Body: { analysis_id, email }. Missing fields are a client error (400),
// not a server failure — previously they fell through to a 500.
router.post('/send-results-email', async (req, res) => {
  try {
    const { analysis_id: analysisId, email } = req.body || {};
    if (!analysisId || !email) {
      return res.status(400).json({ success: false, error: 'analysis_id and email are required' });
    }
    await jobService.resendEmail(analysisId, email);
    res.json({ success: true });
  } catch (e) {
    res.status(500).json({ success: false, error: e.message });
  }
});
|
||
|
||
// --- Data & Browse API Routes ---
|
||
|
||
// GET /browse — paginated browse over media data; the row query and the
// total-count query run in parallel. When chartData=true, Sankey chart
// data is computed for the same filters and returned alongside the page.
router.get('/browse', async (req, res) => {
  try {
    const { page = 1, pageSize = 20, chartData = false } = req.query;
    // Query-string values are strings; parse once with an explicit radix
    // and fall back to the defaults so NaN never leaks into the response.
    const currentPage = Number.parseInt(page, 10) || 1;
    const size = Number.parseInt(pageSize, 10) || 20;

    const [data, total] = await Promise.all([
      mediaModel.getBrowseData(req.query, page, pageSize),
      mediaModel.getTotalCount(req.query)
    ]);

    let sankeyData = null;
    if (chartData === 'true') {
      sankeyData = await mediaModel.getSankeyData(req.query);
    }

    res.json({
      success: true,
      data,
      pagination: {
        currentPage,
        pageSize: size,
        total,
        totalPages: Math.ceil(total / size)
      },
      sankeyData
    });
  } catch (e) {
    res.status(500).json({ success: false, error: e.message });
  }
});
|
||
|
||
// GET /microbial-detail — look up a single microbe by name at the given
// taxonomy level. A miss is reported as { success: false } with HTTP 200.
router.get('/microbial-detail', async (req, res) => {
  const { name, level } = req.query;
  try {
    const data = await mediaModel.getMicrobialDetail(name, level);
    if (!data) {
      return res.json({ success: false, message: 'Not found' });
    }
    res.json({ success: true, data });
  } catch (e) {
    res.status(500).json({ success: false, error: e.message });
  }
});
|
||
|
||
// GET /taxonomy-stats — aggregate counts at one taxonomy level (default
// phylum) for cultured/uncultured records; stats are spread into the body.
router.get('/taxonomy-stats', async (req, res) => {
  const level = req.query.level || 'phylum';
  const culturedType = req.query.cultured_type || 'cultured';
  try {
    const stats = await mediaModel.getTaxonomyStats(level, culturedType);
    res.json({ success: true, ...stats });
  } catch (e) {
    res.status(500).json({ error: e.message });
  }
});
|
||
|
||
// GET /physchem-stats — distribution statistics for one physical/chemical
// property (o2, ph or temperature) of cultured/uncultured records.
router.get('/physchem-stats', async (req, res) => {
  try {
    const { type = 'o2', cultured_type = 'cultured' } = req.query;

    // Reject unknown property types up front.
    const validTypes = ['o2', 'ph', 'temperature'];
    if (!validTypes.includes(type)) {
      return res.status(400).json({
        success: false,
        message: 'Invalid physical and chemical properties type',
        validTypes
      });
    }

    const data = await mediaModel.getPhyschemStats(type, cultured_type);
    res.json({ success: true, data, type });
  } catch (e) {
    console.error('Error in /physchem-stats:', e);
    res.status(500).json({
      success: false,
      error: e.message,
      message: 'Failed to get physical and chemical properties statistics'
    });
  }
});
|
||
|
||
// GET /nutrition-stats — nutrition aggregate stats for cultured/uncultured
// records; stats fields are spread into the response body.
router.get('/nutrition-stats', async (req, res) => {
  const culturedType = req.query.cultured_type || 'cultured';
  try {
    const stats = await mediaModel.getNutritionStats(culturedType);
    res.json({ success: true, ...stats });
  } catch (e) {
    res.status(500).json({ error: e.message });
  }
});
|
||
|
||
// GET /sunburst-stats — sunburst chart data filtered by the raw query;
// total is the number of returned nodes.
router.get('/sunburst-stats', async (req, res) => {
  try {
    const data = await mediaModel.getSunburstData(req.query);
    const total = data.length;
    res.json({ success: true, data, total });
  } catch (e) {
    res.status(500).json({ error: e.message });
  }
});
|
||
|
||
// GET /health — liveness probe; 200 when the model layer's health check
// resolves, 500 otherwise.
router.get('/health', async (req, res) => {
  try {
    await mediaModel.checkHealth();
    res.json({ success: true });
  } catch (e) {
    res.status(500).json({ error: e.message });
  }
});
|
||
|
||
// --- Download data API ---

// Taxonomy columns shared by every download type. `order` is a reserved
// word in MySQL and must stay backtick-quoted in SQL.
const BASE_COLUMNS = ['species_dive_id', 'microbial_name', 'genome', 'domain', 'phylum', 'class', '`order`', 'family', 'genus', 'species'];
const BASE_HEADERS = ['species_dive_id', 'microbial_name', 'genome', 'domain', 'phylum', 'class', 'order', 'family', 'genus', 'species'];

// SQL expression extracting the value portion of a predict_* column by
// dropping everything after the LAST space (the trailing token appears to
// be a unit/confidence suffix — TODO confirm against the stored data).
const predictExpr = (column, alias) => `TRIM(CASE
  WHEN m.${column} IS NOT NULL AND m.${column} != '' THEN
    SUBSTRING(m.${column}, 1, CASE WHEN LOCATE(' ', REVERSE(m.${column})) > 0 THEN LENGTH(m.${column}) - LOCATE(' ', REVERSE(m.${column})) ELSE LENGTH(m.${column}) END)
  ELSE NULL
END) as ${alias}`;

// Per-type configuration: extra plain columns beyond the taxonomy base,
// plus the predict_* columns and the SQL alias each one is selected under.
// Key order doubles as the allowedTypes list reported on a bad request.
const DOWNLOAD_TYPES = {
  all_data: {
    extraColumns: ['taxonomy', 'pH', 'temperature', 'o2', 'nutrition', 'gram_stain', 'synonym', 'media_name', 'media_source', 'mediadive_id'],
    predicts: [
      { column: 'predict_pH', alias: 'ppH' },
      { column: 'predict_temperature', alias: 'ptemperature' },
      { column: 'predict_o2', alias: 'po2' }
    ]
  },
  ph: { extraColumns: ['pH'], predicts: [{ column: 'predict_pH', alias: 'ppH' }] },
  temperature: { extraColumns: ['temperature'], predicts: [{ column: 'predict_temperature', alias: 'ptemperature' }] },
  oxygen: { extraColumns: ['o2'], predicts: [{ column: 'predict_o2', alias: 'po2' }] },
  culture_medium: { extraColumns: ['nutrition', 'media_name', 'media_source', 'mediadive_id'], predicts: [] },
  // Note: the database has no max_growth_rate column yet, so this type
  // exports taxonomy columns only (matches previous behavior).
  max_growth_rate: { extraColumns: [], predicts: [] }
};

// CSV header name -> SQL alias for the predict_* expressions.
const PREDICT_HEADER_TO_ALIAS = { predict_pH: 'ppH', predict_temperature: 'ptemperature', predict_o2: 'po2' };

// Quote a CSV field when it contains a delimiter, a quote or a line break.
// \r is included so CR/CRLF embedded in data cannot split a row.
const escapeCSVValue = (value) => {
  if (value === null || value === undefined) return '';
  const str = String(value);
  if (/[",\n\r]/.test(str)) {
    return `"${str.replace(/"/g, '""')}"`;
  }
  return str;
};

// GET /download-data/:type — export one dataset variant as a CSV attachment.
// :type must be a key of DOWNLOAD_TYPES; ?cultured_type defaults to 'cultured'.
router.get('/download-data/:type', async (req, res) => {
  try {
    const { type } = req.params;
    const { cultured_type = 'cultured' } = req.query;

    const config = DOWNLOAD_TYPES[type];
    if (!config) {
      return res.status(400).json({
        success: false,
        message: 'Invalid download type',
        allowedTypes: Object.keys(DOWNLOAD_TYPES)
      });
    }

    // Build SELECT, GROUP BY and CSV headers from the one config table so
    // the three lists can never drift apart.
    const plainColumns = [...BASE_COLUMNS, ...config.extraColumns];
    const selectFields = [
      ...plainColumns.map((c) => `m.${c}`),
      ...config.predicts.map((p) => predictExpr(p.column, p.alias))
    ].join(',\n        ');
    // GROUP BY the raw columns (including the raw predict_* columns) to
    // deduplicate rows, matching the previous per-type GROUP BY lists.
    const groupByFields = [
      ...plainColumns.map((c) => `m.${c}`),
      ...config.predicts.map((p) => `m.${p.column}`)
    ].join(', ');
    const headers = [
      ...BASE_HEADERS,
      ...config.extraColumns,
      ...config.predicts.map((p) => p.column)
    ];

    const query = `
      SELECT ${selectFields}
      FROM cultured_data m
      WHERE m.cultured_type = ?
      GROUP BY ${groupByFields}
      ORDER BY m.species_dive_id
    `;

    console.log('Execute download query for type:', type);
    const [rows] = await pool.execute(query, [cultured_type]);
    console.log('Download query result row count:', rows.length);

    const csvLines = [
      headers.join(','),
      ...rows.map((row) => headers.map((header) => {
        // predict_* headers come back from SQL under their short alias.
        const value = row[header] !== undefined ? row[header] : row[PREDICT_HEADER_TO_ALIAS[header]];
        return escapeCSVValue(value);
      }).join(','))
    ];
    const csvContent = csvLines.join('\n');

    res.setHeader('Content-Type', 'text/csv; charset=utf-8');
    res.setHeader('Content-Disposition', `attachment; filename="${type}.csv"`);
    res.send(csvContent);

  } catch (error) {
    console.error('Error downloading data:', error);
    res.status(500).json({
      success: false,
      message: 'Failed to download data',
      error: error.message
    });
  }
});
|
||
|
||
module.exports = router;
|