File upload routes:
const express = require('express');
const router = express.Router();
const multer = require('multer');
const fs = require('fs');
const path = require('path');

// Make sure the upload directories exist
const uploadDir = path.join(__dirname, '../backend/uploads');
const tempDir = path.join(uploadDir, 'temp');
if (!fs.existsSync(uploadDir)) {
  fs.mkdirSync(uploadDir, { recursive: true });
}
if (!fs.existsSync(tempDir)) {
  fs.mkdirSync(tempDir, { recursive: true });
}

// Create the file-hash store (used to detect files that were already uploaded)
const hashFilePath = path.join(__dirname, '../backend/fileHashes.json');
let fileHashes = {};
try {
  if (fs.existsSync(hashFilePath)) {
    const data = fs.readFileSync(hashFilePath, 'utf8');
    fileHashes = JSON.parse(data || '{}');
  }
} catch (err) {
  console.error('Error reading hash file:', err);
}

// Persist the file-hash index to disk
function saveFileHashes() {
  fs.writeFileSync(hashFilePath, JSON.stringify(fileHashes, null, 2), 'utf8');
}

// Configure Multer for chunk uploads - fix: read metadata from query parameters
const chunkStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    // Read fileHash from the query string
    const fileHash = req.query.fileHash;
    if (!fileHash) return cb(new Error('Missing file hash'));
    const chunkDir = path.join(tempDir, fileHash);
    if (!fs.existsSync(chunkDir)) {
      fs.mkdirSync(chunkDir, { recursive: true });
    }
    cb(null, chunkDir);
  },
  filename: (req, file, cb) => {
    // Read chunkIndex from the query string
    const chunkIndex = req.query.chunkIndex;
    cb(null, `chunk-${chunkIndex}`);
  },
});

const uploadChunk = multer({ storage: chunkStorage });

// File-existence check endpoint (instant upload / deduplication)
router.post('/check', express.json(), (req, res) => {
  const { fileName, fileSize, fileHash, algorithm = 'MD5' } = req.body;
  if (!fileHash) {
    return res.status(400).json({
      exists: false,
      message: 'Missing file hash',
    });
  }

  // If the hash is already recorded, this file has been uploaded before
  if (fileHashes[fileHash]) {
    return res.json({
      exists: true,
      message: `File already exists: ${fileHashes[fileHash].fileName}`,
    });
  }

  res.json({
    exists: false,
    message: 'File does not exist; upload can proceed',
  });
});

// Check-uploaded-chunks endpoint (resume support)
router.post('/checkChunks', express.json(), (req, res) => {
  const { fileHash, totalChunks } = req.body;
  if (!fileHash) {
    return res.status(400).json({
      message: 'Missing file hash',
    });
  }

  const chunkDir = path.join(tempDir, fileHash);
  const uploadedChunks = [];
  if (fs.existsSync(chunkDir)) {
    const files = fs.readdirSync(chunkDir);
    files.forEach((file) => {
      if (file.startsWith('chunk-')) {
        const chunkIndex = parseInt(file.split('-')[1], 10);
        if (!isNaN(chunkIndex)) {
          uploadedChunks.push(chunkIndex);
        }
      }
    });
  }

  res.json({
    uploadedChunks,
    message: `Found ${uploadedChunks.length}/${totalChunks} chunks`,
  });
});

// Chunk upload endpoint - with an added integrity (size) check
router.post('/uploadChunk', uploadChunk.single('file'), (req, res) => {
  if (!req.file) {
    return res.status(400).json({
      success: false,
      message: 'No chunk was uploaded',
    });
  }

  // Read metadata from the query string
  const fileHash = req.query.fileHash;
  const chunkIndex = req.query.chunkIndex;
  const expectedSize = parseInt(req.query.chunkSize, 10);

  if (!fileHash || !chunkIndex || isNaN(expectedSize)) {
    // Clean up the invalid upload
    try {
      if (req.file.path) {
        fs.unlinkSync(req.file.path);
      }
    } catch (err) {
      console.error('Failed to delete invalid chunk:', err);
    }
    return res.status(400).json({
      success: false,
      message: 'Missing required parameters',
    });
  }

  try {
    // Verify that the stored chunk matches the size reported by the client
    const stats = fs.statSync(req.file.path);
    if (stats.size !== expectedSize) {
      fs.unlinkSync(req.file.path);
      return res.status(400).json({
        success: false,
        message: `Chunk size mismatch: expected ${expectedSize} bytes, got ${stats.size} bytes`,
        expectedSize,
        actualSize: stats.size,
      });
    }

    res.json({
      success: true,
      message: 'Chunk uploaded successfully',
      chunkIndex: parseInt(chunkIndex, 10),
      fileHash,
    });
  } catch (err) {
    console.error(`Chunk verification failed: ${req.file.path}`, err);
    try {
      if (fs.existsSync(req.file.path)) {
        fs.unlinkSync(req.file.path);
      }
    } catch (cleanupErr) {
      console.error('Failed to delete chunk:', cleanupErr);
    }
    res.status(500).json({
      success: false,
      message: 'Chunk verification failed',
      error: err.message,
    });
  }
});

// Merge-chunks endpoint
router.post('/merge', express.json(), (req, res) => {
  const { fileHash, fileName, totalChunks } = req.body;
  if (!fileHash || !fileName || totalChunks === undefined) {
    return res.status(400).json({
      success: false,
      message: 'Missing required parameters',
    });
  }

  const chunkDir = path.join(tempDir, fileHash);
  const mergedFilePath = path.join(uploadDir, `${fileHash}-${fileName}`);

  // Verify that every chunk is present before merging
  let allChunksExist = true;
  for (let i = 0; i < totalChunks; i++) {
    const chunkPath = path.join(chunkDir, `chunk-${i}`);
    if (!fs.existsSync(chunkPath)) {
      allChunksExist = false;
      break;
    }
  }
  if (!allChunksExist) {
    return res.status(400).json({
      success: false,
      message: 'Some chunks are missing; cannot merge',
    });
  }

  // Merge the chunks sequentially into the final file
  try {
    const writeStream = fs.createWriteStream(mergedFilePath);

    const mergeChunks = (index) => {
      if (index >= totalChunks) {
        writeStream.end(() => {
          // Once merging is done, remove the temporary chunk directory
          fs.rm(chunkDir, { recursive: true }, (err) => {
            if (err) console.error('Failed to delete temp directory:', err);

            // Record the merged file's metadata
            const stats = fs.statSync(mergedFilePath);
            fileHashes[fileHash] = {
              fileName: fileName,
              filePath: mergedFilePath,
              fileSize: stats.size,
              uploadDate: new Date().toISOString(),
              hash: fileHash,
              algorithm: 'MD5',
            };
            saveFileHashes();

            res.json({
              success: true,
              message: 'File merged successfully',
              filePath: mergedFilePath,
            });
          });
        });
        return;
      }

      const chunkPath = path.join(chunkDir, `chunk-${index}`);
      const readStream = fs.createReadStream(chunkPath);
      readStream.pipe(writeStream, { end: false });
      readStream.on('end', () => {
        // Delete each chunk once it has been appended
        fs.unlink(chunkPath, (err) => {
          if (err) console.error(`Failed to delete chunk ${index}:`, err);
          mergeChunks(index + 1);
        });
      });
      readStream.on('error', (err) => {
        writeStream.close();
        console.error(`Failed to read chunk ${index}:`, err);
        res.status(500).json({
          success: false,
          message: 'Failed to merge file',
          error: err.message,
        });
      });
    };

    mergeChunks(0);
  } catch (err) {
    console.error('Failed to merge file:', err);
    res.status(500).json({
      success: false,
      message: 'Failed to merge file',
      error: err.message,
    });
  }
});

module.exports = router;
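
Client usage sketch: below is a minimal, non-authoritative example of how a client might drive these endpoints end to end (check → checkChunks → uploadChunk → merge). It assumes Node 18+ (built-in fetch, FormData, and Blob), that the router above is mounted at http://localhost:3000/upload, a 5 MB chunk size, and MD5 hashing via node:crypto to match the server's fileHashes records; none of these choices come from the route code itself.

const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

const BASE = 'http://localhost:3000/upload'; // assumption: where this router is mounted
const CHUNK_SIZE = 5 * 1024 * 1024; // assumption: 5 MB chunks

async function uploadFile(filePath) {
  const buffer = fs.readFileSync(filePath);
  const fileName = path.basename(filePath);
  // MD5 over the whole file, matching the server's `algorithm: 'MD5'` records
  const fileHash = crypto.createHash('md5').update(buffer).digest('hex');
  const totalChunks = Math.ceil(buffer.length / CHUNK_SIZE);

  // 1. Instant-upload check: is this hash already on the server?
  const check = await (await fetch(`${BASE}/check`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName, fileSize: buffer.length, fileHash }),
  })).json();
  if (check.exists) {
    console.log('Already uploaded:', check.message);
    return;
  }

  // 2. Resume support: which chunks does the server already have?
  const { uploadedChunks = [] } = await (await fetch(`${BASE}/checkChunks`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileHash, totalChunks }),
  })).json();

  // 3. Upload missing chunks; fileHash/chunkIndex/chunkSize travel in the query string
  for (let i = 0; i < totalChunks; i++) {
    if (uploadedChunks.includes(i)) continue;
    const chunk = buffer.subarray(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE);
    const form = new FormData();
    form.append('file', new Blob([chunk]), `chunk-${i}`);
    const url = `${BASE}/uploadChunk?fileHash=${fileHash}&chunkIndex=${i}&chunkSize=${chunk.length}`;
    const result = await (await fetch(url, { method: 'POST', body: form })).json();
    if (!result.success) throw new Error(result.message);
  }

  // 4. Ask the server to merge the chunks into the final file
  const merged = await (await fetch(`${BASE}/merge`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileHash, fileName, totalChunks }),
  })).json();
  console.log(merged.message, merged.filePath);
}

uploadFile('./example.bin').catch(console.error); // hypothetical test file

Note that fileHash, chunkIndex, and chunkSize are sent as query parameters on /uploadChunk, which is what the Multer diskStorage configuration above expects.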