createChunk.js
Spark-md5 computes an MD5 hash for each file chunk, and those hashes are used to build a file fingerprint.
This makes file hash calculation and file integrity checking much more convenient.
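For reference, spark-md5 exposes an incremental hashing API: feed it data piece by piece with append() and read the hex digest with end(). A minimal standalone sketch (assuming spark-md5 is loaded as an ES module from ./sparkmd5.js, exactly as the code below does):

import sparkMd5 from './sparkmd5.js'

// Incrementally hash two ArrayBuffers, then read the 32-character hex MD5 digest.
const spark = new sparkMd5.ArrayBuffer();
spark.append(new Uint8Array([1, 2, 3]).buffer);
spark.append(new Uint8Array([4, 5, 6]).buffer);
console.log(spark.end()); // hex MD5 of all appended bytes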
import sparkMd5 from './sparkmd5.js'

export function createChunk(file, index, chunkSize) {
  return new Promise((resolve, reject) => {
    const start = index * chunkSize;
    const end = start + chunkSize;
    const spark = new sparkMd5.ArrayBuffer();
    const fileReader = new FileReader();
    const blob = file.slice(start, end);
    fileReader.onload = (e) => {
      spark.append(e.target.result); // CPU-heavy MD5 step; blocks whichever thread runs it
      resolve({ start, end, index, hash: spark.end(), blob });
    };
    fileReader.readAsArrayBuffer(blob);
  });
}
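On its own, createChunk can be awaited directly. A quick usage sketch (hypothetical: it assumes a module context and a file already chosen through an input[type="file"] element):

// Hash only the first 5 MB chunk of the selected file.
const file = document.querySelector('input[type="file"]').files[0];
const chunk = await createChunk(file, 0, 1024 * 1024 * 5);
console.log(chunk.index, chunk.hash, chunk.blob.size);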
cutFile.js
import { createChunk } from './createChunk.js'

const CHUNK_SIZE = 1024 * 1024 * 5; // 5 MB per chunk

export async function cutFile(file) {
  const result = [];
  /* Naive version: slow, because each MD5 computation blocks the main thread
  // const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
  // for (let i = 0; i < chunkCount; i++) {
  //   const chunk = await createChunk(file, i, CHUNK_SIZE);
  //   result.push(chunk);
  // }
  // return result;
  */
  return new Promise((resolve, reject) => {
    let finishCount = 0; // how many workers have reported back
    const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
    /* Multi-threaded version */
    const THREAD_COUNT = navigator.hardwareConcurrency || 4; // number of worker threads
    const threadChunkCount = Math.ceil(chunkCount / THREAD_COUNT); // chunks assigned to each thread
    for (let i = 0; i < THREAD_COUNT; i++) {
      // create a worker and hand it a contiguous range of chunk indices
      const worker = new Worker('./worker.js', { type: 'module' });
      const start = i * threadChunkCount;
      let end = (i + 1) * threadChunkCount;
      if (end > chunkCount) {
        end = chunkCount;
      }
      worker.postMessage({
        file,
        CHUNK_SIZE,
        startChunkIndex: start,
        endChunkIndex: end,
      });
      worker.onmessage = (e) => {
        for (let i = start; i < end; i++) {
          result[i] = e.data[i - start]; // put each chunk result back at its global index
        }
        worker.terminate();
        finishCount++;
        if (finishCount === THREAD_COUNT) {
          resolve(result);
        }
      };
    }
  });
}
worker.js (worker thread script)
import { createChunk } from './createChunk.js'

onmessage = async (e) => {
  const {
    file,
    CHUNK_SIZE,
    startChunkIndex: start,
    endChunkIndex: end,
  } = e.data;
  const proms = [];
  for (let i = start; i < end; i++) {
    proms.push(createChunk(file, i, CHUNK_SIZE));
  }
  const chunks = await Promise.all(proms); // wait for every chunk in this range to be hashed
  postMessage(chunks); // send the chunk results back to the main thread
};
Usage
import { cutFile } from './cutFile.js'

// onchange handler for the file-upload input
async function onchange(e) {
  const file = e.target.files[0];
  const chunks = await cutFile(file);
}
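Each element of chunks carries only a per-chunk hash. If a single fingerprint for the whole file is needed, as mentioned at the top, one common approach, sketched here as an assumption rather than part of the original code, is to run spark-md5 once more over the concatenated chunk hashes:

import sparkMd5 from './sparkmd5.js'

// Derive one fingerprint for the entire file from the ordered chunk hashes.
export function fileFingerprint(chunks) {
  const spark = new sparkMd5(); // string-based incremental MD5
  for (const chunk of chunks) {
    spark.append(chunk.hash); // feed each chunk hash in order
  }
  return spark.end(); // MD5 over the concatenated hex hashes
}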