
[Large File Upload] Chunked Upload + Resumable Upload + Hash Computation in a Worker Thread

The client-side implementation below splits a file into fixed-size chunks, computes its hash in a Web Worker, checks the server for an instant-upload hit, uploads the remaining chunks concurrently with per-chunk retries, and records progress locally so an interrupted upload can resume.


/**
 * File chunk upload manager.
 * Provides file chunking, hash computation, concurrent uploads,
 * and resumable uploads.
 */
class FileChunkUploader {
  /**
   * @param {File} file - The file to upload
   * @param {Object} options - Configuration options
   * @param {number} [options.chunkSize=5242880] - Size of each chunk in bytes (default 5 MB)
   * @param {number} [options.maxConcurrency=3] - Maximum number of concurrent chunk uploads
   * @param {string} [options.hashWorkerPath='hash-worker.js'] - Path to the hash worker script
   */
  constructor(file, options = {}) {
    this.sourceFile = file;
    this.config = {
      chunkSize: options.chunkSize || 5 << 20,        // default 5 MB
      maxConcurrency: options.maxConcurrency || 3,    // concurrent uploads
      hashWorkerPath: options.hashWorkerPath || 'hash-worker.js'
    };
    this.uploadState = {
      totalChunks: Math.ceil(file.size / this.config.chunkSize),
      uploadedChunkIndices: new Set(),
      fileHash: null,
      uploadSessionId: this._generateUniqueId()
    };
  }

  /**
   * Start the upload flow.
   * @returns {Promise<Object>} Upload result
   */
  async startUpload() {
    try {
      // 1. Compute the file hash
      this.uploadState.fileHash = await this._calculateFileHash();

      // 2. Check whether the file already exists on the server (instant upload)
      if (await this._checkForInstantUpload()) {
        return { success: true, skipped: true, reason: 'File already exists' };
      }

      // 3. Fetch the progress of previously uploaded chunks
      await this._fetchUploadProgress();

      // 4. Upload the remaining chunks
      return await this._uploadAllChunks();
    } catch (error) {
      console.error('Upload failed:', error);
      throw new UploadError('UPLOAD_FAILED', { cause: error });
    }
  }

  /**
   * Compute the file hash in a Web Worker.
   * @private
   * @returns {Promise<string>} File hash
   */
  async _calculateFileHash() {
    return new Promise((resolve, reject) => {
      const worker = new Worker(this.config.hashWorkerPath);
      worker.postMessage({
        file: this.sourceFile,
        operation: 'hash'
      });
      worker.onmessage = (event) => {
        if (event.data.progress) {
          this._updateProgress(event.data.progress);
        } else if (event.data.hash) {
          resolve(event.data.hash);
          worker.terminate();
        }
      };
      worker.onerror = (error) => {
        worker.terminate();
        reject(error);
      };
    });
  }

  /**
   * Hash-computation progress callback (0..1); hook UI updates here.
   * @private
   * @param {number} progress
   */
  _updateProgress(progress) {
    console.log(`Hashing progress: ${Math.round(progress * 100)}%`);
  }

  /**
   * Check whether the server already has this file (instant upload).
   * @private
   * @returns {Promise<boolean>}
   */
  async _checkForInstantUpload() {
    const response = await fetch(`/api/files/check?hash=${this.uploadState.fileHash}`);
    const { exists } = await response.json();
    return exists;
  }

  /**
   * Fetch the set of chunks that were already uploaded.
   * @private
   */
  async _fetchUploadProgress() {
    // Restore any progress saved locally for this file, so the session id
    // matches the interrupted upload instead of the freshly generated one.
    const saved = localStorage.getItem(`uploadProgress_${this.uploadState.fileHash}`);
    if (saved) {
      const { sessionId, uploadedChunks } = JSON.parse(saved);
      this.uploadState.uploadSessionId = sessionId;
      uploadedChunks.forEach(index => this.uploadState.uploadedChunkIndices.add(index));
    }
    try {
      const response = await fetch(`/api/uploads/progress?sessionId=${this.uploadState.uploadSessionId}`);
      const { uploadedChunks } = await response.json();
      uploadedChunks.forEach(index => {
        this.uploadState.uploadedChunkIndices.add(index);
      });
    } catch (error) {
      console.warn('Failed to fetch upload progress; all chunks will be re-uploaded', error);
    }
  }

  /**
   * Upload all chunks that have not been uploaded yet.
   * @private
   */
  async _uploadAllChunks() {
    const pendingChunks = this._getPendingChunks();
    await this._uploadWithConcurrencyControl(pendingChunks);
    return this._finalizeUpload();
  }

  /**
   * Get the indices of chunks still pending upload.
   * @private
   * @returns {number[]}
   */
  _getPendingChunks() {
    return Array.from(
      { length: this.uploadState.totalChunks },
      (_, index) => index
    ).filter(index => !this.uploadState.uploadedChunkIndices.has(index));
  }

  /**
   * Upload chunks with a cap on concurrency.
   * @private
   * @param {number[]} chunkIndices - Indices of chunks to upload
   */
  async _uploadWithConcurrencyControl(chunkIndices) {
    const activeUploads = new Set();
    for (const chunkIndex of chunkIndices) {
      const uploadPromise = this._uploadSingleChunk(chunkIndex)
        .finally(() => activeUploads.delete(uploadPromise));
      activeUploads.add(uploadPromise);
      // When the pool is full, wait for one upload to settle before adding more.
      if (activeUploads.size >= this.config.maxConcurrency) {
        await Promise.race(activeUploads);
      }
    }
    await Promise.all(activeUploads);
  }

  /**
   * Upload a single chunk with retries.
   * @private
   * @param {number} chunkIndex - Chunk index
   * @param {number} [maxRetries=3] - Maximum number of attempts
   */
  async _uploadSingleChunk(chunkIndex, maxRetries = 3) {
    let attempt = 0;
    while (attempt < maxRetries) {
      try {
        const chunkData = this._getChunkData(chunkIndex);
        await this._sendChunkToServer(chunkIndex, chunkData);
        this.uploadState.uploadedChunkIndices.add(chunkIndex);
        this._saveProgressLocally();
        return;
      } catch (error) {
        attempt++;
        if (attempt >= maxRetries) {
          throw new UploadError('CHUNK_UPLOAD_FAILED', {
            chunkIndex,
            attempts: maxRetries,
            error
          });
        }
      }
    }
  }

  /**
   * Slice the chunk's data out of the source file.
   * @private
   * @param {number} chunkIndex
   * @returns {Blob}
   */
  _getChunkData(chunkIndex) {
    const start = chunkIndex * this.config.chunkSize;
    const end = Math.min(start + this.config.chunkSize, this.sourceFile.size);
    return this.sourceFile.slice(start, end);
  }

  /**
   * Send a chunk to the server.
   * @private
   */
  async _sendChunkToServer(chunkIndex, chunkData) {
    const formData = new FormData();
    formData.append('file', chunkData);
    formData.append('chunkIndex', chunkIndex);
    formData.append('totalChunks', this.uploadState.totalChunks);
    formData.append('fileHash', this.uploadState.fileHash);
    formData.append('sessionId', this.uploadState.uploadSessionId);

    const response = await fetch('/api/uploads/chunk', {
      method: 'POST',
      body: formData
    });
    if (!response.ok) {
      throw new Error(`Server returned an error: ${response.status}`);
    }
  }

  /**
   * Finish the upload and ask the server to merge the chunks.
   * @private
   */
  async _finalizeUpload() {
    const response = await fetch('/api/uploads/complete', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        fileHash: this.uploadState.fileHash,
        sessionId: this.uploadState.uploadSessionId,
        totalChunks: this.uploadState.totalChunks
      })
    });
    if (!response.ok) {
      throw new Error('Failed to merge chunks');
    }
    return { success: true };
  }

  /**
   * Generate a unique session id.
   * @private
   */
  _generateUniqueId() {
    return Math.random().toString(36).substring(2) + Date.now().toString(36);
  }

  /**
   * Persist upload progress locally.
   * @private
   */
  _saveProgressLocally() {
    const progressData = {
      sessionId: this.uploadState.uploadSessionId,
      uploadedChunks: Array.from(this.uploadState.uploadedChunkIndices),
      timestamp: Date.now()
    };
    localStorage.setItem(
      `uploadProgress_${this.uploadState.fileHash}`,
      JSON.stringify(progressData)
    );
  }
}

/**
 * Upload error class.
 */
class UploadError extends Error {
  constructor(code, details = {}) {
    super();
    this.name = 'UploadError';
    this.code = code;
    this.details = details;
    this.message = this._getErrorMessage();
  }

  _getErrorMessage() {
    const messages = {
      'UPLOAD_FAILED': 'File upload failed',
      'CHUNK_UPLOAD_FAILED': `Chunk upload failed (attempts: ${this.details.attempts})`,
      'NETWORK_ERROR': 'A network problem occurred'
    };
    return messages[this.code] || 'An unknown error occurred';
  }
}
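A minimal usage sketch follows. The element id, option values, and logging are illustrative assumptions; it presumes the /api endpoints referenced above exist on the server and that hash-worker.js (shown next) is served alongside the page.

// Usage sketch: element id and option values are illustrative assumptions.
document.querySelector('#file-input').addEventListener('change', async (event) => {
  const file = event.target.files[0];
  if (!file) return;

  const uploader = new FileChunkUploader(file, {
    chunkSize: 10 << 20,                      // 10 MB chunks
    maxConcurrency: 4,                        // up to 4 chunks in flight
    hashWorkerPath: '/static/hash-worker.js'  // wherever the worker script is served
  });

  try {
    const result = await uploader.startUpload();
    console.log(result.skipped ? 'Instant upload: file already on the server' : 'Upload complete', result);
  } catch (error) {
    if (error instanceof UploadError) {
      console.error(`Upload error [${error.code}]:`, error.message, error.details);
    } else {
      console.error('Unexpected error:', error);
    }
  }
});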
// hash-worker.js
// Import the SparkMD5 library for computing the file hash.
self.importScripts('spark-md5.min.js');

// Listen for messages from the main thread.
self.onmessage = async (event) => {
  const file = event.data.file;
  const chunkSize = 2 * 1024 * 1024; // 2 MB slices for hashing
  const totalChunks = Math.ceil(file.size / chunkSize);
  const hasher = new self.SparkMD5.ArrayBuffer();

  // Hash the file incrementally, chunk by chunk.
  for (let currentChunk = 0; currentChunk < totalChunks; currentChunk++) {
    const chunkData = await getFileChunk(file, currentChunk * chunkSize, chunkSize);
    hasher.append(chunkData);
    // Report progress to the main thread.
    self.postMessage({ progress: (currentChunk + 1) / totalChunks });
  }

  // Send the final hash once all chunks have been processed.
  self.postMessage({ hash: hasher.end() });
};

/**
 * Read a slice of the file.
 * @param {File} file - Target file
 * @param {number} start - Starting byte offset
 * @param {number} length - Slice length
 * @returns {Promise<ArrayBuffer>}
 */
function getFileChunk(file, start, length) {
  return new Promise((resolve) => {
    const reader = new FileReader();
    reader.onload = (loadEvent) => resolve(loadEvent.target.result);
    reader.readAsArrayBuffer(file.slice(start, start + length));
  });
}
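Note: in runtimes where Blob.prototype.arrayBuffer() is available (current versions of the major browsers, including worker contexts), the FileReader wrapper above can be reduced to a one-liner; a minimal sketch:

// Alternative chunk reader using Blob.arrayBuffer(); behaviour is equivalent
// to the FileReader version above.
function getFileChunk(file, start, length) {
  return file.slice(start, start + length).arrayBuffer();
}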
