
Large File Upload Optimization Strategies

1. Chunked/Resumable Upload ⭐ (Recommended)

Implementation Overview

Break large files into smaller chunks (1-5MB each) and upload them sequentially or in parallel.

Benefits

  • Resumable: If the connection fails, resume from the last successful chunk
  • Progress tracking: Real-time upload progress
  • Better UX: The user can continue using the app while uploading
  • Network resilience: Small chunks are less likely to fail

Basic Implementation

javascript

// Frontend - Chunked Upload
// generateUniqueId(), updateProgress() and finalizeUpload() are assumed helpers
// provided elsewhere in the app.
async function uploadLargeFile(file, chunkSize = 2 * 1024 * 1024) { // 2MB chunks
  const totalChunks = Math.ceil(file.size / chunkSize);
  const uploadId = generateUniqueId();

  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    const start = chunkIndex * chunkSize;
    const end = Math.min(start + chunkSize, file.size);
    const chunk = file.slice(start, end);

    await uploadChunk(chunk, chunkIndex, totalChunks, uploadId);
    updateProgress((chunkIndex + 1) / totalChunks * 100);
  }

  // Finalize upload
  const fileUrl = await finalizeUpload(uploadId, file.name);
  return fileUrl;
}

async function uploadChunk(chunk, chunkIndex, totalChunks, uploadId) {
  const formData = new FormData();
  formData.append('chunk', chunk);
  formData.append('chunkIndex', chunkIndex);
  formData.append('totalChunks', totalChunks);
  formData.append('uploadId', uploadId);

  const response = await fetch('/api/upload-chunk', {
    method: 'POST',
    body: formData
  });

  if (!response.ok) {
    throw new Error(`Chunk ${chunkIndex} upload failed`);
  }
}
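
On the server side, /api/upload-chunk needs to persist each piece and a finalize endpoint needs to reassemble them in order. A minimal sketch, assuming a Node.js/Express server with multer for multipart parsing; the endpoint names, on-disk layout, and returned fileUrl are illustrative, not fixed by the strategy:

javascript

// Server - receive chunks and reassemble them (Express + multer assumed)
const express = require('express');
const multer = require('multer');
const fs = require('fs');
const path = require('path');

const app = express();
const upload = multer({ dest: 'tmp/incoming' }); // multer writes each chunk to a temp file

app.post('/api/upload-chunk', upload.single('chunk'), (req, res) => {
  const { uploadId, chunkIndex } = req.body;
  // Production code should validate uploadId/chunkIndex before using them in paths
  const dir = path.join('tmp/uploads', uploadId);
  fs.mkdirSync(dir, { recursive: true });
  // Name chunks by index so the assembly order is unambiguous
  fs.renameSync(req.file.path, path.join(dir, String(chunkIndex)));
  res.json({ received: Number(chunkIndex) });
});

app.post('/api/finalize-upload', express.json(), (req, res) => {
  const { uploadId, fileName } = req.body;
  const dir = path.join('tmp/uploads', uploadId);
  fs.mkdirSync('uploads', { recursive: true });
  const target = path.join('uploads', fileName);

  // Concatenate the chunks in numeric order into the final file
  const chunkNames = fs.readdirSync(dir).sort((a, b) => Number(a) - Number(b));
  const out = fs.createWriteStream(target);
  for (const name of chunkNames) {
    out.write(fs.readFileSync(path.join(dir, name)));
  }
  out.end();

  fs.rmSync(dir, { recursive: true, force: true });
  res.json({ fileUrl: `/files/${fileName}` });
});

app.listen(3000);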

2. Background/Asynchronous Upload

Implementation

Start the upload immediately when a file is selected and continue it in the background while the user navigates.

javascript

// Start upload immediately on file selection
document.getElementById('fileInput').addEventListener('change', (e) => {
  const file = e.target.files[0];
  if (file) {
    // Start background upload
    const uploadPromise = startBackgroundUpload(file);

    // Store upload promise reference
    window.pendingUpload = uploadPromise;

    // Show upload progress
    showUploadProgress(uploadPromise);
  }
});

// Check upload status on page B
// Assumes startBackgroundUpload() stored its id in localStorage as 'currentUploadId'
async function checkUploadStatus() {
  const uploadId = localStorage.getItem('currentUploadId');
  if (uploadId) {
    const status = await fetch(`/api/upload-status/${uploadId}`);
    const { isComplete, progress, fileUrl } = await status.json();

    if (isComplete) {
      return fileUrl;
    } else {
      // Still uploading, show progress
      showUploadProgress(progress);
      return null;
    }
  }
}
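
The snippet above assumes a startBackgroundUpload helper that starts the transfer and records an id that page B can poll via /api/upload-status. A minimal sketch, assuming uploadLargeFile from section 1 is extended to accept an externally supplied uploadId (the localStorage key and helper names are assumptions):

javascript

// Start an upload without blocking navigation and remember its id
function startBackgroundUpload(file) {
  const uploadId = generateUniqueId();
  localStorage.setItem('currentUploadId', uploadId);

  // Assumes uploadLargeFile(file, chunkSize, uploadId) from section 1
  // has been extended to reuse the id we store here
  return uploadLargeFile(file, 2 * 1024 * 1024, uploadId)
    .then((fileUrl) => {
      localStorage.removeItem('currentUploadId');
      return fileUrl;
    })
    .catch((error) => {
      console.error('Background upload failed:', error);
      throw error;
    });
}

In a single-page app the returned promise keeps running across client-side route changes; a full page reload would interrupt it, which is what the resume logic in section 6 is for.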

3. Client-Side Compression

Implementation

Compress files before upload to reduce transfer time.

javascript

// Using JSZip for compression
// uploadFile() is an assumed upload helper provided elsewhere in the app
async function compressAndUpload(file) {
  const zip = new JSZip();
  zip.file(file.name, file);

  const compressedBlob = await zip.generateAsync({
    type: "blob",
    compression: "DEFLATE",
    compressionOptions: { level: 6 }
  });

  console.log(`Original: ${file.size} bytes, Compressed: ${compressedBlob.size} bytes`);

  return await uploadFile(compressedBlob, file.name + '.zip');
}
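
Compression mainly pays off for formats that are not already compressed; JPEG, MP4, or ZIP inputs gain little. A small sketch of the kind of type check used as canCompress in section 6 (the MIME-type list is an assumption):

javascript

// Rough heuristic: skip compression for formats that are already compressed
function canCompress(mimeType) {
  const alreadyCompressed = [
    'image/jpeg', 'image/png', 'image/webp',
    'video/mp4', 'video/webm',
    'audio/mpeg',
    'application/zip', 'application/gzip', 'application/pdf'
  ];
  return !alreadyCompressed.includes(mimeType);
}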

4. Direct-to-Cloud Upload with Presigned URLs

Implementation

Upload directly to cloud storage (S3, GCS), bypassing your server.

javascript

// Get presigned URL from your server
async function getPresignedUrl(fileName, fileType) {
  const response = await fetch('/api/presigned-url', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName, fileType })
  });
  return await response.json();
}

// Upload directly to S3
async function uploadToS3(file) {
  const { uploadUrl, fileKey } = await getPresignedUrl(file.name, file.type);

  const response = await fetch(uploadUrl, {
    method: 'PUT',
    body: file,
    headers: {
      'Content-Type': file.type
    }
  });

  if (response.ok) {
    return fileKey; // Use this to retrieve file later
  }
  throw new Error('Direct upload failed');
}
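
The /api/presigned-url endpoint is where your server signs the request. A minimal sketch, assuming AWS SDK v3 and an S3 bucket; the bucket name, region, key scheme, and expiry are assumptions:

javascript

// Server - issue a presigned PUT URL (Express + AWS SDK v3 assumed)
const express = require('express');
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');

const app = express();
const s3 = new S3Client({ region: 'us-east-1' });

app.post('/api/presigned-url', express.json(), async (req, res) => {
  const { fileName, fileType } = req.body;
  const fileKey = `uploads/${Date.now()}-${fileName}`; // illustrative key scheme

  const command = new PutObjectCommand({
    Bucket: 'my-upload-bucket',
    Key: fileKey,
    ContentType: fileType
  });

  // URL is valid for 5 minutes; the client PUTs the file directly to S3
  const uploadUrl = await getSignedUrl(s3, command, { expiresIn: 300 });
  res.json({ uploadUrl, fileKey });
});

app.listen(3000);

Note that the bucket's CORS configuration has to allow PUT requests from your origin for the browser upload to succeed.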

5. Upload Queue Management

Implementation

Handle multiple files and retry failed uploads.

javascript

class UploadQueue {
  constructor() {
    this.queue = [];
    this.activeUploads = 0;
    this.maxConcurrent = 2;
  }

  async addFile(file) {
    const uploadTask = {
      file,
      id: generateUniqueId(),
      status: 'pending',
      progress: 0,
      retries: 0
    };

    this.queue.push(uploadTask);
    this.processQueue();

    return uploadTask.id;
  }

  async processQueue() {
    if (this.activeUploads >= this.maxConcurrent) return;

    const task = this.queue.find(t => t.status === 'pending');
    if (!task) return;

    task.status = 'uploading';
    this.activeUploads++;

    try {
      await this.uploadWithRetry(task);
      task.status = 'completed';
    } catch (error) {
      task.status = 'failed';
      console.error('Upload failed:', error);
    } finally {
      this.activeUploads--;
      this.processQueue(); // Process next item
    }
  }

  async uploadWithRetry(task, maxRetries = 3) {
    while (task.retries < maxRetries) {
      try {
        // uploadFile(task) performs the actual transfer (e.g. the chunked
        // upload from section 1); it is left to the integration
        return await this.uploadFile(task);
      } catch (error) {
        task.retries++;
        if (task.retries >= maxRetries) throw error;

        // Exponential backoff
        await new Promise(resolve =>
          setTimeout(resolve, Math.pow(2, task.retries) * 1000)
        );
      }
    }
  }
}
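
Using the queue is then a matter of instantiating it once and feeding it files; the uploadFile(task) method it delegates to (for example, the chunked uploader from section 1) is left to the integration:

javascript

const uploadQueue = new UploadQueue();

document.getElementById('fileInput').addEventListener('change', (e) => {
  for (const file of e.target.files) {
    // addFile() resolves immediately with the task id; uploads run in the background
    uploadQueue.addFile(file).then((taskId) => {
      console.log('Queued upload', taskId);
    });
  }
});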

6. Progressive Enhancement Strategy

Complete Solution Architecture

javascript

// canCompress, compressFile, simpleUpload, uploadChunkWithRetry, finalizeUpload
// and checkUploadStatus are helper methods not shown here.
class SmartFileUploader {
  constructor() {
    this.chunkSize = 2 * 1024 * 1024; // 2MB
    this.maxRetries = 3;
    this.compressionThreshold = 10 * 1024 * 1024; // 10MB
  }

  async uploadFile(file) {
    // Step 1: Check if compression would help
    const shouldCompress = file.size > this.compressionThreshold &&
      this.canCompress(file.type);

    let fileToUpload = file;
    if (shouldCompress) {
      fileToUpload = await this.compressFile(file);
    }

    // Step 2: Choose upload strategy based on file size
    if (fileToUpload.size > 5 * 1024 * 1024) { // 5MB
      return await this.chunkedUpload(fileToUpload, file.name);
    } else {
      return await this.simpleUpload(fileToUpload);
    }
  }

  async chunkedUpload(file, originalName) {
    const uploadId = generateUniqueId();
    const totalChunks = Math.ceil(file.size / this.chunkSize);

    // Store upload metadata
    localStorage.setItem('uploadProgress', JSON.stringify({
      uploadId,
      totalChunks,
      completedChunks: 0,
      originalName
    }));

    for (let i = 0; i < totalChunks; i++) {
      await this.uploadChunkWithRetry(file, i, totalChunks, uploadId);

      // Update progress
      const progress = JSON.parse(localStorage.getItem('uploadProgress'));
      progress.completedChunks = i + 1;
      localStorage.setItem('uploadProgress', JSON.stringify(progress));

      // Emit progress event
      this.onProgress && this.onProgress((i + 1) / totalChunks * 100);
    }

    // Finalize upload
    const result = await this.finalizeUpload(uploadId, originalName);
    localStorage.removeItem('uploadProgress');

    return result;
  }

  // Resume interrupted upload
  async resumeUpload() {
    const progress = JSON.parse(localStorage.getItem('uploadProgress'));
    if (!progress) return null;

    const { uploadId, totalChunks, completedChunks, originalName } = progress;

    // Check server status
    const serverStatus = await this.checkUploadStatus(uploadId);
    if (serverStatus.completed) {
      localStorage.removeItem('uploadProgress');
      return serverStatus.fileUrl;
    }

    // Resume from where we left off
    // Implementation continues...
  }
}
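
A brief usage sketch for wiring the class up; updateProgressBar and showUploadedFile are assumed UI helpers:

javascript

const uploader = new SmartFileUploader();
uploader.onProgress = (pct) => updateProgressBar(pct); // assumed UI helper

// On page load, pick up any chunked upload that was interrupted
window.addEventListener('load', async () => {
  const resumedUrl = await uploader.resumeUpload();
  if (resumedUrl) showUploadedFile(resumedUrl); // assumed UI helper
});

document.getElementById('fileInput').addEventListener('change', async (e) => {
  const file = e.target.files[0];
  if (file) {
    const fileUrl = await uploader.uploadFile(file);
    showUploadedFile(fileUrl);
  }
});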

Performance Comparison

| Strategy | Upload Speed | User Experience | Complexity | Best For |
| --- | --- | --- | --- | --- |
| Basic Upload | Slow | Poor (blocking) | Low | Files < 5MB |
| Chunked Upload | Good | Excellent | Medium | Files > 5MB |
| Background Upload | Same | Good | Medium | Any size |
| Compression + Chunked | Fast | Excellent | High | Large compressible files |
| Direct-to-Cloud | Very Fast | Good | Medium | Any size, scalable |

Implementation Timeline

| Strategy | Development Time | Benefits |
| --- | --- | --- |
| Basic Chunking | 2-3 days | Resume capability, progress tracking |
| Background Upload | 1-2 days | Better UX, non-blocking |
| Compression | 1 day | Faster uploads for compressible files |
| Direct-to-Cloud | 2-4 days | Bypasses server, scales better |
| Complete Solution | 1-2 weeks | Production-ready, all features |

Recommended Implementation Order

  1. Start with chunked uploads - Biggest impact for large files
  2. Add background uploading - Improves user experience
  3. Implement compression - For files that compress well
  4. Add direct-to-cloud - If you need to scale
  5. Build upload queue - For handling multiple files
