# File Chunking
Split large files into chunks for reliable uploads with automatic retry logic.
## The Problem
Uploading a large file (say, over 100 MB) in a single request often fails due to network timeouts or server request-size limits, and when it fails, the entire upload must restart from the beginning.
## Solution
```ts
interface ChunkUploadConfig {
  file: File;
  chunkSize: number;
  uploadUrl: string;
  onProgress?: (percent: number) => void;
}

async function uploadFileInChunks(config: ChunkUploadConfig): Promise<void> {
  const { file, chunkSize, uploadUrl, onProgress } = config;
  const totalChunks = Math.ceil(file.size / chunkSize);

  // Upload chunks sequentially so only one chunk is in flight (and in memory)
  // at a time.
  for (let index = 0; index < totalChunks; index++) {
    const start = index * chunkSize;
    const end = Math.min(start + chunkSize, file.size);
    const chunk = file.slice(start, end); // Blob.slice is lazy: no bytes are read yet

    await uploadChunkWithRetry(chunk, index, totalChunks, uploadUrl);
    onProgress?.(((index + 1) / totalChunks) * 100);
  }
}

async function uploadChunkWithRetry(
  chunk: Blob,
  index: number,
  total: number,
  url: string,
  attempt = 1
): Promise<void> {
  const maxRetries = 3;
  try {
    // Send the chunk along with its position so the server can reassemble
    // the file in the right order.
    const formData = new FormData();
    formData.append('chunk', chunk);
    formData.append('index', index.toString());
    formData.append('total', total.toString());

    const response = await fetch(url, {
      method: 'POST',
      body: formData
    });
    if (!response.ok) throw new Error(`HTTP ${response.status}`);
  } catch (error) {
    if (attempt < maxRetries) {
      // Exponential backoff: wait 2s, then 4s, before the final retry.
      const delay = Math.pow(2, attempt) * 1000;
      await new Promise(resolve => setTimeout(resolve, delay));
      return uploadChunkWithRetry(chunk, index, total, url, attempt + 1);
    }
    throw new Error(`Chunk ${index} failed after ${maxRetries} attempts`);
  }
}
```
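Two design points worth noting: chunks go up sequentially, so a transient failure costs at most one chunk's worth of work and retries stay isolated per chunk; and the `index`/`total` fields give the server what it needs to reassemble the file once every chunk has arrived. How the server stores and stitches chunks is up to your backend and is not covered here.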
```ts
// Usage
await uploadFileInChunks({
  file: selectedFile, // e.g. a File from an <input type="file">
  chunkSize: 5 * 1024 * 1024, // 5 MB per chunk
  uploadUrl: '/api/upload/chunk',
  onProgress: (percent) => console.log(`${percent.toFixed(1)}%`)
});
```

## Performance Note
Benefit: because each chunk is acknowledged independently, an interrupted upload can resume from the last successful chunk instead of restarting from zero (one way to add this is sketched below). Memory usage stays roughly constant regardless of file size, since only one chunk is in flight at a time.
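The resume behavior is not shown in the solution above, so here is a minimal sketch of one way to add it. It reuses `uploadFileInChunks`'s loop shape and `uploadChunkWithRetry` from above, and it assumes a hypothetical `statusUrl` endpoint that reports `receivedChunks`, the number of chunks the server already holds; real backends expose this differently (or not at all), so treat both names as illustrative.

```ts
// Sketch only: resume from the last successful chunk.
// `statusUrl` and the `receivedChunks` response field are assumptions,
// not part of the pattern above -- adapt to your backend's actual API.
async function resumeFileUpload(
  config: ChunkUploadConfig & { statusUrl: string }
): Promise<void> {
  const { file, chunkSize, uploadUrl, statusUrl, onProgress } = config;
  const totalChunks = Math.ceil(file.size / chunkSize);

  // Ask the server how many chunks it already has (hypothetical endpoint).
  const response = await fetch(statusUrl);
  const { receivedChunks } = (await response.json()) as { receivedChunks: number };

  // Start from the first missing chunk instead of chunk 0.
  for (let index = receivedChunks; index < totalChunks; index++) {
    const start = index * chunkSize;
    const chunk = file.slice(start, Math.min(start + chunkSize, file.size));
    await uploadChunkWithRetry(chunk, index, totalChunks, uploadUrl);
    onProgress?.(((index + 1) / totalChunks) * 100);
  }
}
```

The key design point is that the server, not the client, is the source of truth for which chunks have arrived, so a resumed upload never re-sends data the server has already acknowledged.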