Patterns › Assets Engine
Client-side Hash (SHA-256)
Generate unique file hashes to prevent duplicate uploads and verify integrity.
The Problem
Users may upload the same file multiple times, wasting storage. Also, you need to verify files weren't corrupted during transfer.
Solution
/**
 * Compute the SHA-256 digest of a file's contents.
 *
 * @param file - The file to fingerprint.
 * @returns The digest as a lowercase hex string (64 characters).
 */
async function generateFileHash(file: File): Promise<string> {
  const digest = await crypto.subtle.digest('SHA-256', await file.arrayBuffer());
  // Render each byte as two hex digits.
  let hex = '';
  for (const byte of new Uint8Array(digest)) {
    hex += byte.toString(16).padStart(2, '0');
  }
  return hex;
}
// With progress tracking for large files
/**
 * Compute the SHA-256 hex digest of a file, reporting read progress.
 *
 * Reads the file in 2 MB slices so the UI can show progress while a large
 * file is being loaded. Since `file.size` is known up front, the chunks are
 * written directly into a single preallocated buffer — the original
 * collect-then-concatenate approach held two full copies of the file in
 * memory at its peak.
 *
 * @param file - The file to fingerprint.
 * @param onProgress - Called after each chunk with percent read (0–100].
 *                     Always ends at 100, including for empty files.
 * @returns The digest as a lowercase hex string (64 characters).
 */
async function generateHashWithProgress(
  file: File,
  onProgress: (percent: number) => void
): Promise<string> {
  const chunkSize = 2 * 1024 * 1024; // 2MB chunks
  // Preallocate the full buffer; each slice is written at its final offset.
  const combined = new Uint8Array(file.size);
  let loaded = 0;
  for (let start = 0; start < file.size; start += chunkSize) {
    const buffer = await file.slice(start, start + chunkSize).arrayBuffer();
    combined.set(new Uint8Array(buffer), loaded);
    loaded += buffer.byteLength;
    onProgress((loaded / file.size) * 100);
  }
  if (file.size === 0) {
    onProgress(100); // loop never runs for an empty file; still signal completion
  }
  const hashBuffer = await crypto.subtle.digest('SHA-256', combined);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
}
// Deduplication example
// Hashes confirmed uploaded this session (locally or via the server).
const hashCache = new Map<string, boolean>();
/**
 * Upload a file unless a byte-identical file (same SHA-256) was already
 * uploaded, checking first a session-local cache and then the server.
 *
 * @param file - The file to upload.
 * @throws Error if the server check or the upload responds with a non-2xx
 *         status — a failed upload must never be cached as a success.
 */
async function uploadWithDeduplication(file: File): Promise<void> {
  const hash = await generateFileHash(file);
  if (hashCache.has(hash)) {
    console.log('File already uploaded, skipping...');
    return;
  }
  // Check server-side
  const checkResponse = await fetch(`/api/files/check/${hash}`);
  if (!checkResponse.ok) {
    // Don't call .json() on an error page; surface the failure instead.
    throw new Error(`Hash check failed with status ${checkResponse.status}`);
  }
  const { exists } = (await checkResponse.json()) as { exists: boolean };
  if (exists) {
    console.log('File exists on server, skipping upload');
    hashCache.set(hash, true);
    return;
  }
  // Upload
  const formData = new FormData();
  formData.append('file', file);
  formData.append('hash', hash);
  const uploadResponse = await fetch('/api/upload', { method: 'POST', body: formData });
  if (!uploadResponse.ok) {
    throw new Error(`Upload failed with status ${uploadResponse.status}`);
  }
  // Cache only after the server accepted the upload.
  hashCache.set(hash, true);
}
Performance Note
Benefit: Prevents duplicate uploads, saving storage costs. A 100MB file generates its hash in ~500ms on modern devices. Server can verify integrity by comparing hashes.