Real code, real numbers, real architectures
How YouTube handles 500 hours of video per minute
// Browser: Split video into 5MB chunks for resumable upload
async function uploadVideo(file: File) {
  const CHUNK_SIZE = 5 * 1024 * 1024; // 5MB
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);

  // Get upload session from server
  const { uploadId, uploadUrl } = await fetch('/api/upload/init', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      fileName: file.name,
      fileSize: file.size,
      mimeType: file.type
    })
  }).then(r => r.json());

  // Upload each chunk with retry logic
  for (let i = 0; i < totalChunks; i++) {
    const start = i * CHUNK_SIZE;
    const end = Math.min(start + CHUNK_SIZE, file.size);
    const chunk = file.slice(start, end); // Blob.slice is lazy: no copy into RAM

    await uploadChunkWithRetry(uploadUrl, uploadId, i, chunk, {
      maxRetries: 3,
      onProgress: (pct) => updateProgress(i, pct) // updateProgress: app-specific UI hook
    });
  }

  // Signal completion
  return fetch('/api/upload/complete', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ uploadId })
  });
}

Why chunked upload?
• Resume after network failure (don't restart from 0)
• Show real progress (not just a spinner)
• Server can process chunks in parallel
• Memory efficient (don't load 4GB into RAM)

Key numbers:
• 5MB chunk = 2-5 seconds on an average connection
• Max 3 retries per chunk = handles flaky networks
• Parallel chunk upload = 3-4x faster on good connections (see the sketches below)
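uploadChunkWithRetry is called above but never defined. Here's a minimal sketch, assuming the server accepts each chunk as a PUT to uploadUrl with hypothetical X-Upload-Id and X-Chunk-Index headers — the endpoint shape is an illustration, not a documented YouTube API. It uses XMLHttpRequest rather than fetch because fetch doesn't expose upload progress events:

interface RetryOptions {
  maxRetries: number;
  onProgress: (pct: number) => void;
}

// Retries one chunk with exponential backoff between attempts
async function uploadChunkWithRetry(
  uploadUrl: string,
  uploadId: string,
  index: number,
  chunk: Blob,
  { maxRetries, onProgress }: RetryOptions
): Promise<void> {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      await putChunk(uploadUrl, uploadId, index, chunk, onProgress);
      return; // success
    } catch (err) {
      if (attempt === maxRetries) throw err;
      // Exponential backoff: 1s, 2s, 4s
      await new Promise(r => setTimeout(r, 1000 * 2 ** attempt));
    }
  }
}

// One PUT per chunk; header names here are assumptions for illustration
function putChunk(
  url: string,
  uploadId: string,
  index: number,
  chunk: Blob,
  onProgress: (pct: number) => void
): Promise<void> {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open('PUT', url);
    xhr.setRequestHeader('X-Upload-Id', uploadId);
    xhr.setRequestHeader('X-Chunk-Index', String(index));
    xhr.upload.onprogress = (e) => {
      if (e.lengthComputable) onProgress(Math.round((e.loaded / e.total) * 100));
    };
    xhr.onload = () =>
      xhr.status < 300 ? resolve() : reject(new Error(`HTTP ${xhr.status}`));
    xhr.onerror = () => reject(new Error('network error'));
    xhr.send(chunk);
  });
}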
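The loop in uploadVideo sends one chunk at a time; the 3-4x figure comes from keeping several chunks in flight. Here's a sketch of a bounded-concurrency variant, reusing the assumed uploadChunkWithRetry helper above (the pool size of 4 is an illustrative choice, not a YouTube number):

// Upload chunks with up to `concurrency` requests in flight at once
async function uploadChunksInParallel(
  file: File,
  uploadUrl: string,
  uploadId: string,
  concurrency = 4 // illustrative pool size, tune per connection
) {
  const CHUNK_SIZE = 5 * 1024 * 1024;
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  let next = 0;

  // Each worker claims the next unclaimed chunk index until none remain
  async function worker() {
    while (next < totalChunks) {
      const i = next++;
      const start = i * CHUNK_SIZE;
      const chunk = file.slice(start, Math.min(start + CHUNK_SIZE, file.size));
      await uploadChunkWithRetry(uploadUrl, uploadId, i, chunk, {
        maxRetries: 3,
        onProgress: (pct) => updateProgress(i, pct) // same app-specific UI hook as above
      });
    }
  }

  await Promise.all(Array.from({ length: concurrency }, worker));
}

Because each worker claims an index synchronously before awaiting, no chunk is uploaded twice; and since the server reassembles by chunk index, arrival order doesn't matter.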