Uploading Files via the JS Fetch API: Complete Guide
A complete guide to uploading files via the JavaScript Fetch API. Covers FormData construction, single and multiple file uploads, drag-and-drop integration, upload progress tracking, chunked uploads for large files, validation before upload, and server-side handling expectations.
File uploading with the Fetch API uses FormData to construct multipart requests. The browser handles encoding, boundary generation, and streaming automatically. This guide covers every file upload pattern from simple single-file uploads to chunked large-file transfers.
Basic Single File Upload
// File input element (#file-input) whose change event drives the upload flow below.
const fileInput = document.getElementById("file-input");
/**
 * Upload a single file to /api/upload as multipart/form-data.
 * @param {File|Blob} file - The file to send.
 * @returns {Promise<Object>} Parsed JSON body of the server response.
 * @throws {Error} When the server responds with a non-2xx status.
 */
async function uploadFile(file) {
  const payload = new FormData();
  payload.append("file", file);
  // Deliberately no Content-Type header — the browser must generate it
  // itself so the multipart boundary string is included.
  const response = await fetch("/api/upload", { method: "POST", body: payload });
  if (!response.ok) {
    throw new Error(`Upload failed: ${response.status}`);
  }
  return response.json();
}
// Upload the selected file as soon as the user picks one.
fileInput.addEventListener("change", async (event) => {
// Single-file input: only the first entry matters.
const file = event.target.files[0];
if (file) {
// NOTE(review): no try/catch here — a failed upload becomes an unhandled rejection.
const result = await uploadFile(file);
console.log("Uploaded:", result.url);
}
});
The critical rule: never set the Content-Type header manually when using FormData. The browser must generate it with the multipart boundary.
Multiple File Upload
/**
 * Upload several files in one multipart request to /api/upload/batch.
 * @param {FileList|File[]} files - Files sent under the repeated "files" field.
 * @returns {Promise<Object>} Parsed JSON response.
 * @throws {Error} When the server responds with a non-2xx status.
 */
async function uploadMultiple(files) {
  const payload = new FormData();
  // Repeating the same field name yields an array on the server side.
  for (const file of files) {
    payload.append("files", file);
  }
  // Plain metadata fields travel alongside the binary parts.
  payload.append("folder", "documents");
  payload.append("tags", JSON.stringify(["report", "2026"]));
  const response = await fetch("/api/upload/batch", { method: "POST", body: payload });
  if (!response.ok) {
    throw new Error(`Batch upload failed: ${response.status}`);
  }
  return response.json();
}
// From a multi-select input
const input = document.getElementById("multi-file");
input.addEventListener("change", (e) => {
uploadMultiple(e.target.files);
});
Client-Side Validation
Validate files before uploading to save bandwidth and provide instant feedback:
/**
 * Validate a file against size, MIME-type, and extension rules before upload.
 * @param {File|{name: string, size: number, type: string}} file - File to check.
 * @param {{maxSize?: number, allowedTypes?: string[], allowedExtensions?: string[]}} [rules]
 *   Empty allowedTypes/allowedExtensions arrays skip those checks.
 * @returns {{valid: boolean, errors: string[]}} Aggregated validation result.
 */
function validateFile(file, rules = {}) {
  const errors = [];
  const {
    maxSize = 10 * 1024 * 1024, // 10 MB default
    allowedTypes = [],
    allowedExtensions = [],
  } = rules;
  // Size check
  if (file.size > maxSize) {
    const maxMB = (maxSize / (1024 * 1024)).toFixed(1);
    errors.push(`File size ${(file.size / (1024 * 1024)).toFixed(1)} MB exceeds limit of ${maxMB} MB`);
  }
  // MIME type check
  if (allowedTypes.length > 0 && !allowedTypes.includes(file.type)) {
    errors.push(`File type "${file.type}" is not allowed. Accepted: ${allowedTypes.join(", ")}`);
  }
  // Extension check. Bug fix: `name.split(".").pop()` returned the whole
  // name for dot-less files ("README" -> "readme"); treat a name with no
  // dot (or only a leading dot, e.g. ".gitignore") as having no extension.
  if (allowedExtensions.length > 0) {
    const dot = file.name.lastIndexOf(".");
    const ext = dot > 0 ? file.name.slice(dot + 1).toLowerCase() : "";
    if (!allowedExtensions.includes(ext)) {
      errors.push(`Extension ".${ext}" is not allowed. Accepted: ${allowedExtensions.join(", ")}`);
    }
  }
  return { valid: errors.length === 0, errors };
}
// Usage — assumes `file` is already in scope (e.g. inside a change/drop
// handler; the bare `return` below is only valid inside a function body).
const validation = validateFile(file, {
maxSize: 5 * 1024 * 1024,
allowedTypes: ["image/jpeg", "image/png", "image/webp"],
allowedExtensions: ["jpg", "jpeg", "png", "webp"],
});
if (!validation.valid) {
console.error(validation.errors);
return;
}
| Validation | Client-Side | Server-Side |
|---|---|---|
| File size | Fast feedback | Enforce hard limit |
| MIME type | Quick filter | Verify with magic bytes |
| Extension | UI-level filter | Whitelist enforcement |
| Virus scan | Not possible | Required for security |
Always validate on both client and server. Client-side validation is a UX convenience, not a security measure.
Drag-and-Drop Upload
// Highlight the drop zone while a drag is hovering over it.
const dropZone = document.getElementById("drop-zone");
const setDragActive = (active) => dropZone.classList.toggle("drag-active", active);
dropZone.addEventListener("dragover", (event) => {
  // preventDefault is required on dragover or the browser refuses the drop.
  event.preventDefault();
  setDragActive(true);
});
dropZone.addEventListener("dragleave", () => setDragActive(false));
// Handle the drop itself: validate each file, then upload one at a time.
dropZone.addEventListener("drop", async (e) => {
// Without preventDefault the browser would navigate to the dropped file.
e.preventDefault();
dropZone.classList.remove("drag-active");
// Dropped files arrive on DataTransfer, not on an <input> element.
const files = Array.from(e.dataTransfer.files);
if (files.length === 0) return;
for (const file of files) {
const validation = validateFile(file, { maxSize: 10 * 1024 * 1024 });
if (!validation.valid) {
// NOTE(review): showError is assumed to be defined elsewhere on the page — confirm.
showError(file.name, validation.errors);
continue;
}
// Sequential await: files upload one after another, not in parallel.
await uploadFile(file);
}
});
Image Preview Before Upload
/**
 * Read a file into a data: URL for an instant local image preview.
 * @param {File|Blob} file - File to read.
 * @returns {Promise<string>} Base64 data URL of the file contents.
 * @throws {DOMException|Error} Rejects if the read fails.
 */
function previewImage(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result);
    // Bug fix: the original never rejected, so a read error left the
    // promise pending forever and the caller hanging on await.
    reader.onerror = () => reject(reader.error ?? new Error("Failed to read file"));
    reader.readAsDataURL(file);
  });
}
// Show a local preview when the chosen file is an image.
const imageInput = document.getElementById("image-input");
const preview = document.getElementById("preview");
imageInput.addEventListener("change", async (e) => {
const file = e.target.files[0];
// Guard on the MIME type so non-image files never reach the <img> preview.
if (file && file.type.startsWith("image/")) {
const dataUrl = await previewImage(file);
preview.src = dataUrl;
preview.style.display = "block";
}
});
Upload Progress Tracking
Fetch does not natively support upload progress. Use XMLHttpRequest for progress events:
/**
 * POST a FormData payload with upload-progress reporting.
 * Fetch has no upload-progress API, so this falls back to XMLHttpRequest.
 * @param {string} url - Endpoint to POST to.
 * @param {FormData} formData - Multipart body to send.
 * @param {(percent: number, loaded: number, total: number) => void} onProgress
 *   Called on each progress event with whole-number percent plus raw byte counts.
 * @returns {Promise<Object>} Parsed JSON response body.
 * @throws {Error} On non-2xx status, network error, abort, or unparsable JSON.
 */
function uploadWithProgress(url, formData, onProgress) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.upload.addEventListener("progress", (event) => {
      // lengthComputable is false when the total upload size is unknown.
      if (event.lengthComputable) {
        const percent = Math.round((event.loaded / event.total) * 100);
        onProgress(percent, event.loaded, event.total);
      }
    });
    xhr.addEventListener("load", () => {
      if (xhr.status >= 200 && xhr.status < 300) {
        // Bug fix: a malformed JSON body used to throw synchronously inside
        // this handler, leaving the promise pending forever; reject instead.
        try {
          resolve(JSON.parse(xhr.responseText));
        } catch (err) {
          reject(new Error("Invalid JSON response", { cause: err }));
        }
      } else {
        reject(new Error(`Upload failed: ${xhr.status}`));
      }
    });
    xhr.addEventListener("error", () => reject(new Error("Network error")));
    xhr.addEventListener("abort", () => reject(new Error("Upload cancelled")));
    xhr.open("POST", url);
    xhr.send(formData);
  });
}
// Usage — assumes `file` is in scope and progressBar/progressText are DOM
// elements (e.g. a <div> bar and a text label) already looked up elsewhere.
const formData = new FormData();
formData.append("file", file);
await uploadWithProgress("/api/upload", formData, (percent, loaded, total) => {
progressBar.style.width = `${percent}%`;
progressText.textContent = `${percent}% (${(loaded / 1024 / 1024).toFixed(1)} MB / ${(total / 1024 / 1024).toFixed(1)} MB)`;
});
Chunked Upload for Large Files
Split large files into chunks for reliable uploads with resume capability:
async function chunkedUpload(file, chunkSize = 5 * 1024 * 1024) {
const totalChunks = Math.ceil(file.size / chunkSize);
const uploadId = crypto.randomUUID();
for (let i = 0; i < totalChunks; i++) {
const start = i * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
const formData = new FormData();
formData.append("chunk", chunk);
formData.append("uploadId", uploadId);
formData.append("chunkIndex", i.toString());
formData.append("totalChunks", totalChunks.toString());
formData.append("fileName", file.name);
const response = await fetch("/api/upload/chunk", {
method: "POST",
body: formData,
});
if (!response.ok) {
throw new Error(`Chunk ${i} failed: ${response.status}`);
}
console.log(`Chunk ${i + 1}/${totalChunks} uploaded`);
}
// Finalize — tell server to assemble chunks
const response = await fetch("/api/upload/finalize", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ uploadId, fileName: file.name, totalChunks }),
});
return response.json();
}
Key Insights
- Never set Content-Type for FormData: The browser must generate the multipart boundary string automatically
- Validate client-side first, server-side always: Client validation is UX; server validation is security
- Fetch has no upload progress API: Use XMLHttpRequest's upload.onprogress event for progress bars
- Chunked uploads enable large files and resume: Split files into 5-10 MB chunks with a server-side assembly step
- Drag-and-drop uses DataTransfer.files: Access dropped files from the event's dataTransfer property after preventing default behavior
Frequently Asked Questions
Why does setting Content-Type manually break FormData uploads?
How do I upload a Blob or canvas image?
Can I cancel an ongoing upload?
What is the maximum file size for fetch uploads?
Conclusion
File uploads with Fetch use FormData for automatic multipart encoding. Always validate files client-side before uploading, never set Content-Type manually with FormData, and use chunked uploads for large files. For upload progress, fall back to XHR. For the full Fetch API, see how to use the JS fetch API complete tutorial. For POST request patterns beyond file uploads, see handling POST requests with JS fetch API guide.
More in this topic
OffscreenCanvas API in JS for UI Performance
Master the OffscreenCanvas API to offload rendering from the main thread. Covers worker-based 2D and WebGL rendering, animation loops inside workers, bitmap transfer, double buffering, chart rendering pipelines, image processing, and performance measurement strategies.
Advanced Web Workers for High Performance JS
Master Web Workers for truly parallel JavaScript execution. Covers dedicated and shared workers, structured cloning, transferable objects, SharedArrayBuffer with Atomics, worker pools, task scheduling, Comlink RPC patterns, module workers, and performance profiling strategies.
JavaScript Macros and Abstract Code Generation
Master JavaScript code generation techniques for compile-time and runtime metaprogramming. Covers AST manipulation, Babel plugin authorship, tagged template literals as macros, code generation pipelines, source-to-source transformation, compile-time evaluation, and safe eval alternatives.