OffscreenCanvas API in JS for UI Performance

Master the OffscreenCanvas API to offload rendering from the main thread. Covers worker-based 2D and WebGL rendering, animation loops inside workers, bitmap transfer, double buffering, chart rendering pipelines, image processing, and performance measurement strategies.

JavaScript · Advanced
18 min read

OffscreenCanvas decouples rendering from the main thread. It provides a canvas that can be used inside Web Workers, preventing heavy draw calls from blocking the UI. Two modes exist: transferControlToOffscreen (worker owns the canvas element) and standalone OffscreenCanvas (worker-only, returns ImageBitmap).

For the fundamentals of Web Workers, see Advanced Web Workers for High Performance JS.

Transfer Control Mode

javascript
// === main.js ===
// Hand rendering of an on-screen canvas over to a dedicated worker.
const canvasEl = document.getElementById("renderCanvas");

// Ownership of the drawing surface moves into this OffscreenCanvas handle.
const offscreenSurface = canvasEl.transferControlToOffscreen();

const renderWorker = new Worker("render-worker.js");

// One-time transfer: after this call the worker owns the canvas and can
// render at 60fps on its own, leaving the main thread free for UI work.
renderWorker.postMessage({ canvas: offscreenSurface }, [offscreenSurface]);

// Configuration updates are plain messages and never block rendering.
renderWorker.postMessage({ type: "config", color: "#ff6600", speed: 2 });

// The worker has no DOM access, so the main thread forwards layout changes.
window.addEventListener("resize", () => {
  const width = canvasEl.clientWidth * devicePixelRatio;
  const height = canvasEl.clientHeight * devicePixelRatio;
  renderWorker.postMessage({ type: "resize", width, height });
});
 
// === render-worker.js ===
// Owns the transferred OffscreenCanvas and drives the animation loop.
let ctx = null;            // 2D context of the transferred canvas
let animationId = null;    // pending requestAnimationFrame id (for cancellation)
let config = { color: "#3366ff", speed: 1 };
let width = 0;             // current drawing-buffer width in physical pixels
let height = 0;            // current drawing-buffer height in physical pixels

self.onmessage = (event) => {
  const data = event.data;

  if (data.canvas) {
    // Received the transferred OffscreenCanvas; start rendering.
    const canvas = data.canvas;
    ctx = canvas.getContext("2d");
    width = canvas.width;
    height = canvas.height;
    startRenderLoop();
  }

  if (data.type === "config") {
    config = { ...config, ...data };
  }

  if (data.type === "resize") {
    width = data.width;
    height = data.height;
    // FIX: resize the actual drawing buffer too. Previously only the
    // local variables changed, so the canvas kept its original
    // resolution and rendering was scaled/cropped after a resize.
    if (ctx) {
      ctx.canvas.width = width;
      ctx.canvas.height = height;
    }
  }
};
 
// Runs an endless requestAnimationFrame loop on the worker thread,
// drawing an animated particle ring. Reads the module-level ctx, width,
// height, and config; stores the pending frame id in animationId so the
// loop can be cancelled externally.
function startRenderLoop() {
  let frame = 0;

  function render() {
    frame++;

    // Clear the whole drawing buffer.
    ctx.clearRect(0, 0, width, height);

    // Draw animated content (runs on worker thread, never blocks UI)
    const centerX = width / 2;
    const centerY = height / 2;
    const radius = Math.min(width, height) * 0.3;
    const particleCount = 200;

    // fillStyle is constant for the whole frame — set it once instead of
    // once per particle (it was previously reassigned 200 times per frame).
    ctx.fillStyle = config.color;

    for (let i = 0; i < particleCount; i++) {
      const angle = (i / particleCount) * Math.PI * 2 + frame * 0.01 * config.speed;
      const r = radius * (0.5 + 0.5 * Math.sin(i * 0.1 + frame * 0.02));
      const x = centerX + Math.cos(angle) * r;
      const y = centerY + Math.sin(angle) * r;
      const size = 2 + Math.sin(i + frame * 0.05) * 2;

      ctx.globalAlpha = 0.5 + 0.5 * Math.sin(i * 0.3 + frame * 0.03);
      ctx.beginPath();
      ctx.arc(x, y, size, 0, Math.PI * 2);
      ctx.fill();
    }

    ctx.globalAlpha = 1;
    // requestAnimationFrame works in workers with a transferred canvas.
    // Keep the id so the loop can be stopped with cancelAnimationFrame.
    animationId = requestAnimationFrame(render);
  }

  render();
}

Standalone OffscreenCanvas (Bitmap Mode)

javascript
// === main.js ===
// Standalone mode: the worker owns a DOM-less OffscreenCanvas and ships
// each finished frame back as an ImageBitmap for display here.

const displayCanvas = document.getElementById("display");

// A "bitmaprenderer" context displays ImageBitmaps without extra copies.
const displayCtx = displayCanvas.getContext("bitmaprenderer");

const worker = new Worker("bitmap-worker.js");

worker.onmessage = (event) => {
  const { bitmap } = event.data;
  if (!bitmap) return;
  // Hand the rendered frame to the visible canvas; the call consumes the
  // bitmap, so no explicit close() is needed afterwards.
  displayCtx.transferFromImageBitmap(bitmap);
};

worker.postMessage({
  type: "start",
  width: displayCanvas.width,
  height: displayCanvas.height
});
 
// === bitmap-worker.js ===
// Renders into a standalone OffscreenCanvas and streams frames out.
let offscreen = null;
let ctx = null;

self.onmessage = ({ data }) => {
  if (data.type !== "start") return;

  // Standalone OffscreenCanvas: no DOM element backs this surface.
  offscreen = new OffscreenCanvas(data.width, data.height);
  ctx = offscreen.getContext("2d");

  renderFrame();
};
 
// Draws one frame of a randomly populated cell grid, ships it to the
// main thread as a transferred ImageBitmap, and schedules the next frame.
function renderFrame() {
  const w = offscreen.width;
  const h = offscreen.height;

  // Background fill (drawn entirely on the worker thread).
  ctx.fillStyle = "#1a1a2e";
  ctx.fillRect(0, 0, w, h);

  // Grid of cells; roughly 30% of cells light up each frame.
  const cellSize = 10;
  const cols = Math.floor(w / cellSize);
  const rows = Math.floor(h / cellSize);

  for (let r = 0; r < rows; r++) {
    for (let c = 0; c < cols; c++) {
      const alive = Math.random() > 0.7;
      if (!alive) continue;
      ctx.fillStyle = `hsl(${(r + c) * 3}, 70%, 50%)`;
      ctx.fillRect(c * cellSize, r * cellSize, cellSize - 1, cellSize - 1);
    }
  }

  // Detach the finished frame as an ImageBitmap and transfer it (zero-copy).
  const bitmap = offscreen.transferToImageBitmap();
  self.postMessage({ bitmap }, [bitmap]);

  // No rAF here (no transferred canvas) — pace frames with setTimeout.
  setTimeout(renderFrame, 1000 / 30); // 30fps
}
 
// ALTERNATIVE: use createImageBitmap for async conversion
// async function renderFrameAsync() {
//   // ... draw to ctx ...
//   const bitmap = await createImageBitmap(offscreen);
//   self.postMessage({ bitmap }, [bitmap]);
// }

WebGL in a Worker

javascript
// === webgl-worker.js ===
// Runs a WebGL2 rendering pipeline entirely on the worker thread.
let gl = null;
let program = null;
let positionBuffer = null;
let frame = 0;

self.onmessage = (event) => {
  const transferred = event.data.canvas;
  if (!transferred) return;

  gl = transferred.getContext("webgl2");
  if (!gl) {
    // Report back rather than throwing: the main thread decides how to
    // degrade when WebGL2 is unavailable inside this worker.
    self.postMessage({ error: "WebGL2 not supported in worker" });
    return;
  }

  initWebGL();
  renderLoop();
};
 
// Compiles the shader pair, links the program, and uploads a full-screen
// triangle-strip quad. Results are stored in the module-level `program`
// and `positionBuffer`. On link failure an error message is posted back
// to the main thread and setup is aborted.
function initWebGL() {
  // Vertex shader: displaces y by a sine wave driven by uTime.
  const vsSource = `#version 300 es
    in vec2 aPosition;
    uniform float uTime;
    out vec2 vPos;
    void main() {
      float wave = sin(aPosition.x * 3.0 + uTime) * 0.1;
      gl_Position = vec4(aPosition.x, aPosition.y + wave, 0.0, 1.0);
      vPos = aPosition;
    }
  `;

  // Fragment shader: animated RGB gradient over the quad.
  const fsSource = `#version 300 es
    precision mediump float;
    in vec2 vPos;
    uniform float uTime;
    out vec4 fragColor;
    void main() {
      float r = 0.5 + 0.5 * sin(vPos.x * 5.0 + uTime);
      float g = 0.5 + 0.5 * cos(vPos.y * 5.0 + uTime * 0.7);
      float b = 0.5 + 0.5 * sin(uTime * 0.5);
      fragColor = vec4(r, g, b, 1.0);
    }
  `;

  // Compile shaders (compileShader throws on compile errors)
  const vs = compileShader(gl.VERTEX_SHADER, vsSource);
  const fs = compileShader(gl.FRAGMENT_SHADER, fsSource);

  program = gl.createProgram();
  gl.attachShader(program, vs);
  gl.attachShader(program, fs);
  gl.linkProgram(program);

  // FIX: shader objects are no longer needed once the program is linked.
  // Deleting them releases driver resources; the program keeps working.
  gl.deleteShader(vs);
  gl.deleteShader(fs);

  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    self.postMessage({ error: gl.getProgramInfoLog(program) });
    return;
  }

  // Create geometry (triangle strip quad covering clip space)
  const positions = new Float32Array([
    -1, -1,   1, -1,   -1, 1,   1, 1
  ]);

  positionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
 
// Compiles a single shader stage and returns the shader object.
// `type` is gl.VERTEX_SHADER or gl.FRAGMENT_SHADER. On failure the
// shader is deleted and an Error carrying the driver's info log is thrown.
function compileShader(type, source) {
  const shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  const compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
  if (compiled) return shader;

  const info = gl.getShaderInfoLog(shader);
  gl.deleteShader(shader);
  throw new Error(`Shader compile error: ${info}`);
}
 
// Per-frame draw: clears the framebuffer, updates the uTime uniform, and
// draws the quad. Uniform/attribute locations are looked up once and
// memoized on the function itself — the original queried the GL driver
// every frame, which is a needless round-trip since `program` is linked
// exactly once in initWebGL and its locations never change.
function renderLoop() {
  frame++;

  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
  gl.clearColor(0.05, 0.05, 0.15, 1.0);
  gl.clear(gl.COLOR_BUFFER_BIT);

  gl.useProgram(program);

  // Lazily resolve and cache shader variable locations (stable per program).
  const locs = (renderLoop._locs ??= {
    time: gl.getUniformLocation(program, "uTime"),
    position: gl.getAttribLocation(program, "aPosition")
  });

  // Set time uniform
  gl.uniform1f(locs.time, frame * 0.02);

  // Bind position attribute
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.enableVertexAttribArray(locs.position);
  gl.vertexAttribPointer(locs.position, 2, gl.FLOAT, false, 0, 0);

  // Draw
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

  requestAnimationFrame(renderLoop);
}

Double Buffering Pattern

javascript
// Use two OffscreenCanvases: one renders while the other displays
 
class DoubleBufferedRenderer {
  // Front buffer holds the last completed frame; back buffer is the one
  // currently being drawn into. The pair swaps after every frame.
  // (FIX: removed the unused `#rendering` field — it was never read or
  // written anywhere in the class.)
  #frontBuffer;
  #backBuffer;
  #frontCtx;
  #backCtx;
  #width;
  #height;

  /**
   * @param {number} width  - buffer width in pixels
   * @param {number} height - buffer height in pixels
   */
  constructor(width, height) {
    this.#width = width;
    this.#height = height;

    // Two off-screen buffers
    this.#frontBuffer = new OffscreenCanvas(width, height);
    this.#backBuffer = new OffscreenCanvas(width, height);
    this.#frontCtx = this.#frontBuffer.getContext("2d");
    this.#backCtx = this.#backBuffer.getContext("2d");
  }

  // Draw to the back buffer (not yet visible).
  // drawFn receives (ctx, width, height).
  renderToBackBuffer(drawFn) {
    this.#backCtx.clearRect(0, 0, this.#width, this.#height);
    drawFn(this.#backCtx, this.#width, this.#height);
  }

  // Swap front and back buffers (and their matching contexts).
  swap() {
    [this.#frontBuffer, this.#backBuffer] = [this.#backBuffer, this.#frontBuffer];
    [this.#frontCtx, this.#backCtx] = [this.#backCtx, this.#frontCtx];
  }

  // Detach the front buffer's contents as an ImageBitmap for display.
  // Note: transferToImageBitmap leaves the front buffer blank afterwards,
  // which is harmless here — after the next swap it becomes the back
  // buffer, and renderToBackBuffer clears it before drawing anyway.
  getDisplayBitmap() {
    return this.#frontBuffer.transferToImageBitmap();
  }

  // Render one frame: draw to the back buffer, swap, return the bitmap.
  renderFrame(drawFn) {
    this.renderToBackBuffer(drawFn);
    this.swap();
    return this.getDisplayBitmap();
  }
}
 
// USAGE IN WORKER
// const renderer = new DoubleBufferedRenderer(800, 600);
//
// function animate() {
//   const bitmap = renderer.renderFrame((ctx, w, h) => {
//     // Complex drawing that takes variable time
//     ctx.fillStyle = "#222";
//     ctx.fillRect(0, 0, w, h);
//
//     for (let i = 0; i < 1000; i++) {
//       ctx.fillStyle = `hsl(${i * 0.36}, 80%, 60%)`;
//       ctx.fillRect(
//         Math.random() * w,
//         Math.random() * h,
//         5, 5
//       );
//     }
//   });
//
//   self.postMessage({ bitmap }, [bitmap]);
//   requestAnimationFrame(animate);
// }
// animate();

Image Processing Pipeline

javascript
// Process images entirely off the main thread
 
// === image-processor-worker.js ===
// Pixel-level filters. Each takes an ImageData-like object ({ data }),
// mutates its RGBA pixel array in place, and returns the same object.
// The alpha channel is left untouched by every filter.
const filters = {
  // Luminance-weighted grayscale (ITU-R BT.601 coefficients).
  grayscale(pixels) {
    const { data } = pixels;
    for (let i = 0; i < data.length; i += 4) {
      const avg = data[i] * 0.299 + data[i + 1] * 0.587 + data[i + 2] * 0.114;
      data[i] = data[i + 1] = data[i + 2] = avg;
    }
    return pixels;
  },

  // Classic sepia tone matrix, clamped to the byte range.
  sepia(pixels) {
    const { data } = pixels;
    for (let i = 0; i < data.length; i += 4) {
      const r = data[i];
      const g = data[i + 1];
      const b = data[i + 2];
      data[i]     = Math.min(255, r * 0.393 + g * 0.769 + b * 0.189);
      data[i + 1] = Math.min(255, r * 0.349 + g * 0.686 + b * 0.168);
      data[i + 2] = Math.min(255, r * 0.272 + g * 0.534 + b * 0.131);
    }
    return pixels;
  },

  // Box blur: averages each pixel over a (2*radius+1)^2 window.
  // Pixels within `radius` of a border are left untouched.
  blur(pixels, width, height, radius = 3) {
    const { data } = pixels;
    const source = new Uint8ClampedArray(data);
    const side = radius * 2 + 1;
    const area = side * side;

    for (let y = radius; y < height - radius; y++) {
      for (let x = radius; x < width - radius; x++) {
        let sumR = 0;
        let sumG = 0;
        let sumB = 0;

        for (let oy = -radius; oy <= radius; oy++) {
          for (let ox = -radius; ox <= radius; ox++) {
            const src = ((y + oy) * width + (x + ox)) * 4;
            sumR += source[src];
            sumG += source[src + 1];
            sumB += source[src + 2];
          }
        }

        const dst = (y * width + x) * 4;
        data[dst]     = sumR / area;
        data[dst + 1] = sumG / area;
        data[dst + 2] = sumB / area;
      }
    }

    return pixels;
  },

  // 3x3 sharpen convolution; border pixels and alpha are skipped.
  sharpen(pixels, width, height) {
    const { data } = pixels;
    const source = new Uint8ClampedArray(data);

    // Sharpening kernel: [0,-1,0,-1,5,-1,0,-1,0]
    const kernel = [0, -1, 0, -1, 5, -1, 0, -1, 0];

    for (let y = 1; y < height - 1; y++) {
      for (let x = 1; x < width - 1; x++) {
        for (let channel = 0; channel < 3; channel++) {
          let acc = 0;
          for (let ky = -1; ky <= 1; ky++) {
            for (let kx = -1; kx <= 1; kx++) {
              const src = ((y + ky) * width + (x + kx)) * 4 + channel;
              acc += source[src] * kernel[(ky + 1) * 3 + (kx + 1)];
            }
          }
          data[(y * width + x) * 4 + channel] = Math.max(0, Math.min(255, acc));
        }
      }
    }

    return pixels;
  }
};
 
// Worker entry point: rasterizes the incoming ImageBitmap, runs the
// requested filter pipeline over its pixels, and transfers the processed
// frame back to the main thread as a new ImageBitmap.
self.onmessage = async (event) => {
  const { type, bitmap, pipeline, width, height } = event.data;
  if (type !== "process") return;

  // Rasterize into an OffscreenCanvas so the pixels become readable.
  const canvas = new OffscreenCanvas(width, height);
  const ctx = canvas.getContext("2d");
  ctx.drawImage(bitmap, 0, 0);
  bitmap.close(); // Release the source bitmap

  let imageData = ctx.getImageData(0, 0, width, height);

  // Run each requested filter in order; unknown names are skipped.
  for (const filterName of pipeline) {
    if (filters[filterName]) {
      imageData = filters[filterName](imageData, width, height);
    }
  }

  ctx.putImageData(imageData, 0, 0);

  // Hand the processed frame back (zero-copy transfer).
  const result = canvas.transferToImageBitmap();
  self.postMessage({ result }, [result]);
};
 
// === main.js ===
// async function processImage(imgElement, filterPipeline) {
//   const bitmap = await createImageBitmap(imgElement);
//
//   return new Promise((resolve) => {
//     const worker = new Worker("image-processor-worker.js");
//
//     worker.onmessage = (event) => {
//       const resultCanvas = document.getElementById("result");
//       const ctx = resultCanvas.getContext("bitmaprenderer");
//       ctx.transferFromImageBitmap(event.data.result);
//       worker.terminate();
//       resolve();
//     };
//
//     worker.postMessage({
//       type: "process",
//       bitmap,
//       pipeline: filterPipeline,
//       width: imgElement.naturalWidth,
//       height: imgElement.naturalHeight
//     }, [bitmap]);
//   });
// }
//
// // Apply grayscale + sharpen pipeline
// const img = document.querySelector("img");
// await processImage(img, ["grayscale", "sharpen"]);
| Feature | Main Thread Canvas | OffscreenCanvas (Transfer) | OffscreenCanvas (Bitmap) |
|---|---|---|---|
| Where it renders | Main thread | Worker thread | Worker thread |
| DOM access | Yes | No | No |
| Blocks UI | Yes | No | No |
| requestAnimationFrame | Yes | Yes | No (use setTimeout) |
| 2D Context | Yes | Yes | Yes |
| WebGL/WebGL2 | Yes | Yes | Yes |
| Browser support | Universal | Chrome 69+, Firefox 105+ | Chrome 69+, Firefox 105+ |
| Use case | Simple UI | Continuous animation | On-demand rendering |
| Optimization | Impact | When to Use |
|---|---|---|
| transferControlToOffscreen | Frees main thread completely | Continuous animations, games |
| transferToImageBitmap | Zero-copy frame delivery | On-demand chart/image rendering |
| Double buffering | Smooth frame presentation | Complex scenes with variable render time |
| Worker pools + OffscreenCanvas | Parallel rendering | Multiple independent canvases |
| SharedArrayBuffer for vertex data | Avoid cloning geometry | Scenes with shared geometry across workers |
Rune AI

Rune AI

Key Insights

  • OffscreenCanvas provides two modes: transfer mode moves an on-screen canvas to a worker, while standalone mode creates a worker-only canvas that returns ImageBitmap: Transfer mode is ideal for continuous animations; standalone mode suits on-demand rendering
  • WebGL and 2D contexts both work inside Web Workers when used with OffscreenCanvas: Complex shaders, particle systems, and data visualizations can run at full frame rate without blocking the UI thread
  • Double buffering with two OffscreenCanvases prevents visual tearing by rendering to one buffer while displaying the other: Swap the buffers after each frame to present the completed render
  • Image processing pipelines benefit greatly from worker-based OffscreenCanvas by applying filters (blur, sharpen, grayscale) without freezing the interface: Process the image as ImageData, apply convolution kernels, and return the result as an ImageBitmap
  • requestAnimationFrame works in workers with transferred canvases, providing vsync-aligned rendering independent of the main thread's workload: For standalone canvases without a DOM element, use setTimeout-based frame scheduling instead
Powered by Rune AI

Frequently Asked Questions

What is the difference between transferControlToOffscreen and standalone OffscreenCanvas?

`transferControlToOffscreen()` moves control of an existing on-screen canvas element to a worker. The worker's rendering automatically appears on screen. The main thread cannot use that canvas anymore. Standalone `new OffscreenCanvas(w, h)` creates a canvas with no DOM element. The worker renders to it and must explicitly send an ImageBitmap back to the main thread via postMessage. Use transfer mode for continuous animations (the worker drives the display). Use standalone mode for on-demand rendering where the main thread decides when to display results.

Does requestAnimationFrame work inside Web Workers?

Yes. Modern browsers support `requestAnimationFrame` inside workers when the worker has a transferred OffscreenCanvas. The callback runs at the display refresh rate (typically 60fps), synchronized with the compositor. For standalone OffscreenCanvas without a transferred element, use `setTimeout(fn, 1000/60)` or compute your own frame timing. Note that `requestAnimationFrame` in a worker is independent of the main thread's animation frame, so the worker can render at full frame rate even while the main thread is busy.

How do I handle canvas resizing with OffscreenCanvas in a worker?

The worker does not have access to the DOM, so it cannot detect resize events. The main thread must observe resize events (via `ResizeObserver` or `window.addEventListener("resize", ...)`) and send the new dimensions to the worker via postMessage. The worker then updates the canvas dimensions. For transferred canvases, set `canvas.width` and `canvas.height` in the worker. For standalone canvases, create a new OffscreenCanvas with the new size or resize the existing one.

Can I use OffscreenCanvas with React or other UI frameworks?

Yes. In React, use a ref to get the canvas element, call `transferControlToOffscreen()` in a useEffect hook (once on mount), and send the OffscreenCanvas to a worker. The worker owns rendering from that point. React can still control the canvas container's CSS dimensions and pass configuration to the worker via postMessage. The key constraint is that `transferControlToOffscreen()` can only be called once per canvas element, so guard against React's strict mode double-mounting in development by checking if the canvas has already been transferred.

Conclusion

OffscreenCanvas moves rendering off the main thread entirely, ensuring smooth UI interactions even during heavy draw operations. Combined with Web Workers and transferable objects, it enables high-performance visualization pipelines. For the Web Worker fundamentals behind these patterns, revisit Advanced Web Workers for High Performance JS. For reactive state management in UI frameworks, see Creating Advanced UI Frameworks in JavaScript.