Writing Self-Modifying Code in JS Architecture

Explore self-modifying JavaScript patterns for adaptive, optimizing architectures. Covers function replacement, hot-path optimization, lazy initialization, adaptive algorithms, plugin hot-reloading, self-patching APIs, runtime code generation, and safety guardrails for production use.

JavaScript · advanced
17 min read

Self-modifying code replaces its own implementation at runtime. In JavaScript, this means functions that overwrite themselves after first execution, objects that reshape their method implementations based on usage patterns, and architectures that adapt to runtime conditions.

For the metaprogramming foundations, see JS Metaprogramming Advanced Architecture Guide.

Function Self-Replacement

The simplest self-modifying pattern is a function that overwrites itself after the first call. On the initial invocation, it does the expensive setup work and then replaces itself with a lightweight version that just returns the cached result. Every subsequent call skips initialization entirely. The second example takes this further with a self-optimizing sort that profiles incoming data types and eventually swaps its implementation to a specialized comparator.

javascript
// A function that replaces itself after first call (lazy initialization)
 
let getDatabase = function() {
  // The expensive setup below executes exactly once.
  const connection = {
    host: "localhost",
    port: 5432,
    connected: true,
    query(sql) { return [{ id: 1 }]; }
  };
 
  console.log("Database initialized");
 
  // Swap ourselves out for a trivial accessor so later calls skip setup.
  getDatabase = () => connection;
 
  return connection;
};
 
getDatabase(); // "Database initialized" (first call: initializes)
getDatabase(); // (subsequent calls: returns cached connection instantly)
getDatabase(); // (no initialization overhead)
 
// SELF-OPTIMIZING FUNCTION
// Detects the hot path and optimizes for it.
//
// Bug fix: the original profiling phase returned arr.slice().sort() with no
// comparator, which sorts numbers lexicographically ([10, 2] stayed [10, 2]).
// Profiling-phase calls now use a comparator matched to the detected type,
// so results are correct both before and after self-optimization.
function createOptimizingSort() {
  let callCount = 0;
  let numericCount = 0;
  let stringCount = 0;
  const threshold = 10;
 
  let sort = function(arr) {
    callCount++;
 
    // Detect data type pattern
    const isNumeric = arr.every(item => typeof item === "number");
    if (isNumeric) numericCount++;
    else stringCount++;
 
    // After enough samples, replace with optimized version
    if (callCount >= threshold) {
      if (numericCount > stringCount * 2) {
        console.log("Optimizing for numeric sort");
        sort = (arr) => arr.slice().sort((a, b) => a - b);
      } else if (stringCount > numericCount * 2) {
        console.log("Optimizing for string sort");
        sort = (arr) => arr.slice().sort((a, b) => a.localeCompare(b));
      }
    }
 
    // Type-appropriate sort for the profiling phase (never mutates the input)
    return isNumeric
      ? arr.slice().sort((a, b) => a - b)
      : arr.slice().sort((a, b) => String(a).localeCompare(String(b)));
  };
 
  // Return a stable wrapper that always delegates to the current `sort`
  return function(arr) {
    return sort(arr);
  };
}
 
const smartSort = createOptimizingSort();
 
// Feed it numeric arrays
for (let i = 0; i < 10; i++) {
  smartSort([3, 1, 4, 1, 5, 9]); // After 10 calls, optimizes for numeric
}
 
console.log(smartSort([5, 2, 8, 1])); // [1, 2, 5, 8] (uses numeric-optimized path)
 
// CONDITIONAL FEATURE LOADING
let renderChart;

// Feature-detect once at load time and bind the matching implementation.
renderChart = typeof OffscreenCanvas !== "undefined"
  ? function(data) {
      // Use OffscreenCanvas for better performance
      return { engine: "offscreen", data };
    }
  : function(data) {
      // Fallback to regular canvas
      return { engine: "canvas", data };
    };

Lazy Property Initialization

Lazy properties work the same way as function self-replacement but at the property level. You define a getter with Object.defineProperty that runs an initializer on first access, then immediately redefines itself as a plain data property with the computed value. After that first read, the getter is gone and you are just reading a normal property. This is a good fit for objects with expensive-to-compute fields that may never actually be accessed.

javascript
// Properties that compute their value on first access, then replace themselves
 
function lazyProperty(obj, name, initializer) {
  Object.defineProperty(obj, name, {
    configurable: true,
    enumerable: true,
    get() {
      // First read: run the initializer exactly once.
      const computed = initializer.call(this);
 
      // Shadow the accessor with a frozen data property holding the result.
      Object.defineProperty(this, name, {
        value: computed,
        writable: false,
        configurable: false,
        enumerable: true
      });
 
      return computed;
    }
  });
}
 
class HeavyComponent {
  constructor(config) {
    this.config = config;
 
    // Each field below stays uncomputed until something reads it.
    lazyProperty(this, "schema", () => {
      console.log("Computing schema...");
      return this.#buildSchema();
    });
 
    lazyProperty(this, "validators", () => {
      console.log("Building validators...");
      return this.#buildValidators();
    });
 
    lazyProperty(this, "serializer", () => {
      console.log("Creating serializer...");
      return this.#buildSerializer();
    });
  }
 
  // Expensive schema computation — deferred until `schema` is first read.
  #buildSchema() {
    return { fields: Object.keys(this.config), version: 1 };
  }
 
  // Builds one type-checking validator per configured field.
  #buildValidators() {
    const entries = Object.entries(this.config).map(
      ([field, sample]) => [field, (val) => typeof val === typeof sample]
    );
    return Object.fromEntries(entries);
  }
 
  // Serializer that keeps only the fields declared in config.
  #buildSerializer() {
    const fields = Object.keys(this.config);
    return {
      serialize(data) {
        const present = fields.filter((f) => f in data);
        return JSON.stringify(
          Object.fromEntries(present.map((f) => [f, data[f]]))
        );
      }
    };
  }
}
 
const comp = new HeavyComponent({ name: "", age: 0, email: "" });
// Nothing computed yet
 
console.log(comp.schema);      // "Computing schema..." (first access)
console.log(comp.schema);      // Returns cached value (no log)
// validators and serializer still not computed
 
// LAZY MODULE PATTERN
const modules = {};
 
function lazyModule(name, factory) {
  Object.defineProperty(modules, name, {
    configurable: true,
    enumerable: true,
    get() {
      // First access: build the module, then pin it as a plain value.
      const instance = factory();
      Object.defineProperty(modules, name, { value: instance });
      return instance;
    }
  });
}
 
lazyModule("crypto", () => {
  console.log("Loading crypto module...");
  return { hash: (s) => s.split("").reverse().join("") };
});
 
lazyModule("parser", () => {
  console.log("Loading parser module...");
  return { parse: (s) => JSON.parse(s) };
});
 
// Modules load only when needed
console.log(modules.crypto.hash("hello")); // "Loading crypto module..." -> "olleh"
console.log(modules.crypto.hash("world")); // "dlrow" (cached, no loading)

Adaptive Algorithms

An adaptive algorithm monitors its own usage patterns and switches strategies when it detects a better fit. The AdaptiveCache below starts with LRU eviction, then periodically analyzes access frequency variance. If some keys are accessed far more than others, it switches to LFU. If access is roughly even, it sticks with LRU. The rate limiter at the end of this section does something similar, automatically increasing its limit when the rejection rate gets too high.

javascript
// Algorithms that modify their strategy based on runtime data
 
class AdaptiveCache {
  #storage = new Map();
  #accessCounts = new Map();
  #strategy = "lru"; // Start with LRU
  #evictionHistory = [];
  #adaptationThreshold = 50;
  #operationCount = 0;
 
  constructor(maxSize = 100) {
    this.maxSize = maxSize;
    this.#evict = this.#lruEvict.bind(this); // Initial strategy
  }
 
  #evict; // Function reference, replaced at runtime
 
  /**
   * Read a value, recording the access for strategy analysis.
   * Bug fix: the original never refreshed recency on a hit, so the "LRU"
   * strategy was actually FIFO. A hit now re-inserts the key, moving it to
   * the back of the Map's insertion order.
   */
  get(key) {
    this.#operationCount++;
    const count = (this.#accessCounts.get(key) || 0) + 1;
    this.#accessCounts.set(key, count);
 
    this.#maybeAdapt();
 
    if (this.#storage.has(key)) {
      const value = this.#storage.get(key);
      this.#storage.delete(key); // refresh recency in Map iteration order
      this.#storage.set(key, value);
      return value;
    }
    return undefined;
  }
 
  // Store a value, evicting first when inserting a new key at capacity.
  set(key, value) {
    if (this.#storage.size >= this.maxSize && !this.#storage.has(key)) {
      this.#evict();
    }
    // Delete first so overwrites also count as "recently used".
    this.#storage.delete(key);
    this.#storage.set(key, value);
  }
 
  #lruEvict() {
    // Remove the least recently used (first key in Map order)
    const firstKey = this.#storage.keys().next().value;
    this.#evictionHistory.push({ key: firstKey, strategy: "lru" });
    this.#storage.delete(firstKey);
    this.#accessCounts.delete(firstKey);
  }
 
  #lfuEvict() {
    // Remove the least frequently used
    let minKey = null;
    let minCount = Infinity;
 
    for (const [key, count] of this.#accessCounts) {
      if (count < minCount) {
        minCount = count;
        minKey = key;
      }
    }
 
    if (minKey !== null) {
      this.#evictionHistory.push({ key: minKey, strategy: "lfu" });
      this.#storage.delete(minKey);
      this.#accessCounts.delete(minKey);
    }
  }
 
  // Every #adaptationThreshold operations, re-pick the eviction strategy.
  #maybeAdapt() {
    if (this.#operationCount % this.#adaptationThreshold !== 0) return;
 
    // Analyze access patterns (guard: empty sample would make variance NaN)
    const counts = [...this.#accessCounts.values()];
    if (counts.length === 0) return;
    const avg = counts.reduce((a, b) => a + b, 0) / counts.length;
    const variance = counts.reduce((sum, c) => sum + (c - avg) ** 2, 0) / counts.length;
 
    // High variance = some items accessed much more than others -> use LFU
    // Low variance = even access pattern -> use LRU
    if (variance > avg * 2 && this.#strategy !== "lfu") {
      console.log("Adapting to LFU strategy (uneven access detected)");
      this.#strategy = "lfu";
      this.#evict = this.#lfuEvict.bind(this);
    } else if (variance <= avg * 2 && this.#strategy !== "lru") {
      console.log("Adapting to LRU strategy (even access detected)");
      this.#strategy = "lru";
      this.#evict = this.#lruEvict.bind(this);
    }
  }
 
  get currentStrategy() {
    return this.#strategy;
  }
}
 
// SELF-TUNING RATE LIMITER
class AdaptiveRateLimiter {
  #windowMs;
  #maxRequests;
  #timestamps = [];
  #rejections = 0;
  #totalRequests = 0;
 
  constructor(windowMs = 60000, maxRequests = 100) {
    this.#windowMs = windowMs;
    this.#maxRequests = maxRequests;
  }
 
  // Returns true when the request fits inside the sliding window,
  // false (and possibly self-tunes) when the window is full.
  tryAcquire() {
    const now = Date.now();
    this.#totalRequests++;
 
    // Drop timestamps that have aged out of the window.
    const cutoff = now - this.#windowMs;
    this.#timestamps = this.#timestamps.filter((t) => t > cutoff);
 
    if (this.#timestamps.length < this.#maxRequests) {
      this.#timestamps.push(now);
      return true;
    }
 
    this.#rejections++;
    this.#maybeTune();
    return false;
  }
 
  // Loosen the limit by 20% once rejections exceed 30% of observed traffic.
  #maybeTune() {
    const rejectionRate = this.#rejections / this.#totalRequests;
 
    if (rejectionRate > 0.3 && this.#totalRequests > 50) {
      const oldLimit = this.#maxRequests;
      this.#maxRequests = Math.ceil(this.#maxRequests * 1.2);
      console.log(`Rate limit adjusted: ${oldLimit} -> ${this.#maxRequests}`);
      this.#rejections = 0;
      this.#totalRequests = 0;
    }
  }
}

Hot-Reloading and Live Patching

Hot-reloading swaps out module implementations while your app keeps running. The trick is a Proxy sitting between consumer code and the actual module. When you register a new version, the Proxy automatically delegates to it, so existing references do not break. This is useful during development and also in production plugin systems where you need to update behavior without downtime. The ResilientService at the bottom takes this a step further by auto-patching handlers that fail repeatedly, wrapping them in retry logic after a threshold of errors.

javascript
// Replace module implementations at runtime without restart
 
class ModuleRegistry {
  #modules = new Map();
  #proxies = new Map();
  #versions = new Map();
  #listeners = new Map();
 
  // Install (or hot-replace) a module, bumping its version and
  // notifying any reload listeners.
  register(name, implementation) {
    const nextVersion = (this.#versions.get(name) || 0) + 1;
    this.#versions.set(name, nextVersion);
    this.#modules.set(name, implementation);
 
    console.log(`Module "${name}" registered (v${nextVersion})`);
 
    for (const listener of this.#listeners.get(name) || []) {
      listener(implementation, nextVersion);
    }
 
    // Existing proxies automatically pick up the new implementation.
    return this;
  }
 
  // Hand out a stable proxy; every property access is re-resolved, so
  // consumers always see the most recently registered implementation.
  resolve(name) {
    const cached = this.#proxies.get(name);
    if (cached) return cached;
 
    const lookup = () => {
      const impl = this.#modules.get(name);
      if (!impl) throw new Error(`Module "${name}" not registered`);
      return impl;
    };
 
    const proxy = new Proxy({}, {
      get(_target, prop) {
        const impl = lookup();
        const value = impl[prop];
        // Bind methods so `this` inside them is the current implementation.
        return typeof value === "function" ? value.bind(impl) : value;
      },
 
      set(_target, prop, value) {
        lookup()[prop] = value;
        return true;
      }
    });
 
    this.#proxies.set(name, proxy);
    return proxy;
  }
 
  // Subscribe to future registrations of `name`.
  onReload(name, callback) {
    const existing = this.#listeners.get(name);
    if (existing) existing.push(callback);
    else this.#listeners.set(name, [callback]);
  }
 
  getVersion(name) {
    return this.#versions.get(name) || 0;
  }
}
 
const registry = new ModuleRegistry();
 
// Version 1
registry.register("formatter", {
  format(data) { return JSON.stringify(data); },
  version: 1
});
 
const formatter = registry.resolve("formatter");
console.log(formatter.format({ a: 1 })); // '{"a":1}'
 
// Hot-reload version 2 (with pretty printing)
registry.register("formatter", {
  format(data) { return JSON.stringify(data, null, 2); },
  version: 2
});
 
// Same reference, new behavior
console.log(formatter.format({ a: 1 }));
// {
//   "a": 1
// }
 
// SELF-PATCHING ERROR HANDLER
class ResilientService {
  #handlers = new Map();
  #errorCounts = new Map();
  #patchThreshold = 3;
 
  register(name, handler) {
    this.#handlers.set(name, handler);
    this.#errorCounts.set(name, 0);
  }
 
  // Run a handler; once it has failed #patchThreshold times, it gets
  // auto-wrapped in retry logic. The triggering error still propagates.
  async execute(name, ...args) {
    const handler = this.#handlers.get(name);
    if (!handler) throw new Error(`Handler "${name}" not found`);
 
    try {
      return await handler(...args);
    } catch (err) {
      const failures = this.#errorCounts.get(name) + 1;
      this.#errorCounts.set(name, failures);
 
      if (failures >= this.#patchThreshold) {
        console.log(`Auto-patching "${name}" after ${failures} failures`);
        this.#patch(name, err);
      }
 
      throw err;
    }
  }
 
  // Replace the handler with a retrying wrapper: 3 attempts with
  // linearly increasing backoff (1s, 2s), then rethrow the last error.
  #patch(name, lastError) {
    const original = this.#handlers.get(name);
 
    this.#handlers.set(name, async (...args) => {
      for (let attempt = 0; attempt < 3; attempt++) {
        try {
          return await original(...args);
        } catch (err) {
          if (attempt === 2) throw err;
          await new Promise((resolve) => setTimeout(resolve, 1000 * (attempt + 1)));
        }
      }
    });
 
    this.#errorCounts.set(name, 0);
  }
}

Safety Guardrails

Self-modifying code needs boundaries. Without them, debugging becomes a nightmare because the running code no longer matches your source files. The SafeModifier class below tracks every modification with timestamps and stack traces, stores the original values for rollback, enforces a maximum number of changes, and can be frozen to prevent further modifications. If something goes wrong, you can rollbackAll() to restore the original behavior and inspect the audit log to see exactly what changed and when.

javascript
// Prevent dangerous self-modification with sandboxing and auditing
 
class SafeModifier {
  #original = new Map();      // key -> original own descriptor (for rollback)
  #modifications = [];        // newest-last audit stack
  #maxModifications;
  #frozen = false;
 
  constructor(maxModifications = 100) {
    this.#maxModifications = maxModifications;
  }
 
  /**
   * Replace target[property] with newValue, recording enough state to undo
   * the change later. Throws when frozen or over the modification budget.
   * Bug fix: `existed` now uses Object.hasOwn instead of `in`, so an
   * inherited property (own descriptor undefined) no longer makes rollback
   * call Object.defineProperty with an undefined descriptor.
   */
  modify(target, property, newValue) {
    if (this.#frozen) {
      throw new Error("Modifier is frozen: no more changes allowed");
    }
 
    if (this.#modifications.length >= this.#maxModifications) {
      throw new Error(`Maximum modifications (${this.#maxModifications}) reached`);
    }
 
    // Store the original own descriptor once per key for full rollback.
    const key = `${target.constructor.name}.${String(property)}`;
    if (!this.#original.has(key)) {
      this.#original.set(key, {
        target,
        property,
        descriptor: Object.getOwnPropertyDescriptor(target, property),
        existed: Object.hasOwn(target, property)
      });
    }
 
    const oldValue = target[property];
 
    this.#modifications.push({
      key,
      property,
      oldValue,
      newValue,
      timestamp: Date.now(),
      stack: new Error().stack
    });
 
    target[property] = newValue;
 
    return this;
  }
 
  /**
   * Undo the most recent `count` modifications, newest first.
   * Bug fix: the original restored the first saved descriptor and discarded
   * it even when older modifications of the same property were still on the
   * stack, so rolling back one step of a twice-modified property jumped all
   * the way to the original and left the remaining entry unrollbackable.
   * Intermediate steps now reassign the recorded oldValue; the saved
   * descriptor is only restored (and dropped) when the last modification of
   * that key is undone.
   */
  rollback(count = 1) {
    for (let i = 0; i < count && this.#modifications.length > 0; i++) {
      const mod = this.#modifications.pop();
 
      // Older modifications of the same key remain: step back one value only.
      if (this.#modifications.some((m) => m.key === mod.key)) {
        const entry = this.#original.get(mod.key);
        entry.target[mod.property] = mod.oldValue;
        continue;
      }
 
      const original = this.#original.get(mod.key);
      if (original) {
        if (original.existed && original.descriptor) {
          Object.defineProperty(original.target, original.property, original.descriptor);
        } else {
          // Property was absent (or inherited): remove the own property.
          delete original.target[original.property];
        }
        this.#original.delete(mod.key);
      }
    }
  }
 
  rollbackAll() {
    this.rollback(this.#modifications.length);
  }
 
  // Permanently disallow further modify() calls.
  freeze() {
    this.#frozen = true;
  }
 
  // Human-readable change log (functions shown by name, values as-is).
  audit() {
    return this.#modifications.map((m) => ({
      property: m.key,
      oldValue: typeof m.oldValue === "function" ? m.oldValue.name || "fn" : m.oldValue,
      newValue: typeof m.newValue === "function" ? m.newValue.name || "fn" : m.newValue,
      timestamp: new Date(m.timestamp).toISOString()
    }));
  }
}
 
const modifier = new SafeModifier(50);
 
const service = {
  process(data) { return data; },
  validate(data) { return true; }
};
 
modifier.modify(service, "process", function enhanced(data) {
  console.log("Enhanced processing");
  return { ...data, enhanced: true };
});
 
console.log(service.process({ x: 1 })); // Enhanced processing -> { x: 1, enhanced: true }
 
// Audit trail
console.log(modifier.audit());
 
// Rollback
modifier.rollbackAll();
console.log(service.process({ x: 1 })); // { x: 1 } (original behavior restored)
| Pattern | When to Use | Risk Level | Mitigation |
| --- | --- | --- | --- |
| Lazy initialization | Heavy setup, may not be needed | Low | Immutable after init |
| Function replacement | One-time feature detection | Low | Replace once, never again |
| Adaptive algorithm | Usage patterns vary at runtime | Medium | Bounded adaptation, monitoring |
| Hot-reload | Development, plugin systems | Medium | Proxy indirection, version tracking |
| Self-patching | Fault tolerance | Medium-High | Audit trail, rollback capability |
| Runtime code generation | Template compilation, DSLs | High | Sandboxing, input validation |
Rune AI

Rune AI

Key Insights

  • Function self-replacement after first call (lazy initialization) is a safe, widely-used pattern that avoids paying setup costs until needed: The function overwrites itself with a simpler version that returns the cached result
  • Adaptive algorithms that modify their strategy based on runtime data can outperform fixed strategies: Implement bounded adaptation with monitoring to prevent runaway behavior changes
  • Hot-reloading via Proxy indirection allows replacing module implementations without changing consumer code: The proxy always delegates to the current implementation in the registry
  • Safety guardrails (audit trails, rollback, modification limits, freeze) are essential for production self-modifying code: Track every modification with timestamps and stack traces for debugging
  • V8 deoptimizes and reoptimizes when functions are replaced, making one-time replacements efficient but frequent modifications costly: Keep self-modification outside hot loops and performance-critical paths
Powered by Rune AI

Frequently Asked Questions

Is self-modifying code safe for production?

Safe patterns like lazy initialization and one-time feature detection are widely used in production. They are predictable: the function replaces itself exactly once. Adaptive algorithms and hot-reloading require more caution. Always implement audit trails, rollback mechanisms, and modification limits. Avoid self-modification in security-sensitive code paths. The key principle is predictability: if you can enumerate all possible states a function can be in, the pattern is manageable. If modifications are unbounded or data-dependent, the complexity becomes a liability.

How does self-modifying code interact with V8 optimization?

V8's TurboFan compiler creates optimized machine code based on observed type profiles. When a function replaces itself, V8 must deoptimize and reoptimize for the new function. This is fine for one-time replacements (lazy init) but harmful for frequently changing functions. V8 may mark a function as "megamorphic" if it sees too many different implementations, permanently disabling certain optimizations. Keep self-modification to initialization phases and avoid modifying functions in hot loops.

How do I debug self-modifying code?

Self-modifying code is harder to debug because the code you see in the source is not the code that runs. Strategies: log every modification with timestamps and stack traces (audit trail), use named functions so stack traces are readable, implement rollback so you can restore original behavior for testing, and add a "debug mode" that disables self-modification entirely. Browser DevTools show the current function source in the console, not the original source, so breakpoints may not work as expected on replaced functions.

Can self-modifying code cause memory leaks?

Yes, if old function references are captured in closures. When a function replaces itself but the old version is referenced by an event listener, timer, or closure, both versions remain in memory. Use WeakMap to associate data with functions so old data is collected with old functions. For lazy properties, `Object.defineProperty` with `configurable: true` allows proper replacement. Always verify that old references are released when functions are replaced.

Conclusion

Self-modifying JavaScript follows a spectrum from safe lazy initialization to powerful but risky runtime code generation. Use the lightest pattern that solves your problem and add guardrails (audit trails, rollback, limits) as complexity increases. For the metaprogramming toolkit that enables these patterns, see JS Metaprogramming Advanced Architecture Guide. For UI framework architectures that leverage these techniques, explore Creating Advanced UI Frameworks in JavaScript.