// exercises.js
/**
 * 21.3 Performance Optimization - Exercises
 *
 * Practice implementing performance optimization patterns
 */

/**
 * Exercise 1: Advanced Memoization
 *
 * Build a memoization system with:
 * - TTL (time-to-live) support
 * - LRU eviction policy
 * - Cache statistics
 * - Async function support
 * - Custom key generation
 * - Cache warming
 *
 * Requirements:
 * 1. Support both sync and async functions
 * 2. Implement proper LRU eviction
 * 3. Track cache hits, misses, and evictions
 * 4. Allow cache pre-population (warming)
 */

class AdvancedMemoizer {
  constructor(fn, options = {}) {
    // TODO: Initialize with options
    this.fn = fn;
    this.maxSize = options.maxSize || 100;
    this.ttl = options.ttl || Infinity;
    this.keyGenerator = options.keyGenerator || ((...args) => JSON.stringify(args));
    this.cache = new Map();
    this.stats = { hits: 0, misses: 0, evictions: 0 };
  }

  // Get or compute value
  async get(...args) {
    // TODO: Check cache, compute if missing
    // Handle TTL expiration
    // Update LRU order
  }

  // Pre-populate cache
  warm(entries) {
    // TODO: Add entries to cache
  }

  // Evict LRU entry
  evict() {
    // TODO: Remove least recently used entry
  }

  // Clear cache
  clear() {
    // TODO: Clear all entries
  }

  // Get statistics
  getStats() {
    // TODO: Return cache statistics
  }
}

// Test your implementation
function testAdvancedMemoizer() {
  const memoizer = new AdvancedMemoizer(
    async (x) => {
      await new Promise((r) => setTimeout(r, 100));
      return x * 2;
    },
    { maxSize: 3, ttl: 5000 }
  );

  console.log('Testing Advanced Memoizer...');

  // Add your tests here
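
  // For example (a minimal sketch; assumes get() resolves to the computed
  // value and getStats() reports hits and misses as described above):
  (async () => {
    console.log(await memoizer.get(2)); // miss: computed after ~100ms, logs 4
    console.log(await memoizer.get(2)); // hit: served from cache immediately
    console.log(memoizer.getStats()); // e.g. { hits: 1, misses: 1, ... }
  })();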
}

// Uncomment to test
// testAdvancedMemoizer();

/**
 * Exercise 2: Worker Pool
 *
 * Implement a worker pool that:
 * - Manages a pool of workers
 * - Distributes tasks efficiently
 * - Handles worker failures
 * - Supports task prioritization
 *
 * Note: This simulates Web Workers in a Node.js environment
 */

class WorkerPool {
  constructor(options = {}) {
    // TODO: Initialize worker pool
    this.poolSize = options.poolSize || 4;
    this.workers = [];
    this.taskQueue = [];
    this.running = new Map();
  }

  // Initialize pool
  initialize() {
    // TODO: Create worker instances
  }

  // Submit task to pool
  submit(task, priority = 0) {
    // TODO: Add task to queue with priority
    // Return promise for result
  }

  // Get next task based on priority
  getNextTask() {
    // TODO: Return highest priority task
  }

  // Assign task to available worker
  assignTask(worker, task) {
    // TODO: Run task on worker
  }

  // Handle worker completion
  onWorkerComplete(worker, result) {
    // TODO: Handle completion, assign next task
  }

  // Handle worker error
  onWorkerError(worker, error) {
    // TODO: Handle error, retry or fail
  }

  // Get pool status
  getStatus() {
    // TODO: Return pool statistics
  }

  // Shutdown pool
  async shutdown() {
    // TODO: Gracefully shutdown all workers
  }
}

// Simulated worker for Node.js
class SimulatedWorker {
  constructor(id) {
    this.id = id;
    this.busy = false;
  }

  async execute(task) {
    this.busy = true;
    try {
      const result = await task.fn(...task.args);
      return { success: true, result };
    } catch (error) {
      return { success: false, error };
    } finally {
      this.busy = false;
    }
  }
}

// Test your implementation
function testWorkerPool() {
  const pool = new WorkerPool({ poolSize: 2 });
  pool.initialize();

  console.log('Testing Worker Pool...');

  // Add your tests here
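
  // For example (a minimal sketch; assumes submit() resolves with the task's
  // return value and getStatus() reports queue/worker counts):
  (async () => {
    const results = await Promise.all([
      pool.submit(async () => 1 + 1, 0),
      pool.submit(async () => 2 + 2, 5), // higher priority, should run earlier
      pool.submit(async () => 3 + 3, 1),
    ]);
    console.log(results); // expected: [2, 4, 6]
    console.log(pool.getStatus()); // expected: nothing queued or running
    await pool.shutdown();
  })();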
}

// Uncomment to test
// testWorkerPool();

/**
 * Exercise 3: Performance Monitor
 *
 * Create a performance monitoring system that:
 * - Tracks function execution times
 * - Monitors memory usage
 * - Detects slow operations
 * - Generates performance reports
 *
 * Requirements:
 * 1. Decorator-style API for easy integration
 * 2. Threshold-based alerting
 * 3. Percentile calculations (p50, p95, p99)
 * 4. Memory leak detection
 */

class PerformanceMonitor {
  constructor(options = {}) {
    // TODO: Initialize monitor
    this.metrics = new Map();
    this.thresholds = options.thresholds || {};
    this.listeners = [];
  }

  // Track function execution
  track(name) {
    // TODO: Return wrapper that tracks execution time
  }

  // Record a timing
  recordTiming(name, duration) {
    // TODO: Add timing to metrics
    // Check thresholds
  }

  // Calculate percentiles
  getPercentile(name, percentile) {
    // TODO: Calculate p50, p95, p99
  }

  // Get stats for a metric
  getStats(name) {
    // TODO: Return min, max, avg, p50, p95, p99
  }

  // Monitor memory
  getMemoryUsage() {
    // TODO: Return current memory stats
  }

  // Detect potential memory leaks
  checkForLeaks() {
    // TODO: Analyze memory trends
  }

  // Add alert listener
  onAlert(callback) {
    // TODO: Register alert callback
  }

  // Generate report
  generateReport() {
    // TODO: Create comprehensive performance report
  }
}

// Test your implementation
function testPerformanceMonitor() {
  const monitor = new PerformanceMonitor({
    thresholds: {
      slowFunction: 100,
    },
  });

  console.log('Testing Performance Monitor...');

  const tracked = monitor.track('slowFunction');

  async function slowFunction() {
    await new Promise((r) => setTimeout(r, 150));
    return 'done';
  }

  // Add your tests here
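
  // For example (a minimal sketch; assumes track() returns a decorator that
  // wraps a function, and that exceeding a threshold fires onAlert):
  monitor.onAlert((info) => console.log('ALERT:', info));
  const wrappedSlow = tracked(slowFunction);
  (async () => {
    await wrappedSlow(); // ~150ms, should exceed the 100ms threshold
    console.log(monitor.getStats('slowFunction')); // count, min/max/avg, p50/p95/p99
    console.log(monitor.getMemoryUsage());
  })();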
}

// Uncomment to test
// testPerformanceMonitor();

/**
 * Exercise 4: Incremental Renderer
 *
 * Build a system for rendering large datasets incrementally:
 * - Progressive rendering
 * - Priority-based rendering
 * - Cancellation support
 * - Frame budget management
 *
 * Requirements:
 * 1. Render within frame budget (16ms)
 * 2. Support priority queues
 * 3. Allow cancellation of pending renders
 * 4. Report render progress
 */

class IncrementalRenderer {
  constructor(options = {}) {
    // TODO: Initialize renderer
    this.frameBudget = options.frameBudget || 16;
    this.renderQueue = [];
    this.isRendering = false;
    this.cancelled = new Set();
  }

  // Queue items for rendering
  render(id, items, renderFn, options = {}) {
    // TODO: Add to render queue
    // Return promise with progress callback
  }

  // Start render loop
  startRenderLoop() {
    // TODO: Process items within frame budget
  }

  // Process single render job
  processJob(job, budget) {
    // TODO: Render as many items as budget allows
  }

  // Cancel render job
  cancel(id) {
    // TODO: Mark job as cancelled
  }

  // Get render status
  getStatus() {
    // TODO: Return queue status
  }
}

// Test your implementation
function testIncrementalRenderer() {
  const renderer = new IncrementalRenderer({ frameBudget: 16 });

  console.log('Testing Incremental Renderer...');

  const items = Array.from({ length: 1000 }, (_, i) => ({ id: i }));

  // Add your tests here
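
  // For example (a minimal sketch; assumes render() resolves once every item
  // has been rendered and accepts an onProgress callback in options):
  renderer
    .render(
      'list',
      items,
      (item, index) => {
        // e.g. create or update a DOM node for this item
      },
      { onProgress: (p) => console.log(`Progress: ${(p * 100).toFixed(0)}%`) }
    )
    .then((result) => console.log('Render finished:', result));
  console.log(renderer.getStatus());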
}

// Uncomment to test
// testIncrementalRenderer();

/**
 * Exercise 5: Smart Data Loader
 *
 * Implement a data loader with:
 * - Request batching
 * - Request deduplication
 * - Caching with invalidation
 * - Predictive preloading
 *
 * Requirements:
 * 1. Batch multiple requests together
 * 2. Deduplicate concurrent requests
 * 3. Smart cache invalidation
 * 4. Predictive preloading
 */

class SmartDataLoader {
  constructor(batchFn, options = {}) {
    // TODO: Initialize loader
    this.batchFn = batchFn;
    this.batchDelay = options.batchDelay || 10;
    this.maxBatchSize = options.maxBatchSize || 100;
    this.cache = new Map();
    this.pending = new Map();
    this.batch = [];
    this.batchTimer = null;
  }

  // Load single item
  async load(key) {
    // TODO: Check cache, batch, or fetch
  }

  // Load multiple items
  async loadMany(keys) {
    // TODO: Efficiently load multiple keys
  }

  // Dispatch batch
  async dispatchBatch() {
    // TODO: Send batched request
  }

  // Prime cache
  prime(key, value) {
    // TODO: Add to cache
  }

  // Clear cache
  clear(key) {
    // TODO: Clear specific key or all
  }

  // Preload based on prediction
  preload(predictedKeys) {
    // TODO: Background load predicted keys
  }
}

// Test your implementation
function testSmartDataLoader() {
  const loader = new SmartDataLoader(
    async (keys) => {
      console.log(`Batch loading: ${keys.join(', ')}`);
      await new Promise((r) => setTimeout(r, 50));
      return keys.map((key) => ({ id: key, data: `Data for ${key}` }));
    },
    { batchDelay: 10, maxBatchSize: 5 }
  );

  console.log('Testing Smart Data Loader...');

  // Add your tests here
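
  // For example (a minimal sketch; assumes concurrent load() calls within
  // batchDelay are combined into one batchFn call and results are cached):
  (async () => {
    const [a, b] = await Promise.all([loader.load('user:1'), loader.load('user:2')]);
    console.log(a, b); // expected: a single "Batch loading: user:1, user:2" log
    console.log(await loader.load('user:1')); // expected: cached, no new batch
  })();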
}

// Uncomment to test
// testSmartDataLoader();

// ============================================================
// SOLUTIONS (Uncomment to reveal)
// ============================================================

/*
// SOLUTION 1: Advanced Memoizer

class AdvancedMemoizerSolution {
    constructor(fn, options = {}) {
        this.fn = fn;
        this.maxSize = options.maxSize || 100;
        this.ttl = options.ttl || Infinity;
        this.keyGenerator = options.keyGenerator || ((...args) => JSON.stringify(args));
        this.cache = new Map();
        this.timestamps = new Map();
        this.accessOrder = [];
        this.stats = { hits: 0, misses: 0, evictions: 0 };
    }
    
    async get(...args) {
        const key = this.keyGenerator(...args);
        const now = Date.now();
        
        if (this.cache.has(key)) {
            const timestamp = this.timestamps.get(key);
            
            if (now - timestamp < this.ttl) {
                this.stats.hits++;
                this.updateAccessOrder(key);
                return this.cache.get(key);
            }
            
            // Expired, remove
            this.cache.delete(key);
            this.timestamps.delete(key);
            this.removeFromAccessOrder(key);
        }
        
        this.stats.misses++;
        
        // Evict if needed
        while (this.cache.size >= this.maxSize) {
            this.evict();
        }
        
        // Compute value
        const value = await this.fn(...args);
        
        // Cache it
        this.cache.set(key, value);
        this.timestamps.set(key, now);
        this.accessOrder.push(key);
        
        return value;
    }
    
    warm(entries) {
        const now = Date.now();
        for (const [key, value] of Object.entries(entries)) {
            if (this.cache.size >= this.maxSize) {
                this.evict();
            }
            this.cache.set(key, value);
            this.timestamps.set(key, now);
            this.accessOrder.push(key);
        }
    }
    
    evict() {
        if (this.accessOrder.length === 0) return;
        
        const keyToEvict = this.accessOrder.shift();
        this.cache.delete(keyToEvict);
        this.timestamps.delete(keyToEvict);
        this.stats.evictions++;
    }
    
    updateAccessOrder(key) {
        this.removeFromAccessOrder(key);
        this.accessOrder.push(key);
    }
    
    removeFromAccessOrder(key) {
        const index = this.accessOrder.indexOf(key);
        if (index > -1) {
            this.accessOrder.splice(index, 1);
        }
    }
    
    clear() {
        this.cache.clear();
        this.timestamps.clear();
        this.accessOrder = [];
    }
    
    getStats() {
        const total = this.stats.hits + this.stats.misses;
        return {
            ...this.stats,
            size: this.cache.size,
            hitRate: total > 0 ? (this.stats.hits / total * 100).toFixed(1) + '%' : '0%'
        };
    }
}


// SOLUTION 2: Worker Pool

class WorkerPoolSolution {
    constructor(options = {}) {
        this.poolSize = options.poolSize || 4;
        this.workers = [];
        this.taskQueue = [];
        this.running = new Map();
        this.taskIdCounter = 0;
    }
    
    initialize() {
        for (let i = 0; i < this.poolSize; i++) {
            this.workers.push(new SimulatedWorker(i));
        }
    }
    
    submit(task, priority = 0) {
        return new Promise((resolve, reject) => {
            const taskId = ++this.taskIdCounter;
            
            this.taskQueue.push({
                id: taskId,
                fn: task,
                args: [],
                priority,
                resolve,
                reject
            });
            
            // Sort by priority (higher first)
            this.taskQueue.sort((a, b) => b.priority - a.priority);
            
            this.processQueue();
        });
    }
    
    getNextTask() {
        return this.taskQueue.shift();
    }
    
    processQueue() {
        const availableWorker = this.workers.find(w => !w.busy);
        
        if (!availableWorker || this.taskQueue.length === 0) {
            return;
        }
        
        const task = this.getNextTask();
        this.assignTask(availableWorker, task);
    }
    
    async assignTask(worker, task) {
        this.running.set(task.id, { worker, task, startTime: Date.now() });
        
        try {
            const result = await worker.execute(task);
            this.onWorkerComplete(worker, task, result);
        } catch (error) {
            this.onWorkerError(worker, task, error);
        }
    }
    
    onWorkerComplete(worker, task, result) {
        this.running.delete(task.id);
        
        if (result.success) {
            task.resolve(result.result);
        } else {
            task.reject(result.error);
        }
        
        this.processQueue();
    }
    
    onWorkerError(worker, task, error) {
        this.running.delete(task.id);
        task.reject(error);
        this.processQueue();
    }
    
    getStatus() {
        return {
            poolSize: this.poolSize,
            busyWorkers: this.workers.filter(w => w.busy).length,
            queuedTasks: this.taskQueue.length,
            runningTasks: this.running.size
        };
    }
    
    async shutdown() {
        // Wait for running tasks
        while (this.running.size > 0) {
            await new Promise(r => setTimeout(r, 100));
        }
        this.workers = [];
    }
}


// SOLUTION 3: Performance Monitor

class PerformanceMonitorSolution {
    constructor(options = {}) {
        this.metrics = new Map();
        this.thresholds = options.thresholds || {};
        this.listeners = [];
        this.memorySnapshots = [];
    }
    
    track(name) {
        return (fn) => {
            return async (...args) => {
                const start = performance.now();
                try {
                    return await fn(...args);
                } finally {
                    const duration = performance.now() - start;
                    this.recordTiming(name, duration);
                }
            };
        };
    }
    
    recordTiming(name, duration) {
        if (!this.metrics.has(name)) {
            this.metrics.set(name, []);
        }
        
        this.metrics.get(name).push({
            duration,
            timestamp: Date.now()
        });
        
        // Check thresholds
        if (this.thresholds[name] && duration > this.thresholds[name]) {
            this.alert({
                type: 'threshold_exceeded',
                metric: name,
                value: duration,
                threshold: this.thresholds[name]
            });
        }
    }
    
    getPercentile(name, percentile) {
        const timings = this.metrics.get(name);
        if (!timings || timings.length === 0) return null;
        
        const sorted = [...timings].map(t => t.duration).sort((a, b) => a - b);
        const index = Math.ceil((percentile / 100) * sorted.length) - 1;
        return sorted[Math.max(0, index)];
    }
    
    getStats(name) {
        const timings = this.metrics.get(name);
        if (!timings || timings.length === 0) return null;
        
        const durations = timings.map(t => t.duration);
        const sum = durations.reduce((a, b) => a + b, 0);
        
        return {
            count: durations.length,
            min: Math.min(...durations),
            max: Math.max(...durations),
            avg: sum / durations.length,
            p50: this.getPercentile(name, 50),
            p95: this.getPercentile(name, 95),
            p99: this.getPercentile(name, 99)
        };
    }
    
    getMemoryUsage() {
        if (typeof process !== 'undefined' && process.memoryUsage) {
            const usage = process.memoryUsage();
            return {
                heapUsed: usage.heapUsed,
                heapTotal: usage.heapTotal,
                external: usage.external,
                rss: usage.rss
            };
        }
        return null;
    }
    
    checkForLeaks() {
        const current = this.getMemoryUsage();
        if (!current) return { detected: false };
        
        this.memorySnapshots.push({
            timestamp: Date.now(),
            ...current
        });
        
        // Keep last 10 snapshots
        if (this.memorySnapshots.length > 10) {
            this.memorySnapshots.shift();
        }
        
        // Check for consistent growth
        if (this.memorySnapshots.length >= 5) {
            const first = this.memorySnapshots[0];
            const last = this.memorySnapshots[this.memorySnapshots.length - 1];
            const growth = (last.heapUsed - first.heapUsed) / first.heapUsed;
            
            if (growth > 0.5) { // 50% growth
                return {
                    detected: true,
                    growth: (growth * 100).toFixed(1) + '%',
                    message: 'Potential memory leak detected'
                };
            }
        }
        
        return { detected: false };
    }
    
    onAlert(callback) {
        this.listeners.push(callback);
    }
    
    alert(info) {
        for (const listener of this.listeners) {
            listener(info);
        }
    }
    
    generateReport() {
        const report = {
            timestamp: new Date().toISOString(),
            metrics: {},
            memory: this.getMemoryUsage(),
            leakCheck: this.checkForLeaks()
        };
        
        for (const [name, _] of this.metrics) {
            report.metrics[name] = this.getStats(name);
        }
        
        return report;
    }
}


// SOLUTION 4: Incremental Renderer

class IncrementalRendererSolution {
    constructor(options = {}) {
        this.frameBudget = options.frameBudget || 16;
        this.renderQueue = [];
        this.isRendering = false;
        this.cancelled = new Set();
        this.jobIdCounter = 0;
    }
    
    render(id, items, renderFn, options = {}) {
        const jobId = id || `job-${++this.jobIdCounter}`;
        const priority = options.priority || 0;
        
        return new Promise((resolve, reject) => {
            this.renderQueue.push({
                id: jobId,
                items: [...items],
                renderFn,
                priority,
                index: 0,
                resolve,
                reject,
                onProgress: options.onProgress
            });
            
            // Sort by priority
            this.renderQueue.sort((a, b) => b.priority - a.priority);
            
            if (!this.isRendering) {
                this.startRenderLoop();
            }
        });
    }
    
    startRenderLoop() {
        this.isRendering = true;
        
        const loop = () => {
            if (this.renderQueue.length === 0) {
                this.isRendering = false;
                return;
            }
            
            const frameStart = performance.now();
            let budget = this.frameBudget;
            
            // Process jobs within budget
            while (budget > 0 && this.renderQueue.length > 0) {
                const job = this.renderQueue[0];
                
                if (this.cancelled.has(job.id)) {
                    this.renderQueue.shift();
                    this.cancelled.delete(job.id);
                    // Settle the promise so callers of render() are not left hanging
                    job.resolve({ completed: false, cancelled: true });
                    continue;
                }
                
                const result = this.processJob(job, budget);
                budget -= result.timeUsed;
                
                if (job.index >= job.items.length) {
                    this.renderQueue.shift();
                    job.resolve({ completed: true, rendered: job.items.length });
                }
            }
            
            // Schedule next frame
            if (typeof requestAnimationFrame !== 'undefined') {
                requestAnimationFrame(loop);
            } else {
                setTimeout(loop, this.frameBudget);
            }
        };
        
        loop();
    }
    
    processJob(job, budget) {
        const startTime = performance.now();
        const startIndex = job.index;
        
        while (job.index < job.items.length) {
            const elapsed = performance.now() - startTime;
            if (elapsed >= budget) break;
            
            job.renderFn(job.items[job.index], job.index);
            job.index++;
        }
        
        // Report progress
        if (job.onProgress) {
            job.onProgress(job.index / job.items.length);
        }
        
        return {
            rendered: job.index - startIndex,
            timeUsed: performance.now() - startTime
        };
    }
    
    cancel(id) {
        this.cancelled.add(id);
    }
    
    getStatus() {
        return {
            isRendering: this.isRendering,
            queueLength: this.renderQueue.length,
            jobs: this.renderQueue.map(j => ({
                id: j.id,
                progress: (j.index / j.items.length * 100).toFixed(1) + '%'
            }))
        };
    }
}


// SOLUTION 5: Smart Data Loader

class SmartDataLoaderSolution {
    constructor(batchFn, options = {}) {
        this.batchFn = batchFn;
        this.batchDelay = options.batchDelay || 10;
        this.maxBatchSize = options.maxBatchSize || 100;
        this.cache = new Map();
        this.pending = new Map();
        this.batch = [];
        this.batchTimer = null;
    }
    
    async load(key) {
        // Check cache
        if (this.cache.has(key)) {
            return this.cache.get(key);
        }
        
        // Deduplicate: reuse the in-flight promise for this key
        if (this.pending.has(key)) {
            return this.pending.get(key);
        }
        
        // Add to batch and register the promise so concurrent loads share it;
        // dispatchBatch settles it through the queued resolve/reject pair
        const promise = new Promise((resolve, reject) => {
            this.batch.push({ key, resolve, reject });
            
            if (this.batch.length >= this.maxBatchSize) {
                this.dispatchBatch();
            } else if (!this.batchTimer) {
                this.batchTimer = setTimeout(() => this.dispatchBatch(), this.batchDelay);
            }
        });
        
        this.pending.set(key, promise);
        return promise;
    }
    
    async loadMany(keys) {
        return Promise.all(keys.map(key => this.load(key)));
    }
    
    async dispatchBatch() {
        if (this.batchTimer) {
            clearTimeout(this.batchTimer);
            this.batchTimer = null;
        }
        
        if (this.batch.length === 0) return;
        
        const currentBatch = this.batch;
        this.batch = [];
        
        const keys = currentBatch.map(b => b.key);
        
        try {
            const results = await this.batchFn(keys);
            
            currentBatch.forEach((item, index) => {
                const result = results[index];
                this.cache.set(item.key, result);
                this.pending.delete(item.key);
                item.resolve(result);
            });
        } catch (error) {
            currentBatch.forEach(item => {
                this.pending.delete(item.key);
                item.reject(error);
            });
        }
    }
    
    prime(key, value) {
        this.cache.set(key, value);
    }
    
    clear(key) {
        if (key) {
            this.cache.delete(key);
        } else {
            this.cache.clear();
        }
    }
    
    preload(predictedKeys) {
        // Filter out already cached keys
        const keysToLoad = predictedKeys.filter(key => !this.cache.has(key));
        
        // Load in background
        if (keysToLoad.length > 0) {
            this.loadMany(keysToLoad).catch(() => {});
        }
    }
}

console.log('Solutions loaded. Uncomment test functions to verify implementations.');
*/

module.exports = {
  AdvancedMemoizer,
  WorkerPool,
  SimulatedWorker,
  PerformanceMonitor,
  IncrementalRenderer,
  SmartDataLoader,
};