Docs / Performance Optimization / Edge Caching Cloudflare Workers

Edge Caching with Cloudflare Workers

By Admin · Mar 15, 2026 · Updated Apr 24, 2026 · 145 views · 6 min read

Cloudflare Workers run JavaScript at over 300 edge locations worldwide, executing within milliseconds of your users. By implementing edge caching logic in Workers, you can serve dynamic content from the edge, reduce origin server load by 90%+, and deliver sub-50ms response times globally. This guide covers practical Worker patterns for edge caching beyond what Cloudflare's standard cache rules can achieve.

Getting Started

# Install Wrangler CLI
npm install -g wrangler

# Authenticate
wrangler login

# Create a new Worker project
wrangler init edge-cache
cd edge-cache

Basic Edge Caching Worker

// src/index.js
export default {
    /**
     * Basic edge-caching worker: serve from the Cloudflare edge cache when
     * possible, otherwise fetch from the origin and store the response.
     *
     * @param {Request} request - incoming request
     * @param {Object} env - Worker bindings (unused here)
     * @param {ExecutionContext} ctx - used for non-blocking cache writes
     * @returns {Promise<Response>} cached or origin response, tagged with X-Cache
     */
    async fetch(request, env, ctx) {
        // The Cache API only stores GET entries — cache.put() throws for
        // any other method — so pass POST/PUT/etc. straight to the origin.
        if (request.method !== 'GET') {
            return fetch(request);
        }

        const url = new URL(request.url);
        const cacheKey = new Request(url.toString(), request);
        const cache = caches.default;

        // Check edge cache first
        let response = await cache.match(cacheKey);
        if (response) {
            // Cache HIT — copy so headers are mutable, then tag for debugging
            response = new Response(response.body, response);
            response.headers.set('X-Cache', 'HIT');
            return response;
        }

        // Cache MISS — fetch from origin
        response = await fetch(request);

        // Only cache successful (2xx) responses
        if (response.ok) {
            const cachedResponse = new Response(response.body, response);
            cachedResponse.headers.set('Cache-Control', 'public, max-age=60');
            cachedResponse.headers.set('X-Cache', 'MISS');

            // Store in edge cache without delaying the response
            ctx.waitUntil(cache.put(cacheKey, cachedResponse.clone()));

            return cachedResponse;
        }

        return response;
    },
};

Smart Cache Key Generation

// Cache different versions based on device, country, or other factors
// Cache different versions based on device, country, or other factors
export default {
    /**
     * Edge caching with variation-aware cache keys: caches a separate copy
     * per device type and country, derived from Cloudflare request headers.
     *
     * @param {Request} request - incoming request
     * @param {Object} env - Worker bindings (unused here)
     * @param {ExecutionContext} ctx - used for non-blocking cache writes
     * @returns {Promise<Response>} variant-specific cached or origin response
     */
    async fetch(request, env, ctx) {
        // cache.put() rejects non-GET requests, so bypass caching for them.
        if (request.method !== 'GET') {
            return fetch(request);
        }

        const url = new URL(request.url);
        const cache = caches.default;

        // Build cache key with relevant variations
        const device = request.headers.get('CF-Device-Type') || 'desktop';
        const country = request.headers.get('CF-IPCountry') || 'US';

        // Encode device + country into synthetic query params so each
        // variant gets its own cache entry
        const cacheUrl = new URL(url.toString());
        cacheUrl.searchParams.set('_device', device);
        cacheUrl.searchParams.set('_country', country);
        const cacheKey = new Request(cacheUrl.toString());

        let response = await cache.match(cacheKey);
        if (response) {
            response = new Response(response.body, response);
            response.headers.set('X-Cache', 'HIT');
            response.headers.set('X-Cache-Key', `${device}-${country}`);
            return response;
        }

        // Add device/country headers so the origin generates the right variant
        const modifiedRequest = new Request(request);
        modifiedRequest.headers.set('X-Device-Type', device);
        modifiedRequest.headers.set('X-Country', country);

        response = await fetch(modifiedRequest);

        if (response.ok) {
            const cached = new Response(response.body, response);
            cached.headers.set('Cache-Control', 'public, max-age=300');
            cached.headers.set('X-Cache', 'MISS');
            ctx.waitUntil(cache.put(cacheKey, cached.clone()));
            return cached;
        }

        return response;
    },
};

Stale-While-Revalidate Pattern

// Serve stale content immediately while refreshing in background
export default {
    /**
     * Stale-while-revalidate: serve fresh hits directly, serve stale hits
     * immediately while refreshing in the background, and go to origin
     * only when there is no usable cached copy.
     *
     * @param {Request} request - incoming request
     * @param {Object} env - Worker bindings (unused here)
     * @param {ExecutionContext} ctx - used for background revalidation
     * @returns {Promise<Response>} response tagged HIT-FRESH / HIT-STALE / MISS
     */
    async fetch(request, env, ctx) {
        const cache = caches.default;
        const cacheKey = new Request(request.url, request);

        const cached = await cache.match(cacheKey);

        if (cached) {
            // BUG FIX: derive the age from the stored X-Cache-Time stamp.
            // X-Cache-Age is only added to *outgoing* responses by
            // addHeaders() and is never present on the cached copy, so the
            // old code always read 0 and never revalidated anything.
            const cacheTime = parseInt(cached.headers.get('X-Cache-Time') || '0', 10);
            const age = Math.floor((Date.now() - cacheTime) / 1000);
            const maxAge = 60;  // 1 minute fresh
            const staleAge = 3600;  // 1 hour stale OK

            if (age < maxAge) {
                // Still fresh
                return addHeaders(cached, 'HIT-FRESH');
            } else if (age < staleAge) {
                // Stale but usable — serve stale, revalidate in background
                ctx.waitUntil(revalidate(request, cacheKey, cache));
                return addHeaders(cached, 'HIT-STALE');
            }
        }

        // No cache or too stale — fetch fresh
        return fetchAndCache(request, cacheKey, cache, ctx);
    },
};

// Fetch a fresh copy from the origin and overwrite the cached entry.
// Runs in the background via ctx.waitUntil(), after the stale response
// has already been sent to the client. Non-2xx origin responses are
// ignored so a flapping origin can't evict a good cached copy.
async function revalidate(request, cacheKey, cache) {
    const fresh = await fetch(request);
    if (!fresh.ok) {
        return;
    }
    await cache.put(cacheKey, addTimestamp(fresh));
}

// Fetch from the origin; on success, stamp the response with the cache
// time, store a copy at the edge without blocking the reply, and return
// the stamped response tagged as a MISS. Error responses pass through
// uncached and untagged.
async function fetchAndCache(request, cacheKey, cache, ctx) {
    const fresh = await fetch(request);
    if (!fresh.ok) {
        return fresh;
    }
    const stamped = addTimestamp(fresh);
    ctx.waitUntil(cache.put(cacheKey, stamped.clone()));
    return addHeaders(stamped, 'MISS');
}

// Return a mutable copy of `response` stamped with the current epoch
// milliseconds in X-Cache-Time; addHeaders() later uses this stamp to
// compute the entry's age.
function addTimestamp(response) {
    const stamped = new Response(response.body, response);
    stamped.headers.set('X-Cache-Time', `${Date.now()}`);
    return stamped;
}

// Return a copy of `response` annotated for debugging: X-Cache carries
// the hit/miss status and X-Cache-Age the entry's age in whole seconds,
// derived from the X-Cache-Time stamp (missing stamp => age since epoch).
function addHeaders(response, status) {
    const annotated = new Response(response.body, response);
    const stampedAt = parseInt(annotated.headers.get('X-Cache-Time') || '0', 10);
    const ageSeconds = Math.floor((Date.now() - stampedAt) / 1000);
    annotated.headers.set('X-Cache', status);
    annotated.headers.set('X-Cache-Age', `${ageSeconds}`);
    return annotated;
}

API Response Caching

// Cache API responses with short TTLs and cache-busting
export default {
    /**
     * API response caching: short-TTL edge caching for anonymous GET
     * requests under /api/; everything else bypasses the cache.
     *
     * @param {Request} request - incoming request
     * @param {Object} env - Worker bindings (unused here)
     * @param {ExecutionContext} ctx - used for non-blocking cache writes
     * @returns {Promise<Response>} cached or origin API response
     */
    async fetch(request, env, ctx) {
        const url = new URL(request.url);

        // Only cache GET requests to /api/
        if (request.method !== 'GET' || !url.pathname.startsWith('/api/')) {
            return fetch(request);
        }

        // Skip cache for authenticated requests — their responses may be
        // user-specific and must not be shared.
        if (request.headers.get('Authorization')) {
            return fetch(request);
        }

        const cache = caches.default;
        const cacheKey = new Request(url.toString());

        let response = await cache.match(cacheKey);
        if (response) {
            // Tag hits for debugging, consistent with the other examples.
            response = new Response(response.body, response);
            response.headers.set('X-Cache', 'HIT');
            return response;
        }

        response = await fetch(request);

        if (response.ok) {
            const cached = new Response(response.body, response);
            // Short TTL for API data
            cached.headers.set('Cache-Control', 'public, max-age=10, s-maxage=30');
            cached.headers.set('X-Cache', 'MISS');
            ctx.waitUntil(cache.put(cacheKey, cached.clone()));
            return cached;
        }

        return response;
    },
};

Cache Purging

// Purge via Cloudflare API from your application
/**
 * Purge specific URLs from the Cloudflare edge cache via the REST API.
 * Call from the origin application when content changes. ZONE_ID and
 * CF_API_TOKEN are expected to be supplied by the app's configuration.
 *
 * @param {string[]} urls - fully-qualified URLs to purge
 * @returns {Promise<Object>} parsed Cloudflare API response
 * @throws {Error} when the API returns a non-2xx status
 */
async function purgeCache(urls) {
    const response = await fetch(
        `https://api.cloudflare.com/client/v4/zones/${ZONE_ID}/purge_cache`,
        {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${CF_API_TOKEN}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({ files: urls }),
        }
    );
    // Surface API failures instead of silently returning an error body.
    if (!response.ok) {
        throw new Error(`Cache purge failed: ${response.status} ${response.statusText}`);
    }
    return response.json();
}

// Purge everything
async function purgeAll() {
    return fetch(
        `https://api.cloudflare.com/client/v4/zones/${ZONE_ID}/purge_cache`,
        {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${CF_API_TOKEN}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({ purge_everything: true }),
        }
    );
}

// Call from your CMS/app when content changes:
// await purgeCache(['https://example.com/blog/updated-post']);

KV Store for Edge State

// Use Workers KV for configuration and longer-lived cache
// wrangler.toml:
// [[kv_namespaces]]
// binding = "CACHE"
// id = "abc123"

export default {
    /**
     * KV-backed page cache: serve HTML from Workers KV (which persists
     * across edge restarts) and repopulate from the origin on miss with a
     * 5-minute TTL.
     *
     * @param {Request} request - incoming request
     * @param {Object} env - Worker bindings; requires a KV namespace bound as CACHE
     * @param {ExecutionContext} ctx - used for non-blocking KV writes
     * @returns {Promise<Response>} cached or origin page
     */
    async fetch(request, env, ctx) {
        // Only GET responses are safe to cache and replay; pass every
        // other method straight to the origin.
        if (request.method !== 'GET') {
            return fetch(request);
        }

        const url = new URL(request.url);
        // Include the query string so /page?a=1 and /page?a=2 don't
        // collide on the same cached body.
        const key = `page:${url.pathname}${url.search}`;

        // Check KV cache (persists across edge restarts)
        const cached = await env.CACHE.get(key, { type: 'text' });
        if (cached) {
            return new Response(cached, {
                headers: {
                    'Content-Type': 'text/html',
                    'X-Cache': 'KV-HIT',
                    'Cache-Control': 'public, max-age=60',
                },
            });
        }

        const response = await fetch(request);
        const body = await response.text();

        if (response.ok) {
            // Store in KV with 5-minute TTL, without delaying the response
            ctx.waitUntil(
                env.CACHE.put(key, body, { expirationTtl: 300 })
            );
        }

        return new Response(body, response);
    },
};

Deployment

# Deploy Worker
wrangler deploy

# Preview locally
wrangler dev

# Add route in wrangler.toml
# routes = [
#     { pattern = "example.com/*", zone_name = "example.com" }
# ]

# Monitor in dashboard
# Workers & Pages > your-worker > Analytics
# Shows: requests, CPU time, errors, and cache hit rate

Summary

Cloudflare Workers enable caching strategies that are impossible with traditional CDN cache rules. The stale-while-revalidate pattern ensures users always get instant responses while content stays fresh in the background. Smart cache keys enable caching personalized content by device, location, or other factors. For API endpoints, even short 10-30 second cache TTLs at the edge can reduce origin load by 90%+ during traffic spikes. Combined with Workers KV for persistent edge state, you can build a globally distributed caching layer that keeps your origin server's load minimal regardless of traffic volume.

Was this article helpful?