
Configure Nginx Microcaching for Dynamic Content

By Admin · Mar 15, 2026 · Updated Apr 25, 2026

Microcaching is the technique of caching dynamic content for very short periods, typically 1-10 seconds. Even a 1-second cache absorbs traffic spikes: with a 1-second TTL, 1,000 requests per second collapse to roughly one backend request per second. This guide covers implementing microcaching in Nginx for dynamic applications.
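
That collapse is easy to measure once the configuration below is in place. A rough sketch, assuming ApacheBench (ab) is installed and the backend keeps its own access log (the /var/log/backend/access.log path here is a placeholder):

# Send 1,000 requests over a few seconds...
ab -n 1000 -c 100 https://example.com/

# ...then count how many reached the backend in that window. With a
# 1-second TTL it should be on the order of one per second, not 1,000.
wc -l < /var/log/backend/access.log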

Basic Microcaching Setup

# /etc/nginx/conf.d/microcache.conf
# Define the cache zone
proxy_cache_path /var/cache/nginx/microcache
    levels=1:2
    keys_zone=microcache:10m
    max_size=1g
    inactive=10m
    use_temp_path=off;

server {
    listen 443 ssl http2;
    server_name example.com;
    # ssl_certificate / ssl_certificate_key omitted for brevity

    # Don't cache POST requests or authenticated users
    set $skip_cache 0;
    if ($request_method = POST) { set $skip_cache 1; }
    if ($http_cookie ~* "session_id|logged_in|wordpress_logged_in") { set $skip_cache 1; }

    location / {
        proxy_pass http://127.0.0.1:8080;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;

        # Microcaching configuration
        proxy_cache microcache;
        proxy_cache_valid 200 301 302 1s;  # TTL must be a literal; proxy_cache_valid does not accept variables
        proxy_cache_valid 404 10s;
        proxy_cache_use_stale error timeout updating http_500 http_502 http_503;
        proxy_cache_lock on;
        proxy_cache_lock_timeout 5s;
        proxy_cache_lock_age 5s;
        proxy_cache_bypass $skip_cache;
        proxy_no_cache $skip_cache;

        # Add cache status header for debugging
        add_header X-Cache-Status $upstream_cache_status always;

        # Cache key
        proxy_cache_key "$scheme$host$request_uri";
    }

    # Never cache admin areas
    location /admin/ {
        proxy_pass http://127.0.0.1:8080;
        proxy_set_header Host $host;
        proxy_no_cache 1;
        proxy_cache_bypass 1;
    }

    # Longer cache for API endpoints that change infrequently
    location /api/catalog/ {
        proxy_pass http://127.0.0.1:8080;
        proxy_set_header Host $host;
        proxy_cache microcache;
        proxy_cache_key "$scheme$host$request_uri";  # match the key used above
        proxy_cache_valid 200 10s;  # 10-second cache for catalog
        proxy_cache_lock on;
        proxy_cache_bypass $skip_cache;
        proxy_no_cache $skip_cache;
        add_header X-Cache-Status $upstream_cache_status always;
    }
}
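
With the file in place, create the cache directory, validate, and reload. A minimal smoke test, assuming the worker runs as the nginx user and systemd manages the service:

# Create the cache directory and hand it to the worker user
sudo mkdir -p /var/cache/nginx/microcache
sudo chown -R nginx:nginx /var/cache/nginx/microcache

# Validate the configuration and reload
sudo nginx -t && sudo systemctl reload nginx

# The first request should report MISS; an immediate repeat should report HIT
curl -s -o /dev/null -D - https://example.com/ | grep -i '^x-cache-status'
curl -s -o /dev/null -D - https://example.com/ | grep -i '^x-cache-status'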

Understanding Cache Behavior

# X-Cache-Status header values:
# HIT      — served from cache
# MISS     — fetched from backend, now cached
# BYPASS   — cache was bypassed (POST, authenticated)
# EXPIRED  — cache entry expired, fetched fresh
# STALE    — served stale content (backend error)
# UPDATING — stale content served while refreshing

# proxy_cache_lock ON is critical:
# Without it: 1000 concurrent requests = 1000 backend requests
# With it: 1000 concurrent requests = 1 backend request + 999 wait for cache

# proxy_cache_use_stale is your safety net:
# If the backend is down, serve the last cached version instead of 502
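
These states are easy to observe against the configuration above (example.com is the placeholder host throughout):

# Three spaced requests: expect MISS, then HIT, then EXPIRED once the
# 1-second TTL has lapsed
for i in 1 2 3; do
    curl -s -o /dev/null -D - https://example.com/ | grep -i '^x-cache-status'
    sleep 0.5
done

# 20 parallel requests to one URL: with proxy_cache_lock on, only one
# should reach the backend (one MISS, the rest HIT after waiting on the lock)
seq 1 20 | xargs -P 20 -I{} curl -s -o /dev/null -D - https://example.com/ \
    | grep -ic '^x-cache-status: hit'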

Advanced Patterns

# Vary the cache by the Accept header (content negotiation / API versioning);
# beware that high-cardinality Accept values fragment the cache
proxy_cache_key "$scheme$host$request_uri$http_accept";

# Cache based on query parameters
proxy_cache_key "$scheme$host$uri$is_args$args";

# Purge cache (requires the third-party ngx_cache_purge module)
location ~ /purge(/.*) {
    allow 127.0.0.1;
    deny all;
    proxy_cache_purge microcache "$scheme$host$1";
}
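
A usage sketch, run from the server itself since the location only allows 127.0.0.1 (the /api/catalog/items path is hypothetical; -k skips certificate verification because we connect by IP):

# Evict the cached copy of one URL from the microcache zone
curl -sk -H 'Host: example.com' https://127.0.0.1/purge/api/catalog/items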

# Stale-while-revalidate (serve stale, refresh in the background; requires nginx 1.11.10+)
proxy_cache_background_update on;
proxy_cache_use_stale updating;

# Different cache durations per content type
# proxy_cache_valid does not accept variables, so a map cannot set the TTL
# directly. Use separate location blocks with literal values instead:
location ~* \.json$ {
    proxy_pass http://127.0.0.1:8080;
    proxy_cache microcache;
    proxy_cache_valid 200 5s;
}
location ~* /feed {
    proxy_pass http://127.0.0.1:8080;
    proxy_cache microcache;
    proxy_cache_valid 200 30s;
}
# Alternatively, have the backend emit an X-Accel-Expires header,
# which overrides proxy_cache_valid per response.

Monitor Cache Performance

# Check cache hit rate from logs (with the "cache" log format below,
# $upstream_cache_status is the second-to-last field)
awk '{print $(NF-1)}' /var/log/nginx/access.log | sort | uniq -c | sort -rn

# Add to log format:
log_format cache '$remote_addr - [$time_local] "$request" $status '
    '$body_bytes_sent $upstream_cache_status $upstream_response_time';
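
To turn the raw counts into percentages, a one-liner sketch (same second-to-last-field assumption as above, and assuming the cache format writes to access.log):

# Percentage breakdown of X-Cache-Status values
awk '{s[$(NF-1)]++; n++} END {for (k in s) printf "%-8s %6d  %5.1f%%\n", k, s[k], 100*s[k]/n}' \
    /var/log/nginx/access.log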

# Check cache directory
du -sh /var/cache/nginx/microcache/
find /var/cache/nginx/microcache/ -type f | wc -l  # count cached objects (levels=1:2 nests them in subdirectories)

Best Practices

  • Start with 1-second cache: Even 1 second eliminates thundering herd problems
  • Always enable proxy_cache_lock: Prevents multiple simultaneous backend requests for the same resource
  • Use proxy_cache_use_stale: Serves old content during backend failures instead of errors
  • Skip the cache for authenticated users: Prevents serving one user's personalized content to another
  • Add X-Cache-Status header for debugging and monitoring cache effectiveness
  • Enable background updates for seamless cache refresh without user-visible latency
