<%# Fragment caching - caches rendered HTML %>
<% cache @product do %>
  <h1><%= @product.name %></h1>
  <p><%= @product.description %></p>
  <p>Price: $<%= @product.price %></p>
<% end %>
<%# Cache with specific key %>
<% cache ['products', @product.id, @product.updated_at] do %>
  <%= render @product %>
<% end %>
<%# Russian doll caching - nested fragments %>
<% cache @post do %>
  <h1><%= @post.title %></h1>
  <% cache ['comments', @post.comments.maximum(:updated_at)] do %>
    <% @post.comments.each do |comment| %>
      <% cache comment do %>
        <%= render comment %>
      <% end %>
    <% end %>
  <% end %>
<% end %>
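Russian doll invalidation only cascades upward if the inner records touch their parent; a minimal sketch of the Comment model this assumes:
class Comment < ApplicationRecord
  # touch: true bumps post.updated_at whenever a comment is created, updated,
  # or destroyed, expiring the outer post fragment as well as the comment's own
  belongs_to :post, touch: true
end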
<%# Conditional caching %>
<% cache_if user_signed_in?, @product do %>
  <%= render 'detailed_product', product: @product %>
<% end %>
<%# Collection caching %>
<%= render partial: 'products/product', collection: @products, cached: true %>
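Collection caching stores one fragment per rendered item and fetches previously cached fragments in bulk; a sketch of the partial it assumes, at app/views/products/_product.html.erb:
<%# Rendered once per product; with cached: true each render is stored under the product's cache key %>
<div class="product">
  <h2><%= product.name %></h2>
  <p><%= number_to_currency(product.price) %></p>
</div>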
# Fetch or compute pattern
def expensive_computation(user_id)
  Rails.cache.fetch("user/#{user_id}/stats", expires_in: 1.hour) do
    # This block only runs on cache miss
    calculate_user_statistics(user_id)
  end
end
# Write to cache
Rails.cache.write('user/123/profile', user.profile, expires_in: 30.minutes)
# Read from cache
profile = Rails.cache.read('user/123/profile')
# Delete from cache
Rails.cache.delete('user/123/profile')
# Multi-key operations
Rails.cache.read_multi('user/1', 'user/2', 'user/3')
Rails.cache.write_multi({ 'user/1' => data1, 'user/2' => data2 })
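Rails.cache.fetch_multi combines read_multi with the fetch-or-compute pattern; the block runs only for keys missing from the cache (compute_stats_for is a hypothetical helper):
stats = Rails.cache.fetch_multi('user/1', 'user/2', 'user/3', expires_in: 1.hour) do |key|
  compute_stats_for(key) # executed once per missing key; result is written back to the cache
end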
# Increment/Decrement (for counters)
Rails.cache.increment('page_views')
Rails.cache.decrement('items_in_stock')
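With the memcached and Redis stores, a counter generally has to be seeded as a raw integer before increment/decrement behave atomically; a sketch:
Rails.cache.write('page_views', 0, raw: true) # raw: true stores the bare integer the backend can increment in place
Rails.cache.increment('page_views')    # => 1
Rails.cache.increment('page_views', 5) # => 6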
# Stampede protection with race_condition_ttl
Rails.cache.fetch('key', race_condition_ttl: 5.seconds) do
  # While one process recomputes the expired entry, other processes briefly
  # keep serving the stale value instead of all recomputing at once
  expensive_operation
end
# Check whether a key exists in the cache
if Rails.cache.exist?('key')
  Rails.cache.read('key')
else
  # Cache miss logic (note: fetch is usually preferable, since the entry
  # can expire between the exist? and read calls)
end
class Post < ApplicationRecord
  # touch: true bumps the user's updated_at whenever this post changes,
  # invalidating any cache keyed on the user.
  # counter_cache: true maintains users.posts_count, avoiding COUNT queries
  # (requires the posts_count column; see the migration sketch after this class).
  belongs_to :user, touch: true, counter_cache: true
  has_many :comments, dependent: :destroy

  # Cached method
  def expensive_calculation
    Rails.cache.fetch([cache_key, __method__]) do
      # Complex calculation here
      sleep(2)
      'result'
    end
  end

  # Custom versioned cache key (overrides the built-in cache_key_with_version)
  def cache_key_with_version
    "posts/#{id}-#{updated_at.to_i}/v2"
  end

  # Clear cached values after any commit
  after_commit :clear_cache

  private

  def clear_cache
    Rails.cache.delete([cache_key, 'expensive_calculation'])
  end
end
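The counter cache above relies on a posts_count column on users; a hypothetical migration to add and backfill it (assuming User has_many :posts and a Rails 7.x migration version):
class AddPostsCountToUsers < ActiveRecord::Migration[7.1]
  def up
    add_column :users, :posts_count, :integer, default: 0, null: false
    # Backfill existing rows so the counter matches reality
    User.find_each { |user| User.reset_counters(user.id, :posts) }
  end

  def down
    remove_column :users, :posts_count
  end
end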
# Query caching (automatic in Rails for same query in request)
Post.where(published: true).load # Hits database
Post.where(published: true).load # Returns cached result
# Disable query cache
Post.uncached do
  Post.where(published: true).load # Always hits database
end
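The query cache is switched on automatically around controller requests; outside of one (a rake task or script, say), it can be enabled explicitly for a block:
# Enable the SQL query cache for the duration of the block
ActiveRecord::Base.cache do
  Post.where(published: true).load # hits the database
  Post.where(published: true).load # served from the query cache
end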
# Cache across requests with scopes
class Product < ApplicationRecord
  def self.featured
    Rails.cache.fetch('products/featured', expires_in: 1.hour) do
      where(featured: true).includes(:images).to_a
    end
  end
end
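A cached class-level query like this does not notice when the underlying rows change, so it either waits out expires_in or is deleted explicitly; one possible invalidation hook (an assumption, reopening the Product class above):
class Product < ApplicationRecord
  # Drop the cached featured list whenever any product changes
  after_commit { Rails.cache.delete('products/featured') }
end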
# ETags for HTTP caching (in controller)
class ProductsController < ApplicationController
  def show
    @product = Product.find(params[:id])
    fresh_when(@product) # Sets ETag and Last-Modified
    # or
    if stale?(@product)
      respond_to do |format|
        format.html
        format.json { render json: @product }
      end
    end
  end
end
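Beyond conditional GETs, Cache-Control can be set directly; expires_in tells browsers and proxies how long they may reuse the response:
class ProductsController < ApplicationController
  def index
    @products = Product.featured
    expires_in 1.hour, public: true # Cache-Control: max-age=3600, public
  end
end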
# config/environments/production.rb
config.cache_store = :redis_cache_store, {
  url: ENV['REDIS_URL'],
  connect_timeout: 30,
  read_timeout: 0.2,
  write_timeout: 0.2,
  reconnect_attempts: 1,
  error_handler: -> (method:, returning:, exception:) {
    Rails.logger.error("Redis cache error: #{exception.message}")
  },
  namespace: 'myapp',
  expires_in: 1.hour
}
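The Redis cache store assumes the redis client gem is in the bundle:
# Gemfile
gem 'redis'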
# Use different cache stores
config.cache_store = :memory_store, { size: 64.megabytes }
config.cache_store = :file_store, Rails.root.join('tmp/cache')
config.cache_store = :mem_cache_store, 'localhost:11211'
config.cache_store = :null_store # Disable caching
# Multiple/layered cache stores: Rails has no built-in :multi_store; layering
# (e.g. an in-memory cache in front of Redis) requires a custom
# ActiveSupport::Cache::Store subclass or a third-party gem
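Caching is disabled in development by default; bin/rails dev:cache toggles it by creating or removing tmp/caching-dev.txt, which the generated development.rb checks (roughly):
# config/environments/development.rb (generated default, abbreviated)
if Rails.root.join('tmp/caching-dev.txt').exist?
  config.action_controller.perform_caching = true
  config.cache_store = :memory_store
else
  config.action_controller.perform_caching = false
  config.cache_store = :null_store
end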
Rails caching dramatically improves performance by avoiding expensive computations and queries. Fragment caching caches rendered view partials. Russian doll caching nests fragments so an outer fragment can be rebuilt cheaply by reusing its unchanged inner fragments. Low-level caching stores arbitrary data, and Rails.cache.fetch simplifies the cache-or-compute pattern. Cache keys built from updated_at, combined with touch: true, give automatic key-based invalidation. I use memcached or Redis for distributed caching. Counter caches avoid repeated COUNT queries. Query caching prevents duplicate identical queries within a single request. HTTP caching with ETags and Last-Modified headers lets clients skip full responses and reduces server load. Cache sweepers were removed from Rails core; key-based expiration is now the standard way to deal with stale data. A proper caching strategy balances freshness against performance, and cache invalidation is the part that is crucial to get right: as Phil Karlton put it, cache invalidation and naming things are the two hard problems in computer science.