# Durable outbox row: one record per domain event, written in the same
# transaction as the state change it announces.
class OutboxEvent < ApplicationRecord
  # Lifecycle: pending -> publishing -> published, with failed as a terminal state.
  enum status: { pending: 0, publishing: 1, published: 2, failed: 3 }

  validates :event_name, presence: true
  validates :aggregate_type, presence: true
  validates :aggregate_id, presence: true
  # dedupe_key lets consumers deduplicate replays.
  validates :dedupe_key, presence: true, uniqueness: true

  # Pending rows in stable insertion order, ready for the publisher.
  scope :ready, -> { where(status: :pending).order(:id) }
end
# Places an order and records its outbox event atomically, so the
# database write and the eventual publish succeed or fail together.
class Orders::Place
  def initialize(customer:, params:)
    @customer = customer
    @params = params
  end

  # Creates the order plus its outbox row in one transaction.
  # Returns the persisted Order.
  def call
    placed_order = nil
    ApplicationRecord.transaction do
      placed_order = Order.create!(@params.merge(customer: @customer))
      record_outbox_event(placed_order)
    end
    placed_order
  end

  private

  # One durable event row per placed order; the payload is explicit so
  # replays and backfills are predictable.
  def record_outbox_event(order)
    OutboxEvent.create!(
      event_name: 'order.placed',
      aggregate_type: 'Order',
      aggregate_id: order.id,
      dedupe_key: "order:#{order.id}:placed",
      payload: {
        order_id: order.id,
        customer_id: @customer.id,
        total_cents: order.total_cents
      }
    )
  end
end
# Publishes pending outbox events to the event bus in small batches.
class OutboxPublisherJob < ApplicationJob
  queue_as :default

  # @param batch_size [Integer] maximum number of events processed per run
  def perform(batch_size: 200)
    # NOTE: `find_each` ignores the relation's `order` (and, before
    # Rails 6.1, its `limit`), so `batch_size` was not actually honored.
    # Iterate the limited relation directly to respect both the cap and
    # the stable id ordering of `ready`.
    OutboxEvent.ready.limit(batch_size).each do |event|
      # Claim the row under a pessimistic lock. `with_lock` reloads the
      # record, so re-check `pending?` after acquiring the lock: a bare
      # `update` would return true even if a concurrent worker had
      # already moved the row to :publishing, causing a double publish.
      next unless event.with_lock { event.pending? && event.update(status: :publishing) }

      deliver(event)
      event.update!(status: :published, published_at: Time.current)
    rescue StandardError => e
      # Record class AND message (the class name alone made failures
      # hard to diagnose), then re-raise so the queue's retry policy
      # sees the failure.
      event.update!(status: :failed, last_error: "#{e.class}: #{e.message}")
      raise
    end
  end

  private

  # Publishes a single event, instrumented for observability.
  def deliver(event)
    ActiveSupport::Notifications.instrument('outbox.deliver', event_name: event.event_name) do
      EventBus.publish(event.event_name, event.payload, idempotency_key: event.dedupe_key)
    end
  end
end
I used a transactional outbox when I needed my database write and my event publish to succeed or fail together. In the OutboxEvent model, I treated the outbox like a queue: a durable row per event, a dedupe_key for idempotency, and a ready scope that pulls pending rows in a stable order. Next, to keep the write and the outbox entry in one transaction, I wrapped Order.create! and OutboxEvent.create! inside a single ApplicationRecord.transaction, and I made the payload explicit so replays and backfills are predictable. Finally, in the publisher worker, I process small batches, transition each event to publishing under with_lock, publish under ActiveSupport::Notifications, and only then mark it published; failures are recorded as failed so retries stay safe.