class UserNotificationJob < ApplicationJob
  queue_as :default

  # Retry up to 5 times with exponential backoff for transient failures
  # (network blips, rate limits, etc.).
  retry_on StandardError, wait: :exponentially_longer, attempts: 5

  # If the user was deleted before the job ran there is nothing to do —
  # drop the job instead of retrying.
  discard_on ActiveRecord::RecordNotFound

  # Delivers a notification to a user over the requested channel and
  # records an analytics event on success.
  #
  # user_id           - ID of the User to notify (RecordNotFound => job discarded).
  # notification_type - 'email', 'push', or 'sms'.
  # data              - channel-specific payload Hash (e.g. :message for push/sms).
  #
  # Raises ArgumentError for an unknown notification_type. The original
  # fell through the case silently and still tracked a 'notification_sent'
  # event for a notification that was never delivered.
  def perform(user_id, notification_type, data = {})
    user = User.find(user_id)

    case notification_type
    when 'email'
      send_email_notification(user, data)
    when 'push'
      send_push_notification(user, data)
    when 'sms'
      send_sms_notification(user, data)
    else
      raise ArgumentError, "Unknown notification type: #{notification_type}"
    end

    track_notification_sent(user, notification_type)
  end

  private

  # Delivers synchronously; we are already in a background job, so
  # deliver_later would just enqueue a second job for no benefit.
  def send_email_notification(user, data)
    UserMailer.notification(user, data).deliver_now
  end

  # NOTE(review): `.send(...)` here resolves to PushNotificationService's
  # own #send only if the service defines one — otherwise this is
  # Object#send and would try to invoke data[:message] as a method name.
  # Confirm the service defines #send (consider renaming it to #deliver).
  def send_push_notification(user, data)
    PushNotificationService.new(user).send(data[:message])
  end

  def send_sms_notification(user, data)
    TwilioService.send_sms(user.phone, data[:message])
  end

  # Records a 'notification_sent' analytics event for the delivered channel.
  def track_notification_sent(user, type)
    Analytics.track(
      user_id: user.id,
      event: 'notification_sent',
      properties: { type: type }
    )
  end
end
# Usage:
# Enqueue immediately
# UserNotificationJob.perform_later(user.id, 'email', { subject: 'Welcome!' })
#
# Enqueue with delay
# UserNotificationJob.set(wait: 1.hour).perform_later(user.id, 'email', data)
#
# Schedule for specific time
# UserNotificationJob.set(wait_until: Date.tomorrow.noon).perform_later(user.id, 'push', data)
class ReportGeneratorJob < ApplicationJob
  queue_as :reports

  # Retry transient report failures a few times; on the final failure the
  # block runs and an admin is notified.
  retry_on ReportGenerationError, wait: 5.minutes, attempts: 3 do |job, error|
    # Notify admin on final failure
    AdminMailer.job_failed(job, error).deliver_now
  end

  # Generates a CSV for the given report, attaches it to storage, marks the
  # report completed, and emails the user. On any error the report is marked
  # failed (when it was found) and the error is re-raised so retry_on applies.
  #
  # report_id - ID of the Report row to build.
  def perform(report_id)
    report = Report.find(report_id)
    report.update!(status: 'processing')

    csv_data = generate_csv_data(report)
    file = upload_to_storage(csv_data, report)

    report.update!(
      status: 'completed',
      file_url: file.url,
      completed_at: Time.current
    )

    notify_user(report)
  rescue StandardError => e
    # `report` is nil when Report.find itself raised (e.g. RecordNotFound);
    # the safe navigation prevents a NoMethodError here from masking the
    # original exception.
    report&.update!(
      status: 'failed',
      error_message: e.message
    )
    raise
  end

  private

  # Streams the report's records in batches (find_each) so large reports
  # don't load every row into memory at once. Returns the CSV as a String.
  def generate_csv_data(report)
    CSV.generate do |csv|
      csv << ['ID', 'Name', 'Email', 'Created At']
      report.data_scope.find_each do |record|
        csv << [record.id, record.name, record.email, record.created_at]
      end
    end
  end

  # Attaches the CSV to the report via ActiveStorage under a timestamped
  # filename and returns the attachment.
  def upload_to_storage(csv_data, report)
    filename = "report_#{report.id}_#{Time.current.to_i}.csv"
    file = StringIO.new(csv_data)

    report.file.attach(
      io: file,
      filename: filename,
      content_type: 'text/csv'
    )

    report.file
  end

  # Email delivery is queued rather than inline so a slow mail provider
  # can't fail an otherwise-completed report.
  def notify_user(report)
    ReportMailer.ready(report).deliver_later
  end
end
class BatchProcessorJob < ApplicationJob
  queue_as :batch

  # Number of records loaded from the database per batch.
  BATCH_SIZE = 100

  # Processes each record of `model_name` whose id is in `ids`, loading
  # them in fixed-size batches to keep memory bounded.
  #
  # model_name - class name String, resolved via constantize.
  # ids        - Array of record ids to process.
  def perform(model_name, ids)
    klass = model_name.constantize
    scope = klass.where(id: ids)

    scope.find_each(batch_size: BATCH_SIZE) { |record| process_record(record) }
  end

  private

  # Runs a single record's processing hook. Failures are logged and
  # swallowed so that one bad record does not abort the whole batch.
  def process_record(record)
    record.process!
  rescue StandardError => e
    Rails.logger.error("Failed to process #{record.class.name} #{record.id}: #{e.message}")
  end
end
# Sidekiq-specific features
class HighPriorityJob
  include Sidekiq::Job

  # Runs on the :critical queue with up to 10 retries; backtrace: true
  # stores the failure backtrace in Redis for inspection in the Web UI.
  sidekiq_options queue: :critical, retry: 10, backtrace: true

  # Placeholder for a critical per-user operation — body not implemented
  # in this example.
  def perform(user_id)
    # Critical operation
  end
end
# Scheduled job (requires sidekiq-scheduler gem)
class DailyCleanupJob
  include Sidekiq::Job

  sidekiq_options queue: :maintenance

  # Purges stale data:
  # - inactive users not seen for a year, destroyed in batches so callbacks
  #   and dependent-association cleanup still run without loading every
  #   matching record into memory at once (plain destroy_all materializes
  #   the whole result set);
  # - expired sessions via delete_all (no callbacks) for speed.
  def perform
    User.inactive.where('last_login_at < ?', 1.year.ago).in_batches.destroy_all
    Session.expired.delete_all
  end
end
Sidekiq processes background jobs asynchronously using Redis and multi-threading. ActiveJob provides a framework-agnostic interface — I use it for portability between job processors. Jobs handle emails, data processing, API calls, and report generation. perform_later enqueues jobs; perform_now executes synchronously. I set priorities and queues to organize jobs. Retry logic with exponential backoff handles transient failures. Dead job queues capture permanent failures. Unique jobs prevent duplicates via the sidekiq-unique-jobs gem. Scheduled jobs use set(wait:) or cron schedules. Batch processing groups related jobs. Monitoring via the Sidekiq Web UI tracks throughput and failures. Proper job design — idempotent, atomic operations — ensures reliability. Background processing improves response times and user experience.