# core/tasks.py
from celery import shared_task
from django.contrib.auth import get_user_model
from django.core.mail import send_mail

User = get_user_model()


@shared_task(bind=True, autoretry_for=(Exception,),
             retry_kwargs={'max_retries': 3}, retry_backoff=True)
def send_welcome_email_task(self, user_id):
    """Send welcome email asynchronously."""
    try:
        user = User.objects.get(id=user_id)
        send_mail(
            subject='Welcome!',
            message=f'Hi {user.first_name}, thanks for joining!',
            from_email='noreply@example.com',
            recipient_list=[user.email],
        )
    except User.DoesNotExist:
        # Don't retry if the user was deleted: the exception is swallowed
        # here, so autoretry_for never sees it.
        return
# views.py (same app as UserSerializer)
from django.contrib.auth import get_user_model
from rest_framework import generics

from core.tasks import send_welcome_email_task
from .serializers import UserSerializer

User = get_user_model()


class UserCreateView(generics.CreateAPIView):
    queryset = User.objects.all()
    serializer_class = UserSerializer

    def perform_create(self, serializer):
        user = serializer.save()
        # Queue the email task; .delay() returns immediately.
        # If a worker could pick this up before the surrounding
        # transaction commits, wrap the call in transaction.on_commit().
        send_welcome_email_task.delay(user.id)
I use Celery for any operation that might be slow or fail intermittently, such as sending email. Decorating with @shared_task keeps tasks reusable across apps, since they aren't bound to a specific Celery app instance. I set bind=True to get access to the task instance (useful for manual retries), and I configure retry logic with autoretry_for, capped by max_retries, with retry_backoff enabling exponential backoff between attempts. Execution can also be delayed by passing a countdown argument at call time. I avoid passing complex objects to tasks; I pass IDs and fetch from the database instead, which prevents serialization issues and ensures the task sees fresh data. For critical tasks, I add monitoring and alerting on failures.
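
To make the countdown point concrete: the delay is set when the task is queued, not in the decorator. A minimal sketch reusing the task and user from above (the 60-second figure is arbitrary):

# apply_async() is the general form of .delay(); countdown tells the
# worker not to execute the task before this many seconds have passed.
send_welcome_email_task.apply_async(args=[user.id], countdown=60)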
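
For the monitoring point, one option is a custom base class whose on_failure hook runs once retries are exhausted (retries route through on_retry instead). This is a sketch, not the only approach; AlertingTask and charge_customer_task are hypothetical names:

import logging

from celery import Task, shared_task

logger = logging.getLogger(__name__)


class AlertingTask(Task):
    """Report permanent task failures so an alerting pipeline can pick them up."""

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Swap the log call for Sentry, PagerDuty, etc. as needed.
        logger.error('Task %s[%s] failed permanently: %r', self.name, task_id, exc)
        super().on_failure(exc, task_id, args, kwargs, einfo)


@shared_task(base=AlertingTask, autoretry_for=(Exception,),
             retry_kwargs={'max_retries': 3}, retry_backoff=True)
def charge_customer_task(payment_id):
    # Hypothetical critical task; body omitted.
    ...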