from decimal import Decimal

from django.db.models import F

from products.models import Product


def import_products_bulk(product_data):
    """Import thousands of products efficiently."""
    products = [
        Product(
            name=data['name'],
            price=data['price'],
            sku=data['sku'],
        )
        for data in product_data
    ]
    # Insert all at once
    Product.objects.bulk_create(products, batch_size=1000)


def update_prices_bulk(price_updates):
    """Update many products' prices."""
    products = []
    for product_id, new_price in price_updates.items():
        product = Product(id=product_id, price=new_price)
        products.append(product)
    # Update all at once
    Product.objects.bulk_update(products, ['price'], batch_size=500)


def discount_category(category_id, discount_percent):
    """Apply discount to all products in category."""
    # Use Decimal arithmetic so the multiplier matches a DecimalField price
    # (mixing a float with a DecimalField in an F() expression errors on
    # recent Django versions unless output_field is set).
    factor = Decimal(1) - Decimal(discount_percent) / Decimal(100)
    Product.objects.filter(category_id=category_id).update(
        price=F('price') * factor
    )


def process_large_queryset():
    """Process millions of records without memory issues."""
    for product in Product.objects.iterator(chunk_size=2000):
        # Process one at a time
        process_product(product)
Bulk operations reduce database round-trips dramatically. I use bulk_create() for inserting many objects at once, bulk_update() for updating many existing rows, and update() for queryset-level updates such as the F() expression discount above. These bypass the model's save() method and the pre/post-save signals, which is a large part of their speed. For large datasets, I split the work with the batch_size parameter. I use iterator() to stream large querysets without loading everything into memory, and the delete() method on a queryset removes rows in bulk. These patterns are essential for data imports, exports, and batch processing tasks.
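The queryset-level delete() mentioned above isn't shown in the listing, so here is a minimal sketch; the discontinued boolean field on Product is a hypothetical field used only for illustration.

def purge_discontinued_products():
    """Remove discontinued products with a single bulk delete."""
    # Deletes at the database level; when there are no cascades or delete
    # signals to honour, Django can issue a single DELETE query.
    # 'discontinued' is an assumed field, not part of the model shown above.
    deleted_total, deleted_per_model = Product.objects.filter(
        discontinued=True
    ).delete()
    return deleted_total

delete() returns the total number of rows removed plus a per-model breakdown, which is handy for logging batch jobs.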
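For exports, the same iterator() streaming keeps memory flat. The sketch below writes products to CSV; the output path and the choice of values_list() columns are assumptions for illustration, not part of the code above.

import csv


def export_products_csv(path='products.csv'):
    """Stream all products to a CSV file without loading them into memory."""
    with open(path, 'w', newline='') as handle:
        writer = csv.writer(handle)
        writer.writerow(['sku', 'name', 'price'])
        # values_list() skips building model instances; iterator() streams
        # rows from the database in chunks instead of caching the queryset.
        rows = Product.objects.values_list('sku', 'name', 'price').iterator(chunk_size=2000)
        for row in rows:
            writer.writerow(row)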