Design Patterns in Python for Coding Interviews (2025)
Python-specific design pattern implementations come up in senior coding rounds at Stripe, Airbnb, and Netflix. These patterns leverage Python idioms like decorators, context managers, and generators.
Pattern 1: Context Manager (Resource Management)
# The pythonic way to manage resources (files, connections, locks)
class DatabaseConnection:
    """Context manager owning a DB connection's full lifecycle.

    Commits on clean exit, rolls back if the with-body raised, and always
    closes the connection — even when commit()/rollback() itself fails.
    """

    def __init__(self, url):
        self.url = url
        self.conn = None  # created lazily in __enter__

    def __enter__(self):
        # NOTE(review): connect() is assumed to come from the DB driver —
        # it is not defined in this file.
        self.conn = connect(self.url)
        return self.conn

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type:
                self.conn.rollback()
            else:
                self.conn.commit()
        finally:
            # Bug fix: the original skipped close() when commit()/rollback()
            # raised, leaking the connection. finally guarantees cleanup.
            self.conn.close()
        return False  # don't suppress exceptions from the with-body
# Usage: the with-statement guarantees commit-or-rollback plus close, even
# if the body raises (the exception still propagates).
with DatabaseConnection("postgres://localhost/db") as conn:
    conn.execute("INSERT INTO users VALUES (?, ?)", [1, "alice"])
# Automatically commits/rolls back and closes connection
# Using contextlib for simpler cases
from contextlib import contextmanager
@contextmanager
def timer(label):
    """Context manager that prints the wall-clock duration of its body.

    Args:
        label: prefix for the printed timing line.

    The elapsed time is reported even if the body raises (try/finally
    around the yield), and the exception still propagates to the caller.
    """
    import time
    # perf_counter is monotonic — immune to system clock adjustments,
    # unlike time.time().
    start = time.perf_counter()
    try:
        yield
    finally:
        print(f"{label}: {time.perf_counter() - start:.2f}s")
# Times the body of the with-block and prints the elapsed seconds on exit.
# NOTE(review): matrix_multiply, A and B are assumed defined elsewhere —
# this is illustrative only.
with timer("matrix multiply"):
    result = matrix_multiply(A, B)
Pattern 2: Decorator Pattern
import functools, time
# Retry decorator with exponential backoff
def retry(max_attempts=3, base_delay=1.0, exceptions=(Exception,)):
    """Decorator factory: retry a flaky call with exponential backoff.

    Args:
        max_attempts: total number of calls before giving up (must be >= 1).
        base_delay: seconds waited before the first retry; doubles each time.
        exceptions: tuple of exception types that trigger a retry; anything
            else propagates immediately.

    Raises:
        ValueError: at decoration time if max_attempts < 1 — otherwise the
            wrapper would silently return None without ever calling func.

    On the final failed attempt the original exception is re-raised unchanged.
    """
    if max_attempts < 1:
        raise ValueError("max_attempts must be >= 1")

    def decorator(func):
        @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped func
        def wrapper(*args, **kwargs):
            delay = base_delay
            for attempt in range(max_attempts):
                try:
                    return func(*args, **kwargs)
                except exceptions as e:
                    if attempt == max_attempts - 1:
                        raise  # out of attempts: propagate the last error
                    print(f"Attempt {attempt+1} failed: {e}. Retrying in {delay}s")
                    time.sleep(delay)
                    delay *= 2  # exponential backoff
        return wrapper
    return decorator
@retry(max_attempts=3, exceptions=(ConnectionError,))
def fetch_data(url):
    """Fetch and decode JSON from *url*; ConnectionErrors are retried by @retry."""
    response = requests.get(url)  # may raise ConnectionError
    return response.json()
# Cache decorator (memoization)
def memoize(func):
    """Cache results keyed by arguments (memoization).

    Generalized over the classic version: keyword arguments are now part of
    the cache key (the original crashed with TypeError on any kwarg). All
    argument values must be hashable.

    Note: functools.lru_cache / functools.cache are the stdlib equivalents
    and should usually be preferred; this shows the pattern by hand.
    """
    cache = {}

    @functools.wraps(func)  # keep the wrapped function's metadata
    def wrapper(*args, **kwargs):
        # frozenset makes the kwargs portion of the key order-insensitive.
        key = (args, frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]
    return wrapper
@memoize
def fibonacci(n):
    """Return the n-th Fibonacci number (fib(0)=0, fib(1)=1), memoized."""
    return n if n <= 1 else fibonacci(n - 1) + fibonacci(n - 2)
Pattern 3: Observer (Event System)
from typing import Callable, Dict, List
class EventBus:
    """Minimal synchronous pub/sub hub (Observer pattern).

    Handlers for an event run in subscription order. subscribe() returns an
    idempotent zero-argument unsubscribe callback.
    """

    def __init__(self):
        self._listeners: Dict[str, List[Callable]] = {}

    def subscribe(self, event: str, handler: Callable):
        """Register *handler* for *event*; returns an unsubscribe function."""
        self._listeners.setdefault(event, []).append(handler)

        def unsubscribe():
            # Bug fix: the original lambda raised ValueError when called
            # twice; checking membership makes it safely idempotent.
            handlers = self._listeners.get(event, [])
            if handler in handlers:
                handlers.remove(handler)
        return unsubscribe

    def publish(self, event: str, data=None):
        """Invoke every handler subscribed to *event* with *data*.

        Iterates over a snapshot so a handler may subscribe/unsubscribe
        during dispatch without corrupting the iteration.
        """
        for handler in list(self._listeners.get(event, [])):
            handler(data)
# Usage in an e-commerce system
bus = EventBus()
unsubscribe = bus.subscribe("order_placed", lambda d: send_email(d["email"]))
bus.subscribe("order_placed", lambda d: update_inventory(d["items"]))
bus.subscribe("order_placed", lambda d: charge_payment(d["payment"]))
bus.publish("order_placed", {"email": "user@example.com", "items": [...], "payment": {...}})
Pattern 4: Iterator and Generator
# Custom iterator for paginated API
class PaginatedAPI:
    """One-shot iterator over a paginated API: each next() yields one page.

    Iteration stops when the API returns an empty item list.
    """

    def __init__(self, url, page_size=100):
        self.url = url
        self.page_size = page_size
        self.page = 0  # next page index to request

    def __iter__(self):
        # The object is its own iterator, so it can only be consumed once.
        return self

    def __next__(self):
        response = api_call(self.url, page=self.page, size=self.page_size)
        items = response["items"]
        if not items:
            raise StopIteration  # empty page signals end of data
        self.page += 1
        return items
# Generator version (more Pythonic) — note it is not a drop-in replacement:
# the class yields whole pages (lists), while this yields individual items.
def paginated_api(url, page_size=100):
    """Lazily yield individual items from a paginated API, page by page.

    Stops as soon as the API returns a page with no items.
    """
    current = 0
    while True:
        batch = api_call(url, page=current, size=page_size)["items"]
        if not batch:
            return  # empty page: end of data
        yield from batch
        current += 1
# Infinite stream processing with generators
def read_log_stream(filename):
    """Tail *filename* forever, yielding each new line as it appears.

    Never terminates on its own; polls every 0.1s when no data is pending.
    The file is closed if the generator is garbage-collected or .close()d.
    """
    with open(filename) as handle:
        while True:
            record = handle.readline()
            if not record:
                time.sleep(0.1)  # at EOF: wait for the writer to append
                continue
            yield record
def parse_errors(lines):
    """Filter a line stream down to parsed ERROR entries (lazy)."""
    for entry in lines:
        if "ERROR" not in entry:
            continue  # skip non-error lines without parsing them
        yield parse_log_line(entry)
def alert_on_high_rate(errors, threshold=10):
    """Consume an error stream; alert once a full window of errors exists.

    Args:
        errors: iterable (typically infinite) of error records.
        threshold: sliding-window size that triggers the alert.

    NOTE(review): once the window fills, alert() fires on every subsequent
    error — consider rate-limiting the alert itself if that is unintended.
    """
    from collections import deque
    # deque(maxlen=...) evicts the oldest entry in O(1); the original
    # list.pop(0) was O(n) per event on an unbounded stream.
    window = deque(maxlen=threshold)
    for error in errors:
        window.append(error)
        if len(window) == threshold:
            alert(f"High error rate: {threshold} errors")
# Pipeline: alert_on_high_rate(parse_errors(read_log_stream("app.log")))
Pattern 5: Dataclass + Builder
from dataclasses import dataclass, field
from typing import Optional, List
@dataclass
class QueryConfig:
    """Value object describing a SELECT query; to_sql() renders it.

    WARNING: values are interpolated directly into the SQL string — never
    build a QueryConfig from untrusted input (SQL injection risk).
    """
    table: str
    columns: List[str] = field(default_factory=lambda: ["*"])
    where: Optional[str] = None
    order_by: Optional[str] = None
    limit: Optional[int] = None
    offset: int = 0

    def to_sql(self) -> str:
        """Render the configured query as a SQL string."""
        cols = ", ".join(self.columns)
        sql = f"SELECT {cols} FROM {self.table}"
        if self.where:
            sql += f" WHERE {self.where}"
        if self.order_by:
            sql += f" ORDER BY {self.order_by}"
        # Bug fix: `if self.limit:` silently dropped a valid LIMIT 0;
        # only None means "no limit".
        if self.limit is not None:
            sql += f" LIMIT {self.limit}"
        if self.offset:  # OFFSET 0 is a no-op, so skipping it is safe
            sql += f" OFFSET {self.offset}"
        return sql
# Builder pattern using method chaining
class QueryBuilder:
    """Fluent builder over QueryConfig; each setter returns self for chaining."""

    def __init__(self):
        self._config = QueryConfig(table="")

    def from_table(self, t):
        self._config.table = t
        return self

    def select(self, *cols):
        self._config.columns = list(cols)
        return self

    def where(self, cond):
        self._config.where = cond
        return self

    def order_by(self, col):
        self._config.order_by = col
        return self

    def limit(self, n):
        self._config.limit = n
        return self

    def offset(self, n):
        # New setter: QueryConfig already supports offset, but the builder
        # had no way to set it — added for consistency (backward-compatible).
        self._config.offset = n
        return self

    def build(self):
        """Render the accumulated configuration to a SQL string."""
        return self._config.to_sql()
# Build a query fluently; each chained call mutates the shared QueryConfig
# and returns the builder so the calls can be strung together.
query = (QueryBuilder()
    .from_table("users")
    .select("id", "name", "email")
    .where("age > 18")
    .order_by("created_at DESC")
    .limit(100)
    .build())  # -> "SELECT id, name, email FROM users WHERE age > 18 ..."
Pattern 6: Dependency Injection
from abc import ABC, abstractmethod
# Interface
class NotificationService(ABC):
    """Abstract interface for delivering a message to a recipient."""

    @abstractmethod
    def send(self, to: str, message: str):
        """Deliver *message* to *to*; subclasses choose the channel."""
# Implementations
class EmailService(NotificationService):
    """Concrete notifier that delivers via email (stubbed with print here)."""

    def send(self, to, message):
        print(f"Email to {to}: {message}")
class SMSService(NotificationService):
    """Concrete notifier that delivers via SMS (stubbed with print here)."""

    def send(self, to, message):
        print(f"SMS to {to}: {message}")
# Consumer depends on abstraction, not implementation
class OrderProcessor:
    """Processes orders; the notification channel is injected (DI pattern)."""

    def __init__(self, notifier: NotificationService):
        # Depends only on the abstract interface, so any implementation
        # (real or mock) can be supplied by the caller.
        self.notifier = notifier

    def process(self, order):
        """Handle *order* and confirm it through the injected notifier."""
        # ... process order
        self.notifier.send(order.email, f"Order {order.id} confirmed!")
# Wire up in the application entry point: swap EmailService for SMSService
# (or a mock in tests) without touching OrderProcessor itself.
processor = OrderProcessor(notifier=EmailService())
# Easy to test: OrderProcessor(notifier=MockNotifier())
Key Interview Tips
- Use @functools.wraps when writing decorators to preserve function metadata
- Context managers are the Pythonic way to handle resources – use them instead of manual try/finally, and say so in interviews
- Generators enable lazy evaluation and infinite sequences without memory overhead
- Dependency injection makes code testable – inject mocks in tests
- Dataclasses reduce boilerplate for value objects; prefer over plain classes for data containers