Python Development Patterns
Idiomatic Python patterns and best practices for building robust, efficient, and maintainable applications.
When to Activate
- Writing new Python code
- Reviewing Python code
- Refactoring existing Python code
- Designing Python packages/modules
Core Principles
1. Readability Counts
Python prioritizes readability. Code should be obvious and easy to understand.
python
1# Good: Clear and readable
2def get_active_users(users: list[User]) -> list[User]:
3 """Return only active users from the provided list."""
4 return [user for user in users if user.is_active]
5
6
7# Bad: Clever but confusing
8def get_active_users(u):
9 return [x for x in u if x.a]
2. Explicit is Better Than Implicit
Avoid magic; be clear about what your code does.
python
1# Good: Explicit configuration
2import logging
3
4logging.basicConfig(
5 level=logging.INFO,
6 format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
7)
8
9# Bad: Hidden side effects
10import some_module
11some_module.setup() # What does this do?
3. EAFP - Easier to Ask Forgiveness Than Permission
Python prefers exception handling over checking conditions.
python
1# Good: EAFP style
2def get_value(dictionary: dict, key: str, default_value: Any) -> Any:
3 try:
4 return dictionary[key]
5 except KeyError:
6 return default_value
7
8# Bad: LBYL (Look Before You Leap) style
9def get_value(dictionary: dict, key: str, default_value: Any) -> Any:
10 if key in dictionary:
11 return dictionary[key]
12 else:
13 return default_value
Type Hints
Basic Type Annotations
python
1from typing import Optional, List, Dict, Any
2
3def process_user(
4 user_id: str,
5 data: Dict[str, Any],
6 active: bool = True
7) -> Optional[User]:
8 """Process a user and return the updated User or None."""
9 if not active:
10 return None
11 return User(user_id, data)
Modern Type Hints (Python 3.9+)
python
1# Python 3.9+ - Use built-in types
2def process_items(items: list[str]) -> dict[str, int]:
3 return {item: len(item) for item in items}
4
5# Python 3.8 and earlier - Use typing module
6from typing import List, Dict
7
8def process_items(items: List[str]) -> Dict[str, int]:
9 return {item: len(item) for item in items}
Type Aliases and TypeVar
python
1from typing import TypeVar, Union
2
3# Type alias for complex types
4JSON = Union[dict[str, Any], list[Any], str, int, float, bool, None]
5
6def parse_json(data: str) -> JSON:
7 return json.loads(data)
8
9# Generic types
10T = TypeVar('T')
11
12def first(items: list[T]) -> T | None:
13 """Return the first item or None if list is empty."""
14 return items[0] if items else None
Protocol-Based Duck Typing
python
1from typing import Protocol
2
3class Renderable(Protocol):
4 def render(self) -> str:
5 """Render the object to a string."""
6
7def render_all(items: list[Renderable]) -> str:
8 """Render all items that implement the Renderable protocol."""
9 return "\n".join(item.render() for item in items)
Error Handling Patterns
Specific Exception Handling
python
1# Good: Catch specific exceptions
2def load_config(path: str) -> Config:
3 try:
4 with open(path) as f:
5 return Config.from_json(f.read())
6 except FileNotFoundError as e:
7 raise ConfigError(f"Config file not found: {path}") from e
8 except json.JSONDecodeError as e:
9 raise ConfigError(f"Invalid JSON in config: {path}") from e
10
11# Bad: Bare except
12def load_config(path: str) -> Config:
13 try:
14 with open(path) as f:
15 return Config.from_json(f.read())
16 except:
17 return None # Silent failure!
Exception Chaining
python
1def process_data(data: str) -> Result:
2 try:
3 parsed = json.loads(data)
4 except json.JSONDecodeError as e:
5 # Chain exceptions to preserve the traceback
6 raise ValueError(f"Failed to parse data: {data}") from e
Custom Exception Hierarchy
python
1class AppError(Exception):
2 """Base exception for all application errors."""
3 pass
4
5class ValidationError(AppError):
6 """Raised when input validation fails."""
7 pass
8
9class NotFoundError(AppError):
10 """Raised when a requested resource is not found."""
11 pass
12
13# Usage
14def get_user(user_id: str) -> User:
15 user = db.find_user(user_id)
16 if not user:
17 raise NotFoundError(f"User not found: {user_id}")
18 return user
Context Managers
Resource Management
python
1# Good: Using context managers
2def process_file(path: str) -> str:
3 with open(path, 'r') as f:
4 return f.read()
5
6# Bad: Manual resource management
7def process_file(path: str) -> str:
8 f = open(path, 'r')
9 try:
10 return f.read()
11 finally:
12 f.close()
Custom Context Managers
python
1from contextlib import contextmanager
2
3@contextmanager
4def timer(name: str):
5 """Context manager to time a block of code."""
6 start = time.perf_counter()
7 yield
8 elapsed = time.perf_counter() - start
9 print(f"{name} took {elapsed:.4f} seconds")
10
11# Usage
12with timer("data processing"):
13 process_large_dataset()
Context Manager Classes
python
1class DatabaseTransaction:
2 def __init__(self, connection):
3 self.connection = connection
4
5 def __enter__(self):
6 self.connection.begin_transaction()
7 return self
8
9 def __exit__(self, exc_type, exc_val, exc_tb):
10 if exc_type is None:
11 self.connection.commit()
12 else:
13 self.connection.rollback()
14 return False # Don't suppress exceptions
15
16# Usage
17with DatabaseTransaction(conn):
18 user = conn.create_user(user_data)
19 conn.create_profile(user.id, profile_data)
Comprehensions and Generators
List Comprehensions
python
1# Good: List comprehension for simple transformations
2names = [user.name for user in users if user.is_active]
3
4# Bad: Manual loop
5names = []
6for user in users:
7 if user.is_active:
8 names.append(user.name)
9
10# Complex comprehensions should be expanded
11# Bad: Too complex
12result = [x * 2 for x in items if x > 0 if x % 2 == 0]
13
14# Good: Use a generator function
15def filter_and_transform(items: Iterable[int]) -> list[int]:
16 result = []
17 for x in items:
18 if x > 0 and x % 2 == 0:
19 result.append(x * 2)
20 return result
Generator Expressions
python
1# Good: Generator for lazy evaluation
2total = sum(x * x for x in range(1_000_000))
3
4# Bad: Creates large intermediate list
5total = sum([x * x for x in range(1_000_000)])
Generator Functions
python
1def read_large_file(path: str) -> Iterator[str]:
2 """Read a large file line by line."""
3 with open(path) as f:
4 for line in f:
5 yield line.strip()
6
7# Usage
8for line in read_large_file("huge.txt"):
9 process(line)
Data Classes and Named Tuples
Data Classes
python
1from dataclasses import dataclass, field
2from datetime import datetime
3
4@dataclass
5class User:
6 """User entity with automatic __init__, __repr__, and __eq__."""
7 id: str
8 name: str
9 email: str
10 created_at: datetime = field(default_factory=datetime.now)
11 is_active: bool = True
12
13# Usage
14user = User(
15 id="123",
16 name="Alice",
17 email="alice@example.com"
18)
Data Classes with Validation
python
1@dataclass
2class User:
3 email: str
4 age: int
5
6 def __post_init__(self):
7 # Validate email format
8 if "@" not in self.email:
9 raise ValueError(f"Invalid email: {self.email}")
10 # Validate age range
11 if self.age < 0 or self.age > 150:
12 raise ValueError(f"Invalid age: {self.age}")
Named Tuples
python
1from typing import NamedTuple
2
3class Point(NamedTuple):
4 """Immutable 2D point."""
5 x: float
6 y: float
7
8 def distance(self, other: 'Point') -> float:
9 return ((self.x - other.x) ** 2 + (self.y - other.y) ** 2) ** 0.5
10
11# Usage
12p1 = Point(0, 0)
13p2 = Point(3, 4)
14print(p1.distance(p2)) # 5.0
Decorators
Function Decorators
python
1import functools
2import time
3
4def timer(func: Callable) -> Callable:
5 """Decorator to time function execution."""
6 @functools.wraps(func)
7 def wrapper(*args, **kwargs):
8 start = time.perf_counter()
9 result = func(*args, **kwargs)
10 elapsed = time.perf_counter() - start
11 print(f"{func.__name__} took {elapsed:.4f}s")
12 return result
13 return wrapper
14
15@timer
16def slow_function():
17 time.sleep(1)
18
19# slow_function() prints: slow_function took 1.0012s
Parameterized Decorators
python
1def repeat(times: int):
2 """Decorator to repeat a function multiple times."""
3 def decorator(func: Callable) -> Callable:
4 @functools.wraps(func)
5 def wrapper(*args, **kwargs):
6 results = []
7 for _ in range(times):
8 results.append(func(*args, **kwargs))
9 return results
10 return wrapper
11 return decorator
12
13@repeat(times=3)
14def greet(name: str) -> str:
15 return f"Hello, {name}!"
16
17# greet("Alice") returns ["Hello, Alice!", "Hello, Alice!", "Hello, Alice!"]
Class-Based Decorators
python
1class CountCalls:
2 """Decorator that counts how many times a function is called."""
3 def __init__(self, func: Callable):
4 functools.update_wrapper(self, func)
5 self.func = func
6 self.count = 0
7
8 def __call__(self, *args, **kwargs):
9 self.count += 1
10 print(f"{self.func.__name__} has been called {self.count} times")
11 return self.func(*args, **kwargs)
12
13@CountCalls
14def process():
15 pass
16
17# Each call to process() prints the call count
Concurrency Patterns
Threading for I/O-Bound Tasks
python
1import concurrent.futures
2import threading
3
4def fetch_url(url: str) -> str:
5 """Fetch a URL (I/O-bound operation)."""
6 import urllib.request
7 with urllib.request.urlopen(url) as response:
8 return response.read().decode()
9
10def fetch_all_urls(urls: list[str]) -> dict[str, str]:
11 """Fetch multiple URLs concurrently using threads."""
12 with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
13 future_to_url = {executor.submit(fetch_url, url): url for url in urls}
14 results = {}
15 for future in concurrent.futures.as_completed(future_to_url):
16 url = future_to_url[future]
17 try:
18 results[url] = future.result()
19 except Exception as e:
20 results[url] = f"Error: {e}"
21 return results
Multiprocessing for CPU-Bound Tasks
python
1def process_data(data: list[int]) -> int:
2 """CPU-intensive computation."""
3 return sum(x ** 2 for x in data)
4
5def process_all(datasets: list[list[int]]) -> list[int]:
6 """Process multiple datasets using multiple processes."""
7 with concurrent.futures.ProcessPoolExecutor() as executor:
8 results = list(executor.map(process_data, datasets))
9 return results
Async/Await for Concurrent I/O
python
1import asyncio
2
3async def fetch_async(url: str) -> str:
4 """Fetch a URL asynchronously."""
5 import aiohttp
6 async with aiohttp.ClientSession() as session:
7 async with session.get(url) as response:
8 return await response.text()
9
10async def fetch_all(urls: list[str]) -> dict[str, str]:
11 """Fetch multiple URLs concurrently."""
12 tasks = [fetch_async(url) for url in urls]
13 results = await asyncio.gather(*tasks, return_exceptions=True)
14 return dict(zip(urls, results))
Package Organization
Standard Project Layout
myproject/
├── src/
│ └── mypackage/
│ ├── __init__.py
│ ├── main.py
│ ├── api/
│ │ ├── __init__.py
│ │ └── routes.py
│ ├── models/
│ │ ├── __init__.py
│ │ └── user.py
│ └── utils/
│ ├── __init__.py
│ └── helpers.py
├── tests/
│ ├── __init__.py
│ ├── conftest.py
│ ├── test_api.py
│ └── test_models.py
├── pyproject.toml
├── README.md
└── .gitignore
Import Conventions
python
1# Good: Import order - stdlib, third-party, local
2import os
3import sys
4from pathlib import Path
5
6import requests
7from fastapi import FastAPI
8
9from mypackage.models import User
10from mypackage.utils import format_name
11
12# Good: Use isort for automatic import sorting
13# pip install isort
__init__.py for Package Exports
python
1# mypackage/__init__.py
2"""mypackage - A sample Python package."""
3
4__version__ = "1.0.0"
5
6# Export main classes/functions at package level
7from mypackage.models import User, Post
8from mypackage.utils import format_name
9
10__all__ = ["User", "Post", "format_name"]
Using __slots__ for Memory Efficiency
python
1# Bad: Regular class uses __dict__ (more memory)
2class Point:
3 def __init__(self, x: float, y: float):
4 self.x = x
5 self.y = y
6
7# Good: __slots__ reduces memory usage
8class Point:
9 __slots__ = ['x', 'y']
10
11 def __init__(self, x: float, y: float):
12 self.x = x
13 self.y = y
Generator for Large Data
python
1# Bad: Returns full list in memory
2def read_lines(path: str) -> list[str]:
3 with open(path) as f:
4 return [line.strip() for line in f]
5
6# Good: Yields lines one at a time
7def read_lines(path: str) -> Iterator[str]:
8 with open(path) as f:
9 for line in f:
10 yield line.strip()
Avoid String Concatenation in Loops
python
1# Bad: O(n²) due to string immutability
2result = ""
3for item in items:
4 result += str(item)
5
6# Good: O(n) using join
7result = "".join(str(item) for item in items)
8
9# Good: Using StringIO for building
10from io import StringIO
11
12buffer = StringIO()
13for item in items:
14 buffer.write(str(item))
15result = buffer.getvalue()
Essential Commands
bash
1# Code formatting
2black .
3isort .
4
5# Linting
6ruff check .
7pylint mypackage/
8
9# Type checking
10mypy .
11
12# Testing
13pytest --cov=mypackage --cov-report=html
14
15# Security scanning
16bandit -r .
17
18# Dependency management
19pip-audit
20safety check
pyproject.toml Configuration
toml
1[project]
2name = "mypackage"
3version = "1.0.0"
4requires-python = ">=3.9"
5dependencies = [
6 "requests>=2.31.0",
7 "pydantic>=2.0.0",
8]
9
10[project.optional-dependencies]
11dev = [
12 "pytest>=7.4.0",
13 "pytest-cov>=4.1.0",
14 "black>=23.0.0",
15 "ruff>=0.1.0",
16 "mypy>=1.5.0",
17]
18
19[tool.black]
20line-length = 88
21target-version = ['py39']
22
23[tool.ruff]
24line-length = 88
25select = ["E", "F", "I", "N", "W"]
26
27[tool.mypy]
28python_version = "3.9"
29warn_return_any = true
30warn_unused_configs = true
31disallow_untyped_defs = true
32
33[tool.pytest.ini_options]
34testpaths = ["tests"]
35addopts = "--cov=mypackage --cov-report=term-missing"
Quick Reference: Python Idioms
| Idiom | Description |
|---|---|
| EAFP | Easier to Ask Forgiveness than Permission |
| Context managers | Use with for resource management |
| List comprehensions | For simple transformations |
| Generators | For lazy evaluation and large datasets |
| Type hints | Annotate function signatures |
| Dataclasses | For data containers with auto-generated methods |
| __slots__ | For memory optimization |
| f-strings | For string formatting (Python 3.6+) |
| pathlib.Path | For path operations (Python 3.4+) |
| enumerate | For index-element pairs in loops |
Anti-Patterns to Avoid
python
1# Bad: Mutable default arguments
2def append_to(item, items=[]):
3 items.append(item)
4 return items
5
6# Good: Use None and create new list
7def append_to(item, items=None):
8 if items is None:
9 items = []
10 items.append(item)
11 return items
12
13# Bad: Checking type with type()
14if type(obj) == list:
15 process(obj)
16
17# Good: Use isinstance
18if isinstance(obj, list):
19 process(obj)
20
21# Bad: Comparing to None with ==
22if value == None:
23 process()
24
25# Good: Use is
26if value is None:
27 process()
28
29# Bad: from module import *
30from os.path import *
31
32# Good: Explicit imports
33from os.path import join, exists
34
35# Bad: Bare except
36try:
37 risky_operation()
38except:
39 pass
40
41# Good: Specific exception
42try:
43 risky_operation()
44except SpecificError as e:
45 logger.error(f"Operation failed: {e}")
Remember: Python code should be readable, explicit, and follow the principle of least surprise. When in doubt, prioritize clarity over cleverness.