Trees and graphs form the backbone of many technical interview problems. Master these traversal patterns and algorithm templates — most graph problems reduce to one of a handful of approaches.
Tree Traversals
from collections import deque
from typing import Optional, List
class TreeNode:
    """Node of a binary tree: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
# --- Recursive traversals ---
def inorder(root: Optional[TreeNode]) -> List[int]:
    """Return values left → node → right (sorted order for a BST)."""
    if root is None:
        return []
    return inorder(root.left) + [root.val] + inorder(root.right)
def preorder(root: Optional[TreeNode]) -> List[int]:
    """Return values node → left → right (root first)."""
    if root is None:
        return []
    return [root.val] + preorder(root.left) + preorder(root.right)
def postorder(root: Optional[TreeNode]) -> List[int]:
    """Return values left → right → node (root last)."""
    if root is None:
        return []
    return postorder(root.left) + postorder(root.right) + [root.val]
# --- Iterative inorder (stack-based) ---
def inorder_iterative(root: Optional[TreeNode]) -> List[int]:
    """Stack-based inorder traversal — no recursion, O(h) extra space."""
    out: List[int] = []
    ancestors: List[TreeNode] = []
    node = root
    while node or ancestors:
        # Slide down to the leftmost unvisited node, saving the path.
        while node:
            ancestors.append(node)
            node = node.left
        node = ancestors.pop()
        out.append(node.val)  # Visit once the left subtree is exhausted.
        node = node.right
    return out
# --- BFS (level order) ---
def level_order(root: Optional[TreeNode]) -> List[List[int]]:
    """BFS by depth: one sub-list of node values per tree level."""
    if root is None:
        return []
    levels: List[List[int]] = []
    frontier = deque([root])
    while frontier:
        width = len(frontier)  # Snapshot: everything currently queued is one level
        current: List[int] = []
        for _ in range(width):
            node = frontier.popleft()
            current.append(node.val)
            if node.left:
                frontier.append(node.left)
            if node.right:
                frontier.append(node.right)
        levels.append(current)
    return levels
# --- Common tree problems ---
def max_depth(root: Optional[TreeNode]) -> int:
    """Number of nodes on the longest root-to-leaf path (0 for empty tree)."""
    if root is None:
        return 0
    return 1 + max(max_depth(root.left), max_depth(root.right))
def is_balanced(root: Optional[TreeNode]) -> bool:
    """True if every node's subtree heights differ by at most 1.

    Single pass: the helper returns the subtree height, with -1 as a
    sentinel meaning "imbalance already found below — stop measuring".
    """
    def measure(node):
        if node is None:
            return 0
        left = measure(node.left)
        if left == -1:
            return -1
        right = measure(node.right)
        if right == -1 or abs(left - right) > 1:
            return -1
        return 1 + max(left, right)

    return measure(root) != -1
def lca(root: Optional[TreeNode], p: TreeNode, q: TreeNode) -> Optional[TreeNode]:
    """Lowest common ancestor — O(n) time, O(h) space."""
    # A node that is itself p or q is its own ancestor candidate.
    if root is None or root is p or root is q:
        return root
    found_left = lca(root.left, p, q)
    found_right = lca(root.right, p, q)
    if found_left and found_right:
        return root  # p and q sit in different subtrees: root is the LCA
    return found_left or found_right
Graph Representations and DFS/BFS
from collections import defaultdict, deque
from typing import Dict, List, Set
# Adjacency list (most common for interview problems)
Graph = Dict[int, List[int]]

def build_graph(edges: List[tuple], directed=False) -> Graph:
    """Build an adjacency list; undirected edges are mirrored both ways."""
    adj: Graph = defaultdict(list)
    for src, dst in edges:
        adj[src].append(dst)
        if not directed:
            adj[dst].append(src)
    return adj
def dfs(graph: Graph, start: int) -> List[int]:
    """Iterative DFS — avoids recursion limit on large graphs.

    Note: neighbors are pushed in adjacency order, so they are popped
    and visited last-to-first (reverse adjacency order).
    """
    seen: Set[int] = set()
    pending = [start]
    visit_order: List[int] = []
    while pending:
        node = pending.pop()
        if node in seen:
            continue  # A node can be pushed multiple times before its visit.
        seen.add(node)
        visit_order.append(node)
        pending.extend(nb for nb in graph[node] if nb not in seen)
    return visit_order
def bfs(graph: Graph, start: int) -> List[int]:
    """BFS — returns nodes in level order; shortest path in unweighted graph."""
    seen = {start}           # Mark on enqueue so nodes are queued at most once
    frontier = deque([start])
    visit_order: List[int] = []
    while frontier:
        node = frontier.popleft()
        visit_order.append(node)
        for nb in graph[node]:
            if nb in seen:
                continue
            seen.add(nb)
            frontier.append(nb)
    return visit_order
def shortest_path_bfs(graph: Graph, start: int, end: int) -> List[int]:
    """Reconstruct shortest path in unweighted graph."""
    parent = {start: None}  # Predecessor map; doubles as the visited set
    frontier = deque([start])
    while frontier:
        node = frontier.popleft()
        if node == end:
            break  # Target dequeued — all needed parent links exist
        for nb in graph[node]:
            if nb not in parent:
                parent[nb] = node
                frontier.append(nb)
    if end not in parent:
        return []  # end was never reached
    # Walk the parent chain back from end, then flip it.
    path = []
    step = end
    while step is not None:
        path.append(step)
        step = parent[step]
    return path[::-1]
Topological Sort
from collections import defaultdict, deque
def topo_sort_kahn(n: int, prerequisites: List[tuple]) -> List[int]:
    """
    Kahn's algorithm (BFS-based) — also detects cycles.
    Returns empty list if cycle exists.
    """
    indeg = [0] * n
    adj = defaultdict(list)
    for course, prereq in prerequisites:  # prereq must come before course
        adj[prereq].append(course)
        indeg[course] += 1
    # Seed with every node that has no prerequisites.
    ready = deque(node for node in range(n) if indeg[node] == 0)
    order: List[int] = []
    while ready:
        node = ready.popleft()
        order.append(node)
        for nxt in adj[node]:
            indeg[nxt] -= 1
            if indeg[nxt] == 0:
                ready.append(nxt)
    # Emitting fewer than n nodes means some never became free: a cycle.
    return order if len(order) == n else []
def topo_sort_dfs(n: int, edges: List[tuple]) -> List[int]:
    """DFS-based topological sort; raises ValueError on a cycle."""
    adj = defaultdict(list)
    for src, dst in edges:
        adj[src].append(dst)
    done = set()      # Fully processed nodes
    on_path = set()   # Nodes on the current recursion stack (cycle check)
    post: List[int] = []

    def visit(node):
        if node in on_path:
            raise ValueError("Cycle detected")
        if node in done:
            return
        on_path.add(node)
        for nxt in adj[node]:
            visit(nxt)
        on_path.discard(node)
        done.add(node)
        post.append(node)  # Post-order: node emitted after its descendants

    for node in range(n):
        if node not in done:
            visit(node)
    return post[::-1]  # Reverse post-order is a valid topological order
# Course schedule: can you finish all courses given prerequisites?
def can_finish(num_courses: int, prerequisites: List[tuple]) -> bool:
    """True iff the prerequisite graph is acyclic (Kahn's sort emits all nodes)."""
    order = topo_sort_kahn(num_courses, prerequisites)
    return len(order) == num_courses
Dijkstra’s Shortest Path
import heapq
WeightedGraph = Dict[int, List[tuple]]  # node → [(weight, neighbor)]

def dijkstra(graph: WeightedGraph, start: int) -> Dict[int, int]:
    """
    Single-source shortest paths in weighted graph with non-negative edges.
    O((V + E) log V) with binary heap.

    Returns a dict mapping each reached node to its shortest distance.
    """
    dist = defaultdict(lambda: float("inf"))
    dist[start] = 0
    heap = [(0, start)]  # (distance, node)
    while heap:
        d, u = heapq.heappop(heap)
        if d > dist[u]:
            continue  # Stale entry — skip
        for w, v in graph[u]:
            nd = d + w
            if nd < dist[v]:
                dist[v] = nd
                heapq.heappush(heap, (nd, v))
    return dict(dist)

def dijkstra_path(graph: WeightedGraph, start: int, end: int):
    """
    Shortest path from start to end plus its total cost.

    Returns (path_as_list, cost); ([], inf) when end is unreachable.
    Same relaxation loop as dijkstra(), with predecessor tracking added.
    """
    dist = defaultdict(lambda: float("inf"))
    dist[start] = 0
    parent = {start: None}  # Predecessor links for path reconstruction
    heap = [(0, start)]
    while heap:
        d, u = heapq.heappop(heap)
        if d > dist[u]:
            continue  # Stale entry — skip
        for w, v in graph[u]:
            nd = d + w
            if nd < dist[v]:
                dist[v] = nd
                parent[v] = u
                heapq.heappush(heap, (nd, v))
    if end not in parent:
        return [], float("inf")  # end never reached
    # Walk predecessor links back from end, then reverse.
    path = []
    node = end
    while node is not None:
        path.append(node)
        node = parent.get(node)
    return path[::-1], dist[end]
Union-Find (Disjoint Set Union)
class UnionFind:
    """
    Union-Find with path compression + union by rank.
    Operations: O(alpha(n)) ≈ O(1) amortized.
    """
    def __init__(self, n: int):
        self.parent = list(range(n))
        self.rank = [0] * n
        self.count = n  # Number of distinct components

    def find(self, x: int) -> int:
        """Return the representative root of x's component."""
        if self.parent[x] != x:
            self.parent[x] = self.find(self.parent[x])  # Path compression
        return self.parent[x]

    def union(self, x: int, y: int) -> bool:
        """Merge the components of x and y; False if already connected."""
        px, py = self.find(x), self.find(y)
        if px == py:
            return False  # Already connected
        # Union by rank: attach the shallower tree under the deeper root.
        if self.rank[px] < self.rank[py]:
            px, py = py, px
        self.parent[py] = px
        if self.rank[px] == self.rank[py]:
            self.rank[px] += 1  # Equal ranks: merged tree grows one level
        self.count -= 1
        return True

    def connected(self, x: int, y: int) -> bool:
        """True iff x and y are in the same component."""
        return self.find(x) == self.find(y)
# Number of islands using Union-Find
def num_islands(grid: List[List[str]]) -> int:
    """Count 4-directionally connected groups of "1" cells."""
    if not grid:
        return 0
    rows, cols = len(grid), len(grid[0])
    uf = UnionFind(rows * cols)
    islands = 0
    for r in range(rows):
        for c in range(cols):
            if grid[r][c] != "1":
                continue
            islands += 1  # Count the cell as a fresh island...
            # ...then merge with land to the right and below; each successful
            # union means two counted islands were really one.
            for rr, cc in ((r, c + 1), (r + 1, c)):
                if rr < rows and cc < cols and grid[rr][cc] == "1":
                    if uf.union(r * cols + c, rr * cols + cc):
                        islands -= 1
    return islands
Algorithm Selection Guide
| Problem Type | Algorithm | Complexity |
|---|---|---|
| Shortest path, unweighted | BFS | O(V+E) |
| Shortest path, weighted (non-neg) | Dijkstra | O((V+E)logV) |
| Shortest path, negative weights | Bellman-Ford | O(VE) |
| All-pairs shortest path | Floyd-Warshall | O(V^3) |
| Minimum spanning tree | Kruskal (UF) / Prim | O(E log E) |
| Cycle detection | DFS with color / UF | O(V+E) |
| Dependency ordering | Topological sort | O(V+E) |
| Connected components | Union-Find / BFS/DFS | O(V+E) |
Frequently Asked Questions
What is the time complexity of BFS and DFS?
Both BFS and DFS have O(V + E) time complexity, where V = vertices and E = edges. BFS uses O(V) space for the queue (stores up to one entire level). DFS uses O(V) space in the worst case (for the recursion stack on a linear graph), or O(h) where h is the height for balanced trees.
When should you use BFS vs DFS for graph problems?
Use BFS when you need the shortest path in an unweighted graph, or need to process nodes level by level. Use DFS when you need to explore all paths, detect cycles, do topological sorting, or the solution is far from the start. DFS is more memory-efficient for deep graphs; BFS guarantees shortest path.
What is Dijkstra's algorithm used for?
Dijkstra's algorithm finds the shortest path from a source node to all other nodes in a weighted graph with non-negative edge weights. It uses a min-heap (priority queue) to always expand the cheapest unvisited node. Time complexity O((V + E) log V). For negative weights, use Bellman-Ford instead.
{
  "@context": "https://schema.org",
  "@type": "FAQPage",
  "mainEntity": [
    {
      "@type": "Question",
      "name": "What is the time complexity of BFS and DFS?",
      "acceptedAnswer": {
        "@type": "Answer",
        "text": "Both BFS and DFS have O(V + E) time complexity, where V = vertices and E = edges. BFS uses O(V) space for the queue (stores up to one entire level). DFS uses O(V) space in the worst case (for the recursion stack on a linear graph), or O(h) where h is the height for balanced trees."
      }
    },
    {
      "@type": "Question",
      "name": "When should you use BFS vs DFS for graph problems?",
      "acceptedAnswer": {
        "@type": "Answer",
        "text": "Use BFS when you need the shortest path in an unweighted graph, or need to process nodes level by level. Use DFS when you need to explore all paths, detect cycles, do topological sorting, or the solution is far from the start. DFS is more memory-efficient for deep graphs; BFS guarantees shortest path."
      }
    },
    {
      "@type": "Question",
      "name": "What is Dijkstra's algorithm used for?",
      "acceptedAnswer": {
        "@type": "Answer",
        "text": "Dijkstra's algorithm finds the shortest path from a source node to all other nodes in a weighted graph with non-negative edge weights. It uses a min-heap (priority queue) to always expand the cheapest unvisited node. Time complexity O((V + E) log V). For negative weights, use Bellman-Ford instead."
      }
    }
  ]
}