Exercises Notebook
Converted from `exercises.ipynb` for web reading.
Graph Algorithms - Exercises
This notebook contains 10 progressive exercises for 03-Graph-Algorithms. Each exercise has a learner workspace followed by a complete reference solution. The examples emphasize graph math used in retrieval, GNNs, spectral methods, and network analysis.
Code cell 2
# Plotting defaults for the notebook: use seaborn's theme when it is
# installed, otherwise fall back to matplotlib's bundled port of the style.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    # seaborn missing: the "seaborn-v0_8-whitegrid" sheet approximates it.
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False
# Shared figure/font sizing so every exercise plot renders consistently.
mpl.rcParams.update({
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
})
np.random.seed(42)  # reproducible randomness across reruns
print("Plot setup complete.")
Code cell 3
import numpy as np
import numpy.linalg as la
from collections import deque, defaultdict
import heapq
# Fixed print format and RNG seed so exercise output is reproducible.
np.set_printoptions(precision=8, suppress=True)
np.random.seed(42)
def header(title):
    """Print *title* framed above and below by '=' rules of matching length."""
    rule = "=" * len(title)
    print("\n" + rule)
    print(title)
    print(rule)
def check_true(name, cond):
    """Print a PASS/FAIL line for a boolean condition and return it as bool."""
    passed = bool(cond)
    status = "PASS" if passed else "FAIL"
    print(f"{status} - {name}")
    return passed
def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for numeric closeness; echo both values on failure."""
    ok = np.allclose(got, expected, atol=tol, rtol=tol)
    status = "PASS" if ok else "FAIL"
    print(f"{status} - {name}")
    if not ok:
        print(' got =', got)
        print(' expected=', expected)
    return ok
def adj_from_edges(n, edges, directed=False, weighted=False):
    """Build an n x n float adjacency matrix from an edge list.

    Each edge is (u, v), or (u, v, w) when *weighted* is True (unweighted
    edges get weight 1.0). Undirected graphs set both A[u, v] and A[v, u].
    """
    A = np.zeros((n, n), dtype=float)
    for edge in edges:
        if weighted:
            u, v, w = edge
        else:
            u, v = edge
            w = 1.0
        A[u, v] = w
        if not directed:
            A[v, u] = w
    return A
def components(A):
    """Return connected components of an undirected adjacency matrix.

    Components are lists of vertex indices, discovered by BFS starting
    from each unvisited vertex in ascending order.
    """
    n = A.shape[0]
    visited = set()
    result = []
    for start in range(n):
        if start in visited:
            continue
        visited.add(start)
        queue = deque([start])
        group = []
        while queue:
            u = queue.popleft()
            group.append(u)
            for v in np.flatnonzero(A[u]):
                v = int(v)
                if v not in visited:
                    visited.add(v)
                    queue.append(v)
        result.append(group)
    return result
def laplacian(A):
    """Unnormalized graph Laplacian L = D - A, with D the degree diagonal."""
    degrees = A.sum(axis=1)
    return np.diag(degrees) - A
def softmax(x):
    """Numerically stable softmax: shift by the max before exponentiating."""
    arr = np.asarray(x, float)
    exps = np.exp(arr - arr.max())
    return exps / exps.sum()
print("Chapter 11 helper setup complete.")
Exercise 1: BFS Shortest Paths
Use BFS for unweighted shortest paths.
Code cell 5
# Your Solution
# Exercise 1 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 1.")
Code cell 6
# Solution
# Exercise 1 - BFS Shortest Paths
# Unweighted shortest paths: the first time BFS reaches a vertex, the
# recorded distance is optimal.
header("Exercise 1: BFS")
A = adj_from_edges(6, [(0, 1), (1, 2), (0, 3), (3, 4), (4, 5), (2, 5)])
dist = np.full(6, np.inf)
dist[0] = 0
frontier = deque([0])
while frontier:
    node = frontier.popleft()
    for nbr in np.flatnonzero(A[node]):
        if np.isinf(dist[nbr]):   # unvisited
            dist[nbr] = dist[node] + 1
            frontier.append(int(nbr))
print(dist)
check_close("distance to 5", dist[5], 3)
Exercise 2: Dijkstra
Compute weighted shortest paths with a heap.
Code cell 8
# Your Solution
# Exercise 2 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 2.")
Code cell 9
# Solution
# Exercise 2 - Dijkstra with a binary heap (lazy deletion of stale entries).
header("Exercise 2: Dijkstra")
adj = {0: [(1, 2), (2, 5)], 1: [(2, 1), (3, 4)], 2: [(3, 1)], 3: []}
d = [float('inf')] * 4
d[0] = 0
pq = [(0, 0)]
while pq:
    dist_u, node = heapq.heappop(pq)
    if dist_u != d[node]:
        continue   # stale heap entry: a shorter path was already found
    for nbr, wt in adj[node]:
        cand = dist_u + wt
        if cand < d[nbr]:
            d[nbr] = cand
            heapq.heappush(pq, (cand, nbr))
print(d)
check_close("shortest to 3", d[3], 4)
Exercise 3: Topological Sort
Sort a DAG with Kahn's algorithm.
Code cell 11
# Your Solution
# Exercise 3 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 3.")
Code cell 12
# Solution
# Exercise 3 - Topological Sort (Kahn's algorithm)
header("Exercise 3: topo sort")
edges = [(0, 2), (1, 2), (2, 3), (1, 3)]
n = 4
ind = [0] * n
adj = [[] for _ in range(n)]
for u, v in edges:
    adj[u].append(v)
    ind[v] += 1
# Seed the queue with every zero in-degree vertex.
q = deque([i for i in range(n) if ind[i] == 0])
order = []
while q:
    u = q.popleft()
    order.append(u)
    for v in adj[u]:
        ind[v] -= 1
        # Idiom fix: the original used `q.append(v) if ind[v]==0 else None`,
        # a conditional expression abused for its side effect.
        if ind[v] == 0:
            q.append(v)
print(order)
pos = {v: i for i, v in enumerate(order)}
check_true("valid topo order", all(pos[u] < pos[v] for u, v in edges))
Exercise 4: Cycle Detection
Detect a cycle in a directed graph using DFS colors.
Code cell 14
# Your Solution
# Exercise 4 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 4.")
Code cell 15
# Solution
# Exercise 4 - Cycle Detection via DFS 3-coloring
header("Exercise 4: cycle detection")
adj = {0: [1], 1: [2], 2: [0], 3: [1]}
color = defaultdict(int)  # 0 = unseen, 1 = on current DFS stack, 2 = finished
def dfs(u):
    """Return True iff a directed cycle is reachable from u."""
    # Fix: the original declared `global has` here but never assigned it;
    # the dead declaration is removed.
    color[u] = 1
    for v in adj.get(u, []):
        if color[v] == 1:          # back edge to a gray vertex -> cycle
            return True
        if color[v] == 0 and dfs(v):
            return True
    color[u] = 2
    return False
has = any(dfs(u) for u in adj if color[u] == 0)
check_true("cycle exists", has)
Exercise 5: Connected Components
Count components in an undirected graph.
Code cell 17
# Your Solution
# Exercise 5 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 5.")
Code cell 18
# Solution
# Exercise 5 - Connected Components
# Builds an undirected graph with three pieces ({0,1,2}, {3,4}, {5,6})
# and counts them with the shared BFS helper.
header("Exercise 5: components")
A = adj_from_edges(7, [(0, 1), (1, 2), (3, 4), (5, 6)])
comps = components(A)
print(comps)
check_true("three components", len(comps) == 3)
Exercise 6: Kruskal MST
Find a minimum spanning tree by Kruskal's algorithm.
Code cell 20
# Your Solution
# Exercise 6 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 6.")
Code cell 21
# Solution
# Exercise 6 - Kruskal MST: scan edges in weight order, joining components
# with a simple union-find (no path compression needed at this size).
header("Exercise 6: MST")
edges = [(0, 1, 1), (1, 2, 2), (0, 2, 4), (2, 3, 1), (1, 3, 5)]
parent = list(range(4))
def find(x):
    # Walk parent pointers up to the component root.
    root = x
    while parent[root] != root:
        root = parent[root]
    return root
cost = 0
chosen = []
for u, v, w in sorted(edges, key=lambda e: e[2]):
    ru, rv = find(u), find(v)
    if ru != rv:               # edge joins two different components
        parent[ru] = rv
        cost += w
        chosen.append((u, v, w))
print(chosen, cost)
check_close("MST cost", cost, 4)
Exercise 7: PageRank
Run a few PageRank iterations on a directed graph.
Code cell 23
# Your Solution
# Exercise 7 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 7.")
Code cell 24
# Solution
# Exercise 7 - PageRank via power iteration
header("Exercise 7: PageRank")
A = adj_from_edges(4, [(0, 1), (1, 2), (2, 0), (2, 3), (3, 2)], directed=True)
# Row-normalize into a transition matrix; the epsilon guards zero rows.
row_sums = A.sum(axis=1, keepdims=True) + 1e-12
P = A / row_sums
alpha = 0.85          # damping factor
pr = np.ones(4) / 4   # uniform initial distribution
for _ in range(50):
    pr = alpha * P.T @ pr + (1 - alpha) / 4
print(pr)
check_close("probability", pr.sum(), 1.0)
check_true("nonnegative", np.all(pr >= 0))
Exercise 8: Betweenness Toy Count
Count how often node 1 lies on shortest paths in a path graph.
Code cell 26
# Your Solution
# Exercise 8 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 8.")
Code cell 27
# Solution
# Exercise 8 - Betweenness Toy Count
header("Exercise 8: betweenness toy")
# Path graph 0-1-2-3: node 1 is interior to the unique shortest paths
# for pairs (0,2) and (0,3), but not (2,3).
pairs = [(0, 2), (0, 3), (2, 3)]
count = 0
for s, t in pairs:
    lo, hi = min(s, t), max(s, t)
    if lo < 1 < hi:
        count += 1
print("count", count)
check_true("node 1 count", count == 2)
Exercise 9: A* with Zero Heuristic
Show A* reduces to Dijkstra when heuristic is zero.
Code cell 29
# Your Solution
# Exercise 9 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 9.")
Code cell 30
# Solution
# Exercise 9 - A* with Zero Heuristic
# Fix: the original never materialized the heuristic the exercise is about,
# and lacked a stale-entry skip. With h(v) = 0 the priority f = g + h equals
# g, so A* expands vertices exactly like Dijkstra; distances are unchanged.
header("Exercise 9: A star zero heuristic")
adj = {0: [(1, 1), (2, 4)], 1: [(2, 2), (3, 5)], 2: [(3, 1)], 3: []}
def h(v):
    # Zero heuristic: admissible and consistent, so A* reduces to Dijkstra.
    return 0
g = [float('inf')] * 4
g[0] = 0
pq = [(g[0] + h(0), 0)]   # heap ordered by f(v) = g(v) + h(v)
while pq:
    f_u, u = heapq.heappop(pq)
    if f_u > g[u] + h(u):
        continue   # stale entry: u was already settled via a shorter path
    for v, w in adj[u]:
        if g[u] + w < g[v]:
            g[v] = g[u] + w
            heapq.heappush(pq, (g[v] + h(v), v))
check_close("distance", g[3], 4)
Exercise 10: Algorithmic Complexity
Compare adjacency matrix and list neighbor scans.
Code cell 32
# Your Solution
# Exercise 10 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# (Intentionally left as scratch space for your own attempt.)
print("Learner workspace ready for Exercise 10.")
Code cell 33
# Solution
# Exercise 10 - Algorithmic Complexity
header("Exercise 10: complexity")
n = 1000      # number of vertices
degree = 10   # average neighbors per vertex (sparse graph)
# Scanning one vertex's neighbors costs O(n) on an adjacency matrix
# (whole row) but only O(degree) on an adjacency list.
matrix_scan = n
list_scan = degree
print("matrix scan", matrix_scan, "list scan", list_scan)
check_true("list better for sparse graph", list_scan < matrix_scan)