Exercises Notebook — Math for LLMs

Graph Neural Networks

Graph Theory / Graph Neural Networks

Run notebook
Private notes
0/8000

Notes stay private to your browser until account sync is configured.

Exercises Notebook

Exercises Notebook

Converted from exercises.ipynb for web reading.

Graph Neural Networks - Exercises

This notebook contains 10 progressive exercises for 05-Graph-Neural-Networks. Each exercise has a learner workspace followed by a complete reference solution. The examples emphasize graph math used in retrieval, GNNs, spectral methods, and network analysis.

Code cell 2

"""Plot styling for the exercises notebook (seaborn is optional)."""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

# Prefer seaborn's colorblind-friendly theme; fall back to matplotlib's
# bundled port of the same whitegrid style when seaborn is not installed.
try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False

# Shared figure defaults: readable fonts, trimmed spines, tight exports.
_RC_OVERRIDES = {
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
}
mpl.rcParams.update(_RC_OVERRIDES)

np.random.seed(42)  # deterministic runs across the notebook
print("Plot setup complete.")

Code cell 3

# Core numeric and graph-helper imports used by every exercise below.
import numpy as np
import numpy.linalg as la
from collections import deque, defaultdict
import heapq

# Compact, stable float printing so arrays compare easily by eye.
np.set_printoptions(precision=8, suppress=True)
np.random.seed(42)  # reproducible randomness for the sampling exercises

def header(title):
    """Print *title* framed by '=' rules sized to the title length."""
    rule = "=" * len(title)
    print("\n" + rule)
    print(title)
    print(rule)

def check_true(name, cond):
    """Print PASS/FAIL for a boolean condition and return it as a bool."""
    result = bool(cond)
    status = "PASS" if result else "FAIL"
    print(f"{status} - {name}")
    return result

def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for numeric closeness; `tol` is used as both atol and rtol."""
    matched = np.allclose(got, expected, atol=tol, rtol=tol)
    print(f"{'PASS' if matched else 'FAIL'} - {name}")
    if not matched:
        # Show both sides to make the mismatch easy to diagnose.
        print('  got     =', got)
        print('  expected=', expected)
    return matched

def adj_from_edges(n, edges, directed=False, weighted=False):
    """Build a dense n x n adjacency matrix from an edge list.

    Edges are (u, v) pairs, or (u, v, w) triples when weighted=True.
    Undirected graphs get symmetric entries.
    """
    A = np.zeros((n, n), dtype=float)
    for edge in edges:
        if weighted:
            u, v, w = edge
        else:
            u, v = edge
            w = 1.0
        A[u, v] = w
        if not directed:
            A[v, u] = w
    return A

def components(A):
    """Return the connected components of adjacency matrix A via BFS.

    Each component is a list of node indices in discovery order.
    """
    n = A.shape[0]
    visited = set()
    result = []
    for start in range(n):
        if start in visited:
            continue
        visited.add(start)
        frontier = deque([start])
        group = []
        while frontier:
            node = frontier.popleft()
            group.append(node)
            # Any nonzero entry counts as an edge (weights included).
            for nbr in np.flatnonzero(A[node]):
                nbr = int(nbr)
                if nbr not in visited:
                    visited.add(nbr)
                    frontier.append(nbr)
        result.append(group)
    return result

def laplacian(A):
    """Return the combinatorial graph Laplacian L = D - A (row-sum degrees)."""
    degree_matrix = np.diag(A.sum(axis=1))
    return degree_matrix - A

def softmax(x):
    """Numerically stable softmax of a vector (shift-by-max trick)."""
    arr = np.asarray(x, float)
    exps = np.exp(arr - arr.max())
    return exps / exps.sum()

print("Chapter 11 helper setup complete.")

Exercise 1: One-Hop Message Passing

Aggregate neighbor features by computing the matrix product $AX$.

Code cell 5

# Your Solution
# Exercise 1 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: the setup-cell helpers (adj_from_edges, check_close, header) are in scope.
print("Learner workspace ready for Exercise 1.")

Code cell 6

# Solution
# Exercise 1 - One-Hop Message Passing
# One round of message passing: each row of A @ X sums the features of
# that node's neighbors.
header("Exercise 1: message passing")
adjacency = adj_from_edges(3, [(0, 1), (1, 2)])
features = np.array([[1., 0.], [0., 1.], [2., 1.]])
messages = adjacency @ features
check_close("node 1 gets 0 and 2", messages[1], features[0] + features[2])

Exercise 2: GCN Normalization

Compute $D^{-1/2}(A+I)D^{-1/2}XW$.

Code cell 8

# Your Solution
# Exercise 2 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: add self-loops (A + I) before normalizing by degree.
print("Learner workspace ready for Exercise 2.")

Code cell 9

# Solution
# Exercise 2 - GCN Normalization
header("Exercise 2: GCN layer")
A_hat = adj_from_edges(3, [(0, 1), (1, 2)]) + np.eye(3)  # adjacency with self-loops
X = np.eye(3)
W = np.ones((3, 2))
deg = A_hat.sum(axis=1)
D_inv_sqrt = np.diag(1 / np.sqrt(deg))
S = D_inv_sqrt @ A_hat @ D_inv_sqrt  # symmetric degree normalization
H = S @ X @ W
print(H)
check_close("shape", np.array(H.shape), np.array([3, 2]))

Exercise 3: GraphSAGE Mean

Aggregate each node with mean neighbor features.

Code cell 11

# Your Solution
# Exercise 3 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: divide the aggregated features (A @ X) by each node's degree.
print("Learner workspace ready for Exercise 3.")

Code cell 12

# Solution
# Exercise 3 - GraphSAGE Mean
header("Exercise 3: GraphSAGE mean")
adjacency = adj_from_edges(3, [(0, 1), (1, 2)])
feats = np.array([[1., 0.], [0., 2.], [3., 1.]])
degrees = adjacency.sum(axis=1, keepdims=True)
neighbor_mean = (adjacency @ feats) / (degrees + 1e-12)  # epsilon guards isolated nodes
check_close("node 1 mean", neighbor_mean[1], (feats[0] + feats[2]) / 2)

Exercise 4: GAT Attention

Normalize attention coefficients over neighbors.

Code cell 14

# Your Solution
# Exercise 4 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: the setup cell's softmax helper normalizes scores over neighbors.
print("Learner workspace ready for Exercise 4.")

Code cell 15

# Solution
# Exercise 4 - GAT Attention
header("Exercise 4: GAT attention")
raw_scores = np.array([1.0, 0.5, -0.5])
alpha = softmax(raw_scores)  # attention weights sum to one over neighbors
print(alpha)
check_close("sum one", alpha.sum(), 1.0)
check_true("largest score largest weight", alpha[0] > alpha[1] > alpha[2])

Exercise 5: Permutation Equivariance

Permute nodes and verify the equivariance identity $(PAP^\top)(PX) = P(AX)$.

Code cell 17

# Your Solution
# Exercise 5 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: build a permutation matrix P from a reordered identity, e.g. np.eye(3)[perm].
print("Learner workspace ready for Exercise 5.")

Code cell 18

# Solution
# Exercise 5 - Permutation Equivariance
header("Exercise 5: equivariance")
A = adj_from_edges(3, [(0, 1), (1, 2)])
X = np.arange(6.).reshape(3, 2)
order = [2, 0, 1]
P = np.eye(3)[order]  # permutation matrix for the node reordering
# Permuting first, then aggregating, equals aggregating, then permuting.
permuted_then_aggregated = (P @ A @ P.T) @ (P @ X)
aggregated_then_permuted = P @ (A @ X)
check_close("equivariance", permuted_then_aggregated, aggregated_then_permuted)

Exercise 6: Oversmoothing

Apply repeated normalized aggregation and observe variance shrinking.

Code cell 20

# Your Solution
# Exercise 6 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: row-normalize A + I, apply it repeatedly to X, and track feature variance.
print("Learner workspace ready for Exercise 6.")

Code cell 21

# Solution
# Exercise 6 - Oversmoothing
header("Exercise 6: oversmoothing")
# Path graph with self-loops; S is the row-normalized (averaging) operator.
A = adj_from_edges(5, [(0, 1), (1, 2), (2, 3), (3, 4)]) + np.eye(5)
S = A / A.sum(axis=1, keepdims=True)
X = np.eye(5)
variances = []  # renamed from `vars`, which shadowed the builtin
for _ in range(20):
    X = S @ X
    variances.append(np.var(X, axis=0).mean())
print(variances[0], variances[-1])
check_true("variance shrinks", variances[-1] < variances[0])

Exercise 7: Readout Invariance

Show sum pooling is invariant to node ordering.

Code cell 23

# Your Solution
# Exercise 7 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: compare X.sum(axis=0) before and after reordering the rows of X.
print("Learner workspace ready for Exercise 7.")

Code cell 24

# Solution
# Exercise 7 - Readout Invariance
# Sum pooling ignores row order, so any node permutation gives the same readout.
header("Exercise 7: readout")
feats = np.array([[1., 2.], [0., 1.], [3., 0.]])
order = [2, 0, 1]
check_close("sum pooling invariant", feats.sum(axis=0), feats[order].sum(axis=0))

Exercise 8: WL Color Refinement

Perform one Weisfeiler-Lehman color refinement step.

Code cell 26

# Your Solution
# Exercise 8 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: a node's new color is determined by (own color, sorted neighbor colors).
print("Learner workspace ready for Exercise 8.")

Code cell 27

# Solution
# Exercise 8 - WL Color Refinement
header("Exercise 8: WL refinement")
A = adj_from_edges(4, [(0, 1), (1, 2), (2, 3)])
colors = [0, 0, 0, 0]  # every node starts with the same color
# Signature = own color plus the sorted multiset of neighbor colors.
signatures = [
    (colors[i], tuple(sorted(colors[j] for j in np.flatnonzero(A[i]))))
    for i in range(4)
]
# Map each distinct signature to a compact new color id.
relabel = {sig: k for k, sig in enumerate(sorted(set(signatures)))}
refined = [relabel[sig] for sig in signatures]
print(refined)
check_true("endpoints differ from middle", refined[0] != refined[1])

Exercise 9: Heterophily Warning

Show neighbor averaging can blur opposite labels.

Code cell 29

# Your Solution
# Exercise 9 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: average the neighbor labels (A @ y divided by degree) and inspect node 1.
print("Learner workspace ready for Exercise 9.")

Code cell 30

# Solution
# Exercise 9 - Heterophily Warning
header("Exercise 9: heterophily")
A = adj_from_edges(4, [(0, 1), (1, 2), (2, 3)])
labels = np.array([0, 1, 0, 1], float)
# Mean neighbor label per node; epsilon guards against isolated nodes.
neighbor_avg = (A @ labels) / (A.sum(axis=1) + 1e-12)
print(neighbor_avg)
check_true("node 1 neighbors mostly opposite", neighbor_avg[1] < 0.5)

Exercise 10: Neighbor Sampling

Sample a fixed number of neighbors per node reproducibly.

Code cell 32

# Your Solution
# Exercise 10 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: a seeded np.random.default_rng makes the sampling reproducible.
print("Learner workspace ready for Exercise 10.")

Code cell 33

# Solution
# Exercise 10 - Neighbor Sampling
header("Exercise 10: neighbor sampling")
rng = np.random.default_rng(0)  # fixed seed -> reproducible samples
adj = {0: [1, 2, 3], 1: [0, 2], 2: [0, 1, 3], 3: [0, 2]}
sample = {}
for u in adj:
    # Draw at most two distinct neighbors per node.
    picked = rng.choice(adj[u], size=min(2, len(adj[u])), replace=False)
    sample[u] = sorted(picked.tolist())
print(sample)
check_true("at most two neighbors", all(len(v) <= 2 for v in sample.values()))

Skill Check

Test this lesson

Answer 4 quick questions to lock in the lesson and feed your adaptive practice queue.

--
Score
0/4
Answered
Not attempted
Status
1

Which module does this lesson belong to?

2

Which section is covered in this lesson content?

3

Which term is most central to this lesson?

4

What is the best way to use this lesson for real learning?

Your answers save locally first, then sync when account storage is available.
Practice queue