Exercises Notebook — Math for LLMs

Matrix Decompositions

Advanced Linear Algebra / Matrix Decompositions

Run notebook
Private notes
0/8000

Notes stay private to your browser until account sync is configured.

Exercises Notebook

Exercises Notebook

Converted from exercises.ipynb for web reading.

Matrix Decompositions - Exercises

This notebook contains 10 progressive exercises for 08-Matrix-Decompositions. Each exercise has a learner workspace followed by a complete reference solution. Use the solution cells after making a serious attempt.

Difficulty grows from direct computation to AI-facing interpretation. Formulas use LaTeX-in-Markdown with $...$ for inline math and $$...$$ for display equations.

Code cell 2

# Plotting environment setup for the notebook.
# Prefer seaborn theming when available; otherwise fall back to the
# matplotlib-bundled seaborn-like style sheet.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False

# Shared figure defaults applied to every plot below.
_RC_DEFAULTS = {
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
}
mpl.rcParams.update(_RC_DEFAULTS)

np.random.seed(42)  # reproducible randomness for any plotted data
print("Plot setup complete.")

Code cell 3

# Core numerical imports shared by every exercise solution below.
import numpy as np
import numpy.linalg as la
import scipy.linalg as sla
from scipy import stats

# Compact fixed-point printing so check output stays readable.
np.set_printoptions(precision=8, suppress=True)
np.random.seed(42)  # reproducibility for module-level randomness

# Named hex colors used by plotting cells; keys describe the role,
# not the hue, so the palette can be swapped in one place.
COLORS = {
    "primary": "#0077BB",
    "secondary": "#EE7733",
    "tertiary": "#009988",
    "error": "#CC3311",
    "neutral": "#555555",
    "highlight": "#EE3377",
}

def header(title):
    """Print *title* framed above and below by '=' rules of matching length."""
    rule = "=" * len(title)
    print(f"\n{rule}\n{title}\n{rule}")

def check_true(name, cond):
    """Print a PASS/FAIL line for a boolean condition and return it as bool."""
    result = bool(cond)
    status = "PASS" if result else "FAIL"
    print(f"{status} - {name}")
    return result

def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for elementwise closeness (atol = rtol = tol).

    On failure the two values are echoed for inspection.  Returns the
    boolean verdict either way.
    """
    verdict = np.allclose(got, expected, atol=tol, rtol=tol)
    print(f"{'PASS' if verdict else 'FAIL'} - {name}")
    if verdict:
        return verdict
    print("  got     =", got)
    print("  expected=", expected)
    return verdict

def softmax(z, axis=-1):
    """Numerically stable softmax along *axis*.

    The max along the axis is subtracted before exponentiating so large
    logits cannot overflow; the result is unchanged mathematically.
    """
    shifted = np.asarray(z, dtype=float)
    shifted = shifted - shifted.max(axis=axis, keepdims=True)
    weights = np.exp(shifted)
    return weights / weights.sum(axis=axis, keepdims=True)

def gram_schmidt_columns(A, tol=1e-12):
    """Orthonormal basis for the column space of A via sequential Gram-Schmidt.

    Each projection is subtracted from the running residual (modified
    Gram-Schmidt ordering).  Columns whose residual norm is <= *tol* are
    treated as linearly dependent and dropped.  Returns an (m, r) matrix
    with orthonormal columns, or an (m, 0) array when none survive.
    """
    A = np.asarray(A, dtype=float)
    basis = []
    for column in A.T:
        residual = column.copy()
        for q in basis:
            residual -= (q @ residual) * q
        length = la.norm(residual)
        if length > tol:
            basis.append(residual / length)
    if not basis:
        return np.empty((A.shape[0], 0))
    return np.column_stack(basis)

def projection_matrix(A):
    """Orthogonal projector Q Q^T onto the column space of A."""
    basis = gram_schmidt_columns(A)
    return basis @ basis.T

def numerical_rank(A, tol=1e-10):
    """Numerical rank of A: the count of singular values exceeding *tol*."""
    singular_values = la.svd(np.asarray(A, dtype=float), compute_uv=False)
    return int((singular_values > tol).sum())

def stable_rank(A):
    """Stable (effective) rank of A: ||A||_F^2 / ||A||_2^2 = sum(s^2) / s_max^2.

    Less sensitive to tiny singular values than the hard rank.  Fix over
    the original: an empty matrix (zero rows or columns) previously raised
    IndexError on ``s[0]``; it now returns 0.0, matching the all-zero case.
    Values for nonzero matrices are unchanged (the 1e-15 guard is kept).
    """
    s = la.svd(np.asarray(A, dtype=float), compute_uv=False)
    if s.size == 0:  # empty matrix has no singular values
        return 0.0
    return float(np.sum(s**2) / (s[0]**2 + 1e-15))

def make_spd(n, seed=0, ridge=0.5):
    """Random symmetric positive-definite n x n matrix: G^T G + ridge * I.

    The Gram term G^T G is positive semidefinite; the ridge shifts every
    eigenvalue up by *ridge*, guaranteeing strict positive definiteness.
    """
    generator = np.random.default_rng(seed)
    G = generator.normal(size=(n, n))
    return G.T @ G + ridge * np.eye(n)

# Confirm the helper cell executed end-to-end.
print("Chapter 03 helper setup complete.")

Exercise 1: Triangular Solves

Solve $Ly=b$ and $Ux=y$ with triangular solvers.

Code cell 5

# Your Solution
# Exercise 1 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 1.")

Code cell 6

# Solution
# Exercise 1 - Triangular Solves
# Build b = L U x_true, then recover x_true with two triangular solves:
# forward substitution with L followed by back substitution with U.
header("Exercise 1: triangular solves")
L = np.array([[2., 0., 0.],
              [1., 3., 0.],
              [0.5, -1., 1.]])
U = np.array([[1., 2., -1.],
              [0., 3., 4.],
              [0., 0., 2.]])
x_true = np.array([1., -2., 0.5])
b = L @ U @ x_true
y = sla.solve_triangular(L, b, lower=True)  # forward: L y = b
x = sla.solve_triangular(U, y)              # backward: U x = y
check_close("solution", x, x_true)

Exercise 2: LU with Pivoting

Use $PA=LU$ and verify reconstruction.

Code cell 8

# Your Solution
# Exercise 2 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 2.")

Code cell 9

# Solution
# Exercise 2 - LU with Pivoting
# scipy.linalg.lu returns (P, L, U) with A = P @ L @ U, where L is
# unit lower triangular and P is a permutation matrix.
header("Exercise 2: LU")
A = np.array([[0., 2., 1.],
              [3., -1., 2.],
              [1., 1., 1.]])
P, L, U = sla.lu(A)
check_close("A = P L U", P @ L @ U, A)
check_close("L unit diagonal", np.diag(L), np.ones(3))

Exercise 3: Householder QR

Compute a QR factorization and verify orthogonality.

Code cell 11

# Your Solution
# Exercise 3 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 3.")

Code cell 12

# Solution
# Exercise 3 - Householder QR
# Reduced ("thin") QR of a tall 3x2 matrix: Q is 3x2 with orthonormal
# columns, R is 2x2 upper triangular, and Q R reconstructs A.
header("Exercise 3: QR")
A = np.array([[1., 1.],
              [1., 0.],
              [0., 1.]])
Q, R = la.qr(A, mode='reduced')
check_close("Q^T Q", Q.T @ Q, np.eye(2))
check_close("QR=A", Q @ R, A)

Exercise 4: Least Squares via QR

Solve an overdetermined system stably using QR.

Code cell 14

# Your Solution
# Exercise 4 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 4.")

Code cell 15

# Solution
# Exercise 4 - Least Squares via QR
# Solve min ||Ax - b|| through R x = Q^T b, which avoids forming the
# worse-conditioned normal-equations matrix A^T A.
header("Exercise 4: QR least squares")
t = np.arange(5.)
A = np.column_stack([np.ones(5), t])        # intercept + slope design
b = np.array([1., 2., 1.9, 3.2, 4.1])
Q, R = la.qr(A, mode='reduced')
x = sla.solve_triangular(R, Q.T @ b)
check_close("matches lstsq", x, la.lstsq(A, b, rcond=None)[0])
check_close("residual orthogonal", A.T @ (A @ x - b), np.zeros(2), tol=1e-10)

Exercise 5: Cholesky Solve

Solve an SPD system using Cholesky factors.

Code cell 17

# Your Solution
# Exercise 5 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 5.")

Code cell 18

# Solution
# Exercise 5 - Cholesky Solve
# For SPD A = L L^T: forward-solve L y = b, then back-solve L^T x = y.
header("Exercise 5: Cholesky solve")
A = make_spd(4, seed=5)
b = np.array([1., 2., -1., 0.5])
L = la.cholesky(A)                          # lower-triangular factor
y = sla.solve_triangular(L, b, lower=True)
x = sla.solve_triangular(L.T, y)
check_close("solution", A @ x, b)

Exercise 6: Normal Equations Square Conditioning

Show that $\kappa(A^T A) \approx \kappa(A)^2$.

Code cell 20

# Your Solution
# Exercise 6 - learner workspace
# Write your solution here, then run the reference solution below to compare.
print("Learner workspace ready for Exercise 6.")

Code cell 21

# Solution
# Exercise 6 - Normal Equations Square Conditioning
# cond(A^T A) is approximately cond(A)^2, so the ratio below should be
# near 1 (checked within an order of magnitude either way).
header("Exercise 6: normal equations conditioning")
A = np.vander(np.linspace(0, 1, 12), 5, increasing=True)
ratio = la.cond(A.T @ A) / (la.cond(A) ** 2)
print("ratio", ratio)
check_true("approximately squared", 0.1 < ratio < 10)

Exercise 7: Rank-Revealing QR

Use pivoted QR to identify independent columns.

Code cell 23

# Your Solution
# Exercise 7 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 7.")

Code cell 24

# Solution
# Exercise 7 - Rank-Revealing QR
# Column-pivoted QR orders |diag(R)| non-increasingly, so near-zero
# diagonal entries expose numerically dependent columns.
header("Exercise 7: pivoted QR")
A = np.array([[1., 2., 3.],
              [0., 1., 1.],
              [1., 3., 4.]])
Q, R, p = sla.qr(A, pivoting=True, mode='economic')
diag = np.abs(np.diag(R))
rank = np.sum(diag > 1e-10)
print("pivots", p, "diag", diag, "rank", rank)
check_true("rank detected", rank == la.matrix_rank(A))

Exercise 8: Randomized Range Finder

Approximate the range of a low-rank matrix using random probes.

Code cell 26

# Your Solution
# Exercise 8 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 8.")

Code cell 27

# Solution
# Exercise 8 - Randomized Range Finder
# A has exact rank 3; five Gaussian probe vectors (rank + oversampling)
# capture its range, so the projection residual drops to round-off.
header("Exercise 8: randomized range")
rng = np.random.default_rng(8)
A = rng.normal(size=(30, 3)) @ rng.normal(size=(3, 20))
Omega = rng.normal(size=(20, 5))            # random test matrix
Q, _ = la.qr(A @ Omega, mode='reduced')     # orthonormal basis for A @ Omega
err = la.norm(A - Q @ (Q.T @ A), 'fro') / la.norm(A, 'fro')
print("relative projection error", err)
check_true("range captured", err < 1e-10)

Exercise 9: Gaussian Process Log Likelihood

Compute a GP-style log determinant and quadratic term with Cholesky.

Code cell 29

# Your Solution
# Exercise 9 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 9.")

Code cell 30

# Solution
# Exercise 9 - Gaussian Process Log Likelihood
# Negative log likelihood of y ~ N(0, K) via a Cholesky factor:
# log det K = 2 * sum(log diag(L)); the quadratic term uses cho_solve.
header("Exercise 9: GP Cholesky")
X = np.linspace(0, 1, 8)[:, None]
D2 = (X - X.T) ** 2                         # pairwise squared distances
K = np.exp(-D2 / 0.2) + 1e-4 * np.eye(8)    # kernel matrix + jitter
y = np.sin(4 * X[:, 0])
L = la.cholesky(K)
alpha = sla.cho_solve((L, True), y)         # alpha = K^{-1} y
logdet = 2 * np.sum(np.log(np.diag(L)))
nll = 0.5 * y @ alpha + 0.5 * logdet + 0.5 * len(y) * np.log(2 * np.pi)
print("nll", nll)
check_true("finite likelihood", np.isfinite(nll))
check_close("solve correct", K @ alpha, y, tol=1e-8)

Exercise 10: Implicit Solve Derivative

Verify $d(A^{-1}b) = -A^{-1}(dA)\,x + A^{-1}\,db$ by finite differences.

Code cell 32

# Your Solution
# Exercise 10 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# The print is a placeholder confirming the cell ran.
print("Learner workspace ready for Exercise 10.")

Code cell 33

# Solution
# Exercise 10 - Implicit Solve Derivative
# For x(A, b) = A^{-1} b the directional derivative is
# dx = A^{-1} (db - dA @ x); compare against a forward difference.
header("Exercise 10: solve derivative")
A = make_spd(3, seed=10)
b = np.array([1., -1., 2.])
dA = np.diag([0.1, -0.2, 0.05])             # perturbation direction for A
db = np.array([0.2, 0.1, -0.1])             # perturbation direction for b
x = la.solve(A, b)
dx = la.solve(A, db - dA @ x)               # implicit differentiation
eps = 1e-6
x_fd = la.solve(A + eps * dA, b + eps * db)
check_close("finite-difference derivative", (x_fd - x) / eps, dx, tol=1e-6)