Exercises Notebook
Converted from `exercises.ipynb` for web reading.
Hilbert Spaces: Exercises
This notebook contains 8 progressive exercises covering inner products, Hilbert norms, projection, least squares, orthonormal bases, Riesz representation, operators, and kernel previews. Each exercise has a working area followed by a full solution.
Code cell 2
# Plotting setup: prefer seaborn's theme when available, otherwise fall back
# to matplotlib's bundled seaborn-compatible style.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False

# Shared figure defaults applied to every plot in the notebook.
_RC_DEFAULTS = {
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
}
mpl.rcParams.update(_RC_DEFAULTS)

np.random.seed(42)  # legacy global seed kept for any np.random.* calls
print("Plot setup complete.")
Code cell 3
# Numeric setup shared by all exercise cells.
import numpy as np
import matplotlib.pyplot as plt
np.set_printoptions(precision=4, suppress=True)  # compact, zero-suppressed array printing
rng = np.random.default_rng(11)  # seeded generator so exercise checks are reproducible
def header(title):
    """Print *title* framed above and below by '=' rules of matching length."""
    rule = "=" * len(title)
    print("\n" + rule)
    print(title)
    print(rule)
def check_true(condition, message):
    """Print a PASS/FAIL line for *condition*, then assert that it holds."""
    passed = bool(condition)
    label = "PASS" if passed else "FAIL"
    print(f"{label} - {message}")
    assert passed
def check_close(actual, expected, tol=1e-8, message="values close"):
    """Print PASS/FAIL for np.allclose(actual, expected), then assert closeness.

    Both values are echoed on failure to make debugging easier.
    """
    is_close = np.allclose(actual, expected, atol=tol, rtol=tol)
    label = "PASS" if is_close else "FAIL"
    print(f"{label} - {message}")
    if not is_close:
        print("expected:\n", expected)
        print("actual:\n", actual)
    assert is_close

print("Hilbert exercise helpers ready.")
Exercise 1: Check an Inner Product
Let $\langle x, y \rangle_W = x^\top W y$ with $W = \begin{pmatrix} 2 & 1 \\ 1 & 2 \end{pmatrix}$. Verify numerically that $\langle \cdot, \cdot \rangle_W$ is symmetric and positive definite on several random vectors.
Code cell 5
# Your Solution
# Symmetric weight matrix for the candidate inner product <x, y>_W = x^T W y.
W = np.array([[2.0, 1.0], [1.0, 2.0]])
print("Try checking symmetry and positive definiteness for random vectors.")
Code cell 6
# Solution
W = np.array([[2.0, 1.0], [1.0, 2.0]])
# A symmetric matrix defines an inner product iff all eigenvalues are positive.
eigvals = np.linalg.eigvalsh(W)
assert np.all(eigvals > 0)
# Spot-check symmetry <x, y>_W = <y, x>_W and positivity <x, x>_W > 0.
for _ in range(10):
    x = rng.normal(size=2)
    y = rng.normal(size=2)
    assert np.isclose(x @ W @ y, y @ W @ x)
    assert x @ W @ x > 0
print("W is positive definite, so the weighted form is an inner product.")
Exercise 2: Parallelogram Law
Show with a concrete counterexample that the norm $\|x\|_1 = |x_1| + |x_2|$ on $\mathbb{R}^2$ is not induced by an inner product.
Code cell 8
# Your Solution
# The two standard basis vectors of R^2 — a natural counterexample pair.
u = np.array([1.0, 0.0])
v = np.array([0.0, 1.0])
print("Compute both sides of the parallelogram law using the L1 norm.")
Code cell 9
# Solution
def l1(x):
    """Taxicab norm ||x||_1 = sum of absolute entries."""
    return np.abs(x).sum()

u = np.array([1.0, 0.0])
v = np.array([0.0, 1.0])
# Parallelogram law: ||u+v||^2 + ||u-v||^2 should equal 2(||u||^2 + ||v||^2).
lhs = l1(u + v) ** 2 + l1(u - v) ** 2
rhs = 2 * (l1(u) ** 2 + l1(v) ** 2)
print("lhs =", lhs, "rhs =", rhs)
assert lhs != rhs  # 8 != 4: the law fails for this pair
print("The L1 norm fails the parallelogram law, so it is not a Hilbert norm.")
Exercise 3: Projection Onto a Subspace
Project $x = (3, 2, 1)^\top$ onto the span of $a_1 = (1, 1, 0)^\top$ and $a_2 = (0, 1, 1)^\top$. Verify the residual is orthogonal to both spanning vectors.
Code cell 11
# Your Solution
# Vector to project, and the matrix whose columns span the subspace.
x = np.array([3.0, 2.0, 1.0])
A = np.array([[1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
print("Use P = A(A^T A)^{-1}A^T or solve least squares.")
Code cell 12
# Solution
x = np.array([3.0, 2.0, 1.0])
A = np.array([[1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
# Normal equations: (A^T A) coef = A^T x gives the coordinates in the column basis.
gram = A.T @ A
coef = np.linalg.solve(gram, A.T @ x)
projection = A @ coef
residual = x - projection
print("coefficients:", coef)
print("projection:", projection)
print("A^T residual:", A.T @ residual)
# Orthogonality of the residual to the subspace characterizes the projection.
assert np.allclose(A.T @ residual, 0.0)
Exercise 4: Least Squares as Orthogonal Projection
Fit a line $y \approx \beta_0 + \beta_1 t$ to four points and verify $X^\top (y - X\beta) = 0$.
Code cell 14
# Your Solution
# Four (t, y) observations to fit with a straight line.
t = np.array([0.0, 1.0, 2.0, 3.0])
y = np.array([1.0, 2.0, 2.9, 4.2])
print("Build X with columns [1, t], solve least squares, then inspect X.T @ residual.")
Code cell 15
# Solution
t = np.array([0.0, 1.0, 2.0, 3.0])
y = np.array([1.0, 2.0, 2.9, 4.2])
# Design matrix: an intercept column next to the time column.
X = np.column_stack([np.ones_like(t), t])
beta = np.linalg.lstsq(X, y, rcond=None)[0]
residual = y - X @ beta
print("beta:", beta)
print("X^T residual:", X.T @ residual)
# The least-squares residual is orthogonal to every column of X.
assert np.allclose(X.T @ residual, 0.0)
Exercise 5: Gram-Schmidt and Parseval
Orthonormalize three linearly independent vectors and verify the Parseval identity for a test vector.
Code cell 17
# Your Solution
# Columns of V are three independent vectors; z is the vector to expand in the new basis.
V = np.array([[1.0, 1.0, 0.0], [0.0, 1.0, 1.0], [1.0, 0.0, 1.0]])
z = np.array([2.0, -1.0, 0.5])
print("Construct Q, compute Q.T @ Q, and compare norm(z)^2 with sum of squared coordinates.")
Code cell 18
# Solution
def gram_schmidt(V):
    """Return a matrix whose columns orthonormalize the columns of V."""
    basis = []
    for column in V.T:
        w = column.astype(float).copy()
        # Strip the components along every previously accepted direction.
        for q in basis:
            w = w - (q @ w) * q
        basis.append(w / np.linalg.norm(w))
    return np.column_stack(basis)

V = np.array([[1.0, 1.0, 0.0], [0.0, 1.0, 1.0], [1.0, 0.0, 1.0]])
z = np.array([2.0, -1.0, 0.5])
Q = gram_schmidt(V)
coords = Q.T @ z
print("Q^T Q:")
print(Q.T @ Q)
print("norm squared:", z @ z)
print("coordinate energy:", coords @ coords)
assert np.allclose(Q.T @ Q, np.eye(3))
# Parseval: the energy of z equals the energy of its orthonormal coordinates.
assert np.allclose(z @ z, coords @ coords)
Exercise 6: Riesz Representative in Weighted Geometry
For $G = \begin{pmatrix} 3 & 1 \\ 1 & 2 \end{pmatrix}$ and $a = (4, -1)^\top$, find the representative $h$ satisfying $\langle h, x \rangle_G = a^\top x$ for all $x$.
Code cell 20
# Your Solution
# G defines the weighted inner product; a defines the linear functional a^T x.
G = np.array([[3.0, 1.0], [1.0, 2.0]])
a = np.array([4.0, -1.0])
print("Solve G h = a.")
Code cell 21
# Solution
G = np.array([[3.0, 1.0], [1.0, 2.0]])
a = np.array([4.0, -1.0])
# <h, x>_G = h^T G x equals a^T x for every x exactly when G h = a.
h = np.linalg.solve(G, a)
# Spot-check the representation property on random vectors.
for _ in range(5):
    x = rng.normal(size=2)
    assert np.isclose(a @ x, x @ G @ h)
print("Riesz representative:", h)
Exercise 7: Positive Self-Adjoint Operator
Create a covariance matrix from data. Verify it is symmetric positive semidefinite and interpret the largest eigenvector as the first PCA direction.
Code cell 23
# Your Solution
# Placeholder isotropic sample; the solution cell mixes in correlations.
data = rng.normal(size=(80, 3))
print("Center the data, form C = X.T @ X / n, and diagonalize C.")
Code cell 24
# Solution
# Mix independent Gaussians so the covariance has clearly distinct directions.
mixing = np.array([[2.0, 0.0, 0.0], [0.5, 1.0, 0.0], [0.0, 0.2, 0.3]])
data = rng.normal(size=(80, 3)) @ mixing
centered = data - data.mean(axis=0)
C = centered.T @ centered / len(centered)
# eigh returns ascending eigenvalues; reorder to descending for PCA.
vals, vecs = np.linalg.eigh(C)
order = np.argsort(vals)[::-1]
vals, vecs = vals[order], vecs[:, order]
print("eigenvalues:", vals)
print("first PCA direction:", vecs[:, 0])
assert np.allclose(C, C.T)
assert vals[-1] > -1e-12  # smallest eigenvalue nonnegative up to roundoff
Exercise 8: Kernel Gram Matrix Preview
Build an RBF kernel Gram matrix for one-dimensional points and verify it is positive semidefinite up to numerical precision.
Code cell 26
# Your Solution
# Ten evenly spaced 1-D points, shaped (10, 1) so pairwise differences broadcast.
xs = np.linspace(-1.5, 1.5, 10)[:, None]
print("Compute K_ij = exp(-||x_i - x_j||^2 / (2 sigma^2)) and inspect eigenvalues.")
Code cell 27
# Solution
def rbf_gram(points, bandwidth):
    """Gram matrix K_ij = exp(-(x_i - x_j)^2 / (2 bandwidth^2)) for a column of points."""
    sqdist = (points - points.T) ** 2
    return np.exp(-sqdist / (2 * bandwidth**2))

xs = np.linspace(-1.5, 1.5, 10)[:, None]
sigma = 0.7
K = rbf_gram(xs, sigma)
eigvals = np.linalg.eigvalsh(K)
print("kernel eigenvalues:", eigvals)
# eigvalsh returns ascending order, so eigvals[0] is the smallest.
assert eigvals[0] > -1e-10
print("The finite Gram matrix is PSD, consistent with an implicit Hilbert feature space.")
Exercise 9: Projection Matrix Diagnostics
Build the orthogonal projector onto the column space of a design matrix. Verify symmetry, idempotence, and residual orthogonality.
Code cell 29
# Your Solution
# Design matrix for a line fit and the observation vector to project.
A = np.array([[1.0, 0.0], [1.0, 1.0], [1.0, 2.0]])
y = np.array([1.0, 2.0, 2.5])
print("Construct P and check projection identities.")
Code cell 30
# Solution
header("Exercise 9: Projection Matrix Diagnostics")
A = np.array([[1.0, 0.0], [1.0, 1.0], [1.0, 2.0]])
y = np.array([1.0, 2.0, 2.5])
# Orthogonal projector onto col(A). Solving the normal equations with
# np.linalg.solve avoids forming an explicit matrix inverse, which is less
# accurate (and slower) than a direct solve for the same result.
P = A @ np.linalg.solve(A.T @ A, A.T)
y_hat = P @ y  # projection of y onto col(A)
r = y - y_hat  # residual, should be orthogonal to col(A)
check_close(P, P.T, message="projector is symmetric")
check_close(P @ P, P, message="projector is idempotent")
check_close(A.T @ r, np.zeros(A.shape[1]), message="residual is orthogonal to subspace")
print("Takeaway: least squares is an orthogonal projection in a Hilbert space.")
Exercise 10: Explicit Feature Map Kernel
Create a finite feature map, form its Gram matrix, and verify that positive semidefiniteness follows from inner products.
Code cell 32
# Your Solution
# Three scalar inputs for the polynomial feature map phi(x) = (1, x, x^2).
x = np.array([-1.0, 0.0, 2.0])
print("Build Phi and K = Phi Phi^T.")
Code cell 33
# Solution
header("Exercise 10: Explicit Feature Map Kernel")
x = np.array([-1.0, 0.0, 2.0])
# Polynomial feature map phi(x) = (1, x, x^2); rows of Phi are the mapped points.
Phi = np.column_stack([np.ones_like(x), x, x**2])
# The kernel matrix is just the matrix of feature-space inner products.
K = Phi @ Phi.T
eigs = np.linalg.eigvalsh(K)
print("Gram matrix:", K, sep="\n")
print("eigenvalues:", np.round(eigs, 8))
check_true(eigs.min() > -1e-10, "Gram matrix is positive semidefinite")
print("Takeaway: kernel methods are Hilbert-space inner products without explicit high-dimensional coordinates.")
Closing Reflection
The exercises move from axioms to geometry, then from geometry to ML objects. That is the right mental order: first know what an inner product gives you, then recognize where algorithms are using it.