Exercises Notebook — Math for LLMs

Spectral Graph Theory

Graph Theory / Spectral Graph Theory

Run notebook
Exercises Notebook

Exercises Notebook

Converted from exercises.ipynb for web reading.

Spectral Graph Theory - Exercises

This notebook contains 10 progressive exercises for 04-Spectral-Graph-Theory. Each exercise has a learner workspace followed by a complete reference solution. The examples emphasize graph math used in retrieval, GNNs, spectral methods, and network analysis.

Code cell 2

# Plot configuration shared by every figure in this notebook.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

# Use seaborn's theme when it is installed; otherwise fall back to the
# equivalent style sheet bundled with matplotlib.
try:
    import seaborn as sns
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False
else:
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True

# Figure defaults: readable fonts, no top/right spines, tight bounding
# boxes on save, and a slightly higher DPI for crisp inline rendering.
mpl.rcParams.update({
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
})
np.random.seed(42)
print("Plot setup complete.")

Code cell 3

import numpy as np
import numpy.linalg as la
from collections import deque, defaultdict
import heapq

# Show 8 decimal places and suppress scientific notation in array printouts.
np.set_printoptions(precision=8, suppress=True)
# Fix the RNG seed so exercise outputs are reproducible across runs.
np.random.seed(42)

def header(title):
    """Print *title* framed above and below by '=' rules sized to the text."""
    rule = "=" * len(title)
    print("\n" + rule)
    print(title)
    print(rule)

def check_true(name, cond):
    """Print a PASS/FAIL line for a boolean condition and return it as bool."""
    result = bool(cond)
    status = "PASS" if result else "FAIL"
    print(f"{status} - {name}")
    return result

def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for numeric closeness; echo both values on failure.

    The same tolerance is applied as both absolute and relative tolerance.
    """
    ok = np.allclose(got, expected, atol=tol, rtol=tol)
    print(f"{'PASS' if ok else 'FAIL'} - {name}")
    if not ok:
        print('  got     =', got)
        print('  expected=', expected)
    return ok

def adj_from_edges(n, edges, directed=False, weighted=False):
    """Build an n x n float adjacency matrix from an edge list.

    Each edge is (u, v) — or (u, v, w) when ``weighted`` — and is mirrored
    to (v, u) unless ``directed`` is set.
    """
    adjacency = np.zeros((n, n), dtype=float)
    for edge in edges:
        u, v, w = edge if weighted else (edge[0], edge[1], 1.0)
        adjacency[u, v] = w
        if not directed:
            adjacency[v, u] = w
    return adjacency

def components(A):
    """Return connected components of an undirected adjacency matrix.

    Each component is a list of vertex indices discovered by breadth-first
    search, so vertices appear in BFS order from the lowest-numbered seed.
    """
    n = A.shape[0]
    visited = set()
    result = []
    for start in range(n):
        if start in visited:
            continue
        visited.add(start)
        frontier = deque([start])
        current = []
        while frontier:
            node = frontier.popleft()
            current.append(node)
            for nbr in np.flatnonzero(A[node]):
                nbr = int(nbr)
                if nbr not in visited:
                    visited.add(nbr)
                    frontier.append(nbr)
        result.append(current)
    return result

def laplacian(A):
    """Return the combinatorial graph Laplacian L = D - A."""
    degrees = A.sum(axis=1)
    return np.diag(degrees) - A

def softmax(x):
    """Numerically stable softmax: shift by the max before exponentiating."""
    x = np.asarray(x, float)
    shifted = np.exp(x - x.max())
    return shifted / shifted.sum()

print("Chapter 11 helper setup complete.")

Exercise 1: Graph Laplacian

Compute $L = D - A$ and verify row sums are zero.

Code cell 5

# Your Solution
# Exercise 1 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 1.")

Code cell 6

# Solution
# Exercise 1 - Graph Laplacian: L = D - A on a 4-node path graph.
header("Exercise 1: Laplacian")
path_adj = adj_from_edges(4, [(0, 1), (1, 2), (2, 3)])
path_lap = laplacian(path_adj)
# Rows of L sum to zero: the diagonal degree cancels the off-diagonal -A terms.
check_close("row sums", path_lap.sum(axis=1), np.zeros(4))
# L is positive semidefinite; allow tiny negative numerical noise.
check_true("PSD", np.min(la.eigvalsh(path_lap)) > -1e-10)

Exercise 2: Multiplicity of Zero

Use Laplacian zero eigenvalues to count components.

Code cell 8

# Your Solution
# Exercise 2 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 2.")

Code cell 9

# Solution
# Exercise 2 - the multiplicity of Laplacian eigenvalue 0 counts components.
header("Exercise 2: zero multiplicity")
adj = adj_from_edges(5, [(0, 1), (1, 2), (3, 4)])
spectrum = la.eigvalsh(laplacian(adj))
# Two (near-)zero eigenvalues <=> two components: {0,1,2} and {3,4}.
zero_count = np.sum(spectrum < 1e-10)
print(spectrum)
check_true("two components", zero_count == 2)

Exercise 3: Fiedler Vector

Compute the second eigenvector for a path graph.

Code cell 11

# Your Solution
# Exercise 3 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 3.")

Code cell 12

# Solution
# Exercise 3 - Fiedler vector: eigenvector of the second-smallest eigenvalue.
header("Exercise 3: Fiedler")
adj = adj_from_edges(5, [(0, 1), (1, 2), (2, 3), (3, 4)])
eigvals, eigvecs = la.eigh(laplacian(adj))
fiedler = eigvecs[:, 1]
print("lambda2", eigvals[1], "fiedler", fiedler)
# A connected graph has exactly one zero eigenvalue, so lambda_2 > 0.
check_true("lambda2 positive for connected graph", eigvals[1] > 0)

Exercise 4: Normalized Laplacian

Compute $I - D^{-1/2} A D^{-1/2}$.

Code cell 14

# Your Solution
# Exercise 4 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 4.")

Code cell 15

# Solution
# Exercise 4 - symmetric normalized Laplacian I - D^{-1/2} A D^{-1/2}.
header("Exercise 4: normalized Laplacian")
adj = adj_from_edges(3, [(0, 1), (1, 2)])
inv_sqrt_deg = np.diag(1 / np.sqrt(adj.sum(axis=1)))
norm_lap = np.eye(3) - inv_sqrt_deg @ adj @ inv_sqrt_deg
check_close("symmetric", norm_lap, norm_lap.T)
# Normalized-Laplacian eigenvalues are nonnegative; allow numerical noise.
check_true("eigenvalues nonnegative", np.min(la.eigvalsh(norm_lap)) > -1e-10)

Exercise 5: Graph Smoothness

Verify $x^\top L x = \sum_{(i,j)} (x_i - x_j)^2$ for undirected edges.

Code cell 17

# Your Solution
# Exercise 5 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 5.")

Code cell 18

# Solution
# Exercise 5 - Dirichlet energy identity: x^T L x equals the sum of squared
# differences across edges.
header("Exercise 5: smoothness")
edge_list = [(0, 1), (1, 2), (2, 3)]
lap = laplacian(adj_from_edges(4, edge_list))
signal = np.array([1., 2., 4., 7.])
quadratic_form = signal @ lap @ signal
pairwise_sum = sum((signal[i] - signal[j]) ** 2 for i, j in edge_list)
check_close("Dirichlet energy", quadratic_form, pairwise_sum)

Exercise 6: Spectral Clustering Toy

Partition a graph by the sign of the Fiedler vector.

Code cell 20

# Your Solution
# Exercise 6 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 6.")

Code cell 21

# Solution
# Exercise 6 - partition two weakly-bridged triangles of nodes by the sign
# of the Fiedler vector.
header("Exercise 6: spectral clustering")
adj = adj_from_edges(6, [(0, 1), (1, 2), (3, 4), (4, 5), (2, 3)])
_, eigvecs = la.eigh(laplacian(adj))
labels = eigvecs[:, 1] > 0
print(labels)
check_true("two nonempty sides", labels.any() and (~labels).any())

Exercise 7: Random Walk Matrix

Build $P = D^{-1} A$ and check row stochasticity.

Code cell 23

# Your Solution
# Exercise 7 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 7.")

Code cell 24

# Solution
# Exercise 7 - random-walk transition matrix P = D^{-1} A on a 4-cycle.
header("Exercise 7: random walk")
adj = adj_from_edges(4, [(0, 1), (1, 2), (2, 3), (3, 0)])
# Dividing each row by its degree makes the matrix row-stochastic.
transition = adj / adj.sum(axis=1, keepdims=True)
check_close("rows sum one", transition.sum(axis=1), np.ones(4))

Exercise 8: Graph Fourier Transform

Project a signal onto Laplacian eigenvectors and reconstruct.

Code cell 26

# Your Solution
# Exercise 8 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 8.")

Code cell 27

# Solution
# Exercise 8 - graph Fourier transform in the Laplacian eigenbasis U.
header("Exercise 8: graph Fourier")
adj = adj_from_edges(4, [(0, 1), (1, 2), (2, 3)])
_, basis = la.eigh(laplacian(adj))
signal = np.array([1., 0., 2., 1.])
coeffs = basis.T @ signal       # analysis: project onto the eigenvectors
reconstructed = basis @ coeffs  # synthesis: U is orthogonal, so U U^T = I
check_close("reconstruction", reconstructed, signal)

Exercise 9: Heat Diffusion

Apply $e^{-tL}$ through eigendecomposition.

Code cell 29

# Your Solution
# Exercise 9 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 9.")

Code cell 30

# Solution
# Exercise 9 - heat diffusion e^{-tL} x computed via eigendecomposition.
header("Exercise 9: heat diffusion")
adj = adj_from_edges(4, [(0, 1), (1, 2), (2, 3)])
eigvals, basis = la.eigh(laplacian(adj))
impulse = np.array([1., 0., 0., 0.])
# Exponentiate the spectrum at t = 0.5, then map back to vertex space.
diffused = basis @ np.diag(np.exp(-0.5 * eigvals)) @ basis.T @ impulse
print(diffused)
# The zero eigenvalue's constant mode is untouched, so total mass is kept.
check_close("mass conserved", diffused.sum(), impulse.sum())

Exercise 10: Spectral Gap

Compare path and complete graph algebraic connectivity.

Code cell 32

# Your Solution
# Exercise 10 workspace: attempt the task here first, then compare your
# attempt against the reference solution in the cell below.
print("Learner workspace ready for Exercise 10.")

Code cell 33

# Solution
# Exercise 10 - algebraic connectivity (lambda_2) of a path vs the
# complete graph on 5 nodes.
header("Exercise 10: spectral gap")
path_adj = adj_from_edges(5, [(0, 1), (1, 2), (2, 3), (3, 4)])
complete_adj = np.ones((5, 5)) - np.eye(5)
gap_path = la.eigvalsh(laplacian(path_adj))[1]
gap_complete = la.eigvalsh(laplacian(complete_adj))[1]
print(gap_path, gap_complete)
# The complete graph's larger spectral gap signals stronger connectivity.
check_true("complete graph better connected", gap_complete > gap_path)