Exercises Notebook — Math for LLMs

Random Graphs

Graph Theory / Random Graphs

Run notebook
Exercises Notebook

Exercises Notebook

Converted from exercises.ipynb for web reading.

Random Graphs - Exercises

This notebook contains 10 progressive exercises for 06-Random-Graphs. Each exercise has a learner workspace followed by a complete reference solution. The examples emphasize graph math used in retrieval, GNNs, spectral methods, and network analysis.

Code cell 2

import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

# Prefer seaborn styling when it is installed; otherwise fall back to the
# matplotlib port of the same whitegrid theme.
try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False

# Shared figure defaults applied to every plot in this notebook.
_RC_OVERRIDES = {
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
}
mpl.rcParams.update(_RC_OVERRIDES)

np.random.seed(42)  # legacy global seed, kept for reproducibility
print("Plot setup complete.")

Code cell 3

# Numerical and data-structure imports used by the helper functions below.
import numpy as np
import numpy.linalg as la
from collections import deque, defaultdict
import heapq

# Compact float printing and a reproducible legacy global seed.
np.set_printoptions(precision=8, suppress=True)
np.random.seed(42)

def header(title):
    """Print *title* framed above and below by '=' rules of matching width."""
    rule = "=" * len(title)
    print("\n" + rule)
    print(title)
    print(rule)

def check_true(name, cond):
    """Print a PASS/FAIL line for a boolean condition; return it as a bool."""
    passed = bool(cond)
    status = "PASS" if passed else "FAIL"
    print(f"{status} - {name}")
    return passed

def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for numeric closeness (tol used as both atol and rtol).

    On failure, also prints the mismatching values.  Returns the bool result.
    """
    passed = np.allclose(got, expected, atol=tol, rtol=tol)
    status = "PASS" if passed else "FAIL"
    print(f"{status} - {name}")
    if not passed:
        print('  got     =', got)
        print('  expected=', expected)
    return passed

def adj_from_edges(n, edges, directed=False, weighted=False):
    """Build an n-by-n float adjacency matrix from an edge list.

    Each edge is (u, v), or (u, v, w) when weighted=True.  For undirected
    graphs the symmetric entry is filled in as well.
    """
    A = np.zeros((n, n), dtype=float)
    for edge in edges:
        if weighted:
            u, v, w = edge
        else:
            u, v = edge
            w = 1.0
        A[u, v] = w
        if not directed:
            A[v, u] = w
    return A

def components(A):
    """Return connected components of an undirected adjacency matrix.

    Runs a breadth-first search from each unvisited node; each component is
    a list of node indices in BFS discovery order.
    """
    n = A.shape[0]
    visited = set()
    result = []
    for start in range(n):
        if start in visited:
            continue
        queue = deque([start])
        visited.add(start)
        comp = []
        while queue:
            node = queue.popleft()
            comp.append(node)
            for raw in np.flatnonzero(A[node]):
                nbr = int(raw)
                if nbr not in visited:
                    visited.add(nbr)
                    queue.append(nbr)
        result.append(comp)
    return result

def laplacian(A):
    """Return the combinatorial graph Laplacian L = D - A."""
    degree_matrix = np.diag(A.sum(axis=1))
    return degree_matrix - A

def softmax(x):
    """Numerically stable softmax: subtract the max before exponentiating."""
    vec = np.asarray(x, float)
    exps = np.exp(vec - vec.max())
    return exps / exps.sum()

print("Chapter 11 helper setup complete.")

Exercise 1: Erdos-Renyi Sampling

Sample $G(n, p)$ with a fixed seed.

Code cell 5

# Your Solution
# Exercise 1 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: draw a uniform random matrix, threshold it at p above the diagonal,
# then symmetrize to get an undirected adjacency matrix.
print("Learner workspace ready for Exercise 1.")

Code cell 6

# Solution
# Exercise 1 - Erdos-Renyi Sampling
header("Exercise 1: ER graph")
gen = np.random.default_rng(1)
n_nodes, p_edge = 20, 0.15
# One uniform draw per ordered pair; keep only the strict upper triangle.
U = gen.random((n_nodes, n_nodes))
upper = np.triu((U < p_edge).astype(float), 1)
A = upper + upper.T
edge_count = A.sum() / 2
print("edges", edge_count)
check_true("symmetric", np.allclose(A, A.T))

Exercise 2: Expected Degree

Compare the empirical average degree with $(n-1)p$.

Code cell 8

# Your Solution
# Exercise 2 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: the mean row-sum of the adjacency matrix should be near (n - 1) * p.
print("Learner workspace ready for Exercise 2.")

Code cell 9

# Solution
# Exercise 2 - Expected Degree
header("Exercise 2: expected degree")
gen = np.random.default_rng(2)
n_nodes, p_edge = 200, 0.05
U = gen.random((n_nodes, n_nodes))
upper = np.triu((U < p_edge).astype(float), 1)
A = upper + upper.T
# Empirical mean degree vs the ER expectation (n - 1) * p.
mean_deg = A.sum(axis=1).mean()
theory = (n_nodes - 1) * p_edge
print(mean_deg, theory)
check_true("close", abs(mean_deg - theory) < 1.0)

Exercise 3: Degree Distribution

Compute a histogram of degrees.

Code cell 11

# Your Solution
# Exercise 3 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: np.bincount on the integer degree sequence gives the histogram.
print("Learner workspace ready for Exercise 3.")

Code cell 12

# Solution
# Exercise 3 - Degree Distribution
header("Exercise 3: degree histogram")
gen = np.random.default_rng(3)
n_nodes, p_edge = 100, 0.04
U = gen.random((n_nodes, n_nodes))
upper = np.triu((U < p_edge).astype(float), 1)
A = upper + upper.T
degrees = A.sum(axis=1).astype(int)
hist = np.bincount(degrees)
print(hist[:10])
# Every node lands in exactly one degree bucket.
check_close("hist sums to n", hist.sum(), n_nodes)

Exercise 4: Connectivity Threshold

Compare sparse and denser ER components.

Code cell 14

# Your Solution
# Exercise 4 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: sample at a sparse and a denser p and compare len(components(A)).
print("Learner workspace ready for Exercise 4.")

Code cell 15

# Solution
# Exercise 4 - Connectivity Threshold
header("Exercise 4: connectivity threshold")
gen = np.random.default_rng(4)
n_nodes = 60
comp_counts = []
# Same generator across both densities, so draws happen in a fixed order.
for p_edge in [0.02, 0.12]:
    U = gen.random((n_nodes, n_nodes))
    upper = np.triu((U < p_edge).astype(float), 1)
    A = upper + upper.T
    comp_counts.append(len(components(A)))
print(comp_counts)
check_true("denser has no more components", comp_counts[1] <= comp_counts[0])

Exercise 5: Random Walk Stationary Degree

For an undirected graph, stationary distribution is proportional to degree.

Code cell 17

# Your Solution
# Exercise 5 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: pi = deg / deg.sum() should satisfy pi @ P == pi for the walk matrix P.
print("Learner workspace ready for Exercise 5.")

Code cell 18

# Solution
# Exercise 5 - Random Walk Stationary Degree
header("Exercise 5: stationary degree")
A = adj_from_edges(4, [(0, 1), (1, 2), (1, 3)])
deg = A.sum(axis=1)
# Degree-proportional distribution; small epsilon guards isolated nodes.
pi = deg / deg.sum()
P = A / (deg[:, None] + 1e-12)
check_close("stationary", pi @ P, pi, tol=1e-10)

Exercise 6: Stochastic Block Model

Sample two communities and compare within vs between edge counts.

Code cell 20

# Your Solution
# Exercise 6 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: use edge probability 0.25 inside a community and 0.02 across, then
# count within-community vs between-community edges.
print("Learner workspace ready for Exercise 6.")

Code cell 21

# Solution
# Exercise 6 - Stochastic Block Model
header("Exercise 6: SBM")
gen = np.random.default_rng(6)
n_nodes = 40
labels = np.r_[np.zeros(20, int), np.ones(20, int)]
A = np.zeros((n_nodes, n_nodes))
# One coin flip per unordered pair (i < j): dense inside blocks, sparse across.
for i in range(n_nodes):
    for j in range(i + 1, n_nodes):
        p_pair = 0.25 if labels[i] == labels[j] else 0.02
        if gen.random() < p_pair:
            A[i, j] = A[j, i] = 1
# Count upper-triangle edges split by whether the endpoints share a label.
same_block = labels[:, None] == labels[None, :]
upper = np.triu(np.ones((n_nodes, n_nodes), dtype=bool), 1)
within = A[upper & same_block].sum()
between = A[upper & ~same_block].sum()
print(within, between)
check_true("more within edges", within > between)

Exercise 7: Preferential Attachment

Grow a tiny graph with degree-proportional attachment.

Code cell 23

# Your Solution
# Exercise 7 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: attach each new node to an existing node chosen with probability
# proportional to its current degree.
print("Learner workspace ready for Exercise 7.")

Code cell 24

# Solution
# Exercise 7 - Preferential Attachment
header("Exercise 7: preferential attachment")
gen = np.random.default_rng(7)
# Seed graph: a triangle on nodes 0, 1, 2.
A = adj_from_edges(3, [(0, 1), (1, 2), (2, 0)])
for newcomer in range(3, 10):
    degrees = A.sum(axis=1)
    attach_probs = degrees / degrees.sum()
    target = gen.choice(A.shape[0], p=attach_probs)
    # Grow the matrix by one row/column and wire the new node to its target.
    A = np.pad(A, ((0, 1), (0, 1)))
    A[newcomer, target] = A[target, newcomer] = 1
print("degrees", A.sum(axis=1))
check_true("graph has 10 nodes", A.shape == (10, 10))

Exercise 8: Giant Component

Find largest component size in a random graph.

Code cell 26

# Your Solution
# Exercise 8 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: the giant component is max(len(c) for c in components(A)).
print("Learner workspace ready for Exercise 8.")

Code cell 27

# Solution
# Exercise 8 - Giant Component
header("Exercise 8: giant component")
gen = np.random.default_rng(8)
n_nodes, p_edge = 80, 0.04
U = gen.random((n_nodes, n_nodes))
upper = np.triu((U < p_edge).astype(float), 1)
A = upper + upper.T
giant = max(len(comp) for comp in components(A))
print("largest", giant)
check_true("nontrivial component", giant > 5)

Exercise 9: Clustering in ER

Estimate global triangle density roughly.

Code cell 29

# Your Solution
# Exercise 9 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: triangles = trace(A^3) / 6 and wedges = sum of d*(d-1)/2 over degrees;
# the global clustering coefficient is 3 * triangles / wedges.
print("Learner workspace ready for Exercise 9.")

Code cell 30

# Solution
# Exercise 9 - Clustering in ER
header("Exercise 9: ER clustering")
gen = np.random.default_rng(9)
n_nodes, p_edge = 50, 0.1
U = gen.random((n_nodes, n_nodes))
upper = np.triu((U < p_edge).astype(float), 1)
A = upper + upper.T
# trace(A^3) counts each triangle 6 times (3 start vertices x 2 directions).
triangles = np.trace(la.matrix_power(A, 3)) / 6
degrees = A.sum(axis=1)
wedges = (degrees * (degrees - 1) / 2).sum()
clustering = 3 * triangles / wedges if wedges else 0
print("clustering", clustering)
check_true("valid", 0 <= clustering <= 1)

Exercise 10: Seed Reproducibility

Show fixed random seeds reproduce the same graph.

Code cell 32

# Your Solution
# Exercise 10 - learner workspace
# Write your solution here, then run the reference solution below to compare.
# Hint: two generators built with the same default_rng seed yield identical
# matrices; different seeds should differ.
print("Learner workspace ready for Exercise 10.")

Code cell 33

# Solution
# Exercise 10 - Seed Reproducibility
header("Exercise 10: reproducibility")

def sample(seed):
    """Draw a small symmetric ER adjacency matrix from a seeded generator."""
    gen = np.random.default_rng(seed)
    U = gen.random((10, 10))
    upper = np.triu((U < 0.2).astype(float), 1)
    return upper + upper.T

check_close("same seed", sample(1), sample(1))
check_true("different seed differs", not np.allclose(sample(1), sample(2)))