Exercises Notebook
Converted from `exercises.ipynb` for web reading.
Functions and Mappings - Exercises
This notebook contains 10 progressive exercises for 03-Functions-and-Mappings. Each exercise has a learner workspace followed by a complete reference solution. The goal is fluent foundational math for later linear algebra, calculus, probability, and ML sections.
Code cell 2
# Plotting setup: prefer seaborn's colorblind-friendly whitegrid theme and
# fall back to matplotlib's bundled equivalent when seaborn is not installed.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl

try:
    import seaborn as sns
    sns.set_theme(style="whitegrid", palette="colorblind")
    HAS_SNS = True
except ImportError:
    plt.style.use("seaborn-v0_8-whitegrid")
    HAS_SNS = False

# Shared figure defaults applied to every plot in this notebook.
mpl.rcParams.update({
    "figure.figsize": (10, 6),
    "figure.dpi": 120,
    "font.size": 13,
    "axes.titlesize": 15,
    "axes.labelsize": 13,
    "xtick.labelsize": 11,
    "ytick.labelsize": 11,
    "legend.fontsize": 11,
    "legend.framealpha": 0.85,
    "lines.linewidth": 2.0,
    "axes.spines.top": False,
    "axes.spines.right": False,
    "savefig.bbox": "tight",
    "savefig.dpi": 150,
})

# Fixed seed so any randomized demo data is reproducible.
np.random.seed(42)
print("Plot setup complete.")
Code cell 3
import numpy as np
import numpy.linalg as la
from decimal import Decimal, getcontext
from itertools import product
# Compact float formatting so array printouts in the checks stay readable.
np.set_printoptions(precision=8, suppress=True)
# Fixed seed: all randomized exercise data is reproducible run-to-run.
np.random.seed(42)
def header(title):
    """Print *title* framed above and below by '=' rules of matching length."""
    bar = "=" * len(title)
    print(f"\n{bar}\n{title}\n{bar}")
def check_true(name, cond):
    """Print PASS/FAIL for a (truthy) condition and return it as a bool."""
    passed = bool(cond)
    status = "PASS" if passed else "FAIL"
    print(f"{status} - {name}")
    return passed
def check_close(name, got, expected, tol=1e-8):
    """Print PASS/FAIL for numeric closeness (tol used as both atol and rtol).

    On failure, also echo the actual and expected values for debugging.
    Returns the boolean result so callers can chain checks.
    """
    passed = np.allclose(got, expected, atol=tol, rtol=tol)
    if passed:
        print(f"PASS - {name}")
    else:
        print(f"FAIL - {name}")
        print(' got =', got)
        print(' expected=', expected)
    return passed
def powerset(s):
    """Return all 2**len(s) subsets of *s* as a list of sets, in bitmask order."""
    items = list(s)
    n = len(items)
    subsets = []
    for mask in range(1 << n):
        # Bit i of the mask selects items[i] into this subset.
        subsets.append({items[i] for i in range(n) if mask >> i & 1})
    return subsets
# Fixed: message previously said "Chapter 01" but this notebook is
# 03-Functions-and-Mappings (see the notebook intro).
print("Chapter 03 helper setup complete.")
Exercise 1: Domain and Codomain
Evaluate a function on a finite domain and list its image.
Code cell 5
# Your Solution
# Exercise 1 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 1."
print(status)
Code cell 6
# Solution
# Exercise 1 - Domain and Codomain
# Evaluate x -> x^2 on a finite domain and inspect its image.
header("Exercise 1: image")
domain = set(range(-3, 4))
square = lambda x: x * x
image = {square(x) for x in domain}
print("image", image)
# Squares are never negative, and +/-x collide, so the map is not injective.
check_true("image subset nonnegative", all(y >= 0 for y in image))
check_true("not injective", len(image) < len(domain))
Exercise 2: Injective and Surjective
Test injectivity and surjectivity for finite maps.
Code cell 8
# Your Solution
# Exercise 2 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 2."
print(status)
Code cell 9
# Solution
# Exercise 2 - Injective and Surjective
# A finite map is injective iff its values are distinct, and surjective
# onto C iff its value set equals C.
header("Exercise 2: injective surjective")
D = {0, 1, 2}
C = {0, 1, 2, 3}
mapping = {0: 1, 1: 2, 2: 3}
vals = list(mapping.values())
check_true("injective", len(set(vals)) == len(vals))
check_true("not surjective onto C", set(vals) != C)
Exercise 3: Composition
Verify the composition identity $(g \circ f)(x) = g(f(x))$ for $f(x) = 2x + 1$ and $g(y) = y^2$, i.e. that $(g \circ f)(x) = (2x+1)^2$.
Code cell 11
# Your Solution
# Exercise 3 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 3."
print(status)
Code cell 12
# Solution
# Exercise 3 - Composition
# Check (g o f)(x) = g(f(x)) against the closed form (2x+1)^2.
header("Exercise 3: composition")
def f(x):
    return 2 * x + 1
def g(y):
    return y * y
xs = np.arange(5)
comp = np.array([g(f(x)) for x in xs])
check_close("composition", comp, (2 * xs + 1) ** 2)
Exercise 4: Inverse Function
Recover inputs through an invertible affine map.
Code cell 14
# Your Solution
# Exercise 4 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 4."
print(status)
Code cell 15
# Solution
# Exercise 4 - Inverse Function
# For invertible A, the affine map y = Ax + b is undone by solving A x = y - b.
header("Exercise 4: inverse")
A = np.array([[2.0, 1.0], [1.0, 1.0]])
b = np.array([0.5, -1.0])
x = np.array([3.0, 2.0])
y = A @ x + b
x_rec = la.solve(A, y - b)
check_close("inverse recovers x", x_rec, x)
Exercise 5: Monotonicity
Check a monotone increasing sequence generated by a function.
Code cell 17
# Your Solution
# Exercise 5 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 5."
print(status)
Code cell 18
# Solution
# Exercise 5 - Monotonicity
# exp is strictly increasing, so every consecutive difference is positive.
header("Exercise 5: monotonicity")
grid = np.linspace(-2, 2, 21)
values = np.exp(grid)
check_true("strictly increasing", np.all(np.diff(values) > 0))
Exercise 6: Lipschitz Estimate
Estimate a Lipschitz constant for $\tanh$ on sampled points (its true Lipschitz constant is $1$).
Code cell 20
# Your Solution
# Exercise 6 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 6."
print(status)
Code cell 21
# Solution
# Exercise 6 - Lipschitz Estimate
# Sampled secant slopes of tanh should never exceed its Lipschitz constant 1
# (small tolerance absorbs discretization/float error).
header("Exercise 6: Lipschitz")
sample = np.linspace(-5, 5, 200)
values = np.tanh(sample)
slopes = np.abs(np.diff(values) / np.diff(sample))
print("max sampled slope", slopes.max())
check_true("tanh is 1-Lipschitz", slopes.max() <= 1 + 1e-3)
Exercise 7: Fixed Point Iteration
Iterate $x \mapsto \cos(x)$ and observe convergence to the fixed point of $x = \cos(x)$.
Code cell 23
# Your Solution
# Exercise 7 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 7."
print(status)
Code cell 24
# Solution
# Exercise 7 - Fixed Point Iteration
# cos is a contraction near its fixed point, so repeated application
# converges to the solution of x = cos(x).
header("Exercise 7: fixed point")
x = 1.0
for _ in range(30):
    x = np.cos(x)
print("fixed point", x)
check_close("x approx cos x", x, np.cos(x), tol=1e-10)
Exercise 8: Function as Layer
Represent a dense neural-network layer as an affine function.
Code cell 26
# Your Solution
# Exercise 8 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 8."
print(status)
Code cell 27
# Solution
# Exercise 8 - Function as Layer
# A dense layer is the affine map X -> X W^T + b applied row-wise.
header("Exercise 8: dense layer")
X = np.array([[1.0, 2.0], [3.0, 4.0]])
W = np.array([[1.0, -1.0], [0.5, 2.0]])
b = np.array([0.1, -0.2])
Y = X @ W.T + b
check_close("shape", np.array(Y.shape), np.array([2, 2]))
# Row-wise view: output row i equals W @ x_i + b.
check_close("first row", Y[0], W @ X[0] + b)
Exercise 9: Many-to-One Mapping
Show how argmax maps many score vectors to one class id.
Code cell 29
# Your Solution
# Exercise 9 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 9."
print(status)
Code cell 30
# Solution
# Exercise 9 - Many-to-One Mapping
# argmax collapses each score vector to a single class id, so many distinct
# score vectors can share one label.
header("Exercise 9: argmax map")
S = np.array([[1.0, 2.0, 0.0], [0.0, 3.0, 1.0], [2.0, 0.0, 1.0]])
y = np.argmax(S, axis=1)
print("classes", y)
check_close("expected", y, np.array([1, 1, 0]))
Exercise 10: Softmax Mapping
Map logits to the probability simplex.
Code cell 32
# Your Solution
# Exercise 10 - learner workspace
# Write your solution here, then run the reference solution below to compare.
status = "Learner workspace ready for Exercise 10."
print(status)
Code cell 33
# Solution
# Exercise 10 - Softmax Mapping
# Softmax maps logits onto the probability simplex; subtracting the max
# logit before exp is the standard overflow-safe formulation.
header("Exercise 10: softmax")
z = np.array([2.0, 1.0, -1.0])
shifted = np.exp(z - z.max())
p = shifted / shifted.sum()
print("p", p)
check_close("sums to one", p.sum(), 1.0)
check_true("positive", np.all(p > 0))