@ProfAvery · Last active October 8, 2024 17:43
California State University, Fullerton - CPSC 481 - Empirical gradient and gradient descent
The gist is two files. The first, empirical_gradient.py (the name the second script imports it under), approximates a derivative numerically with a forward difference:

#!/usr/bin/env python


def derivative(f, h=1e-5):
    """Return a function that approximates f' with a forward difference."""

    def df(x):
        return (f(x + h) - f(x)) / h

    return df


if __name__ == "__main__":
    f = lambda x: x * x
    df = derivative(f)    # approximates f'(x) = 2x
    ddf = derivative(df)  # approximates f''(x) = 2

    print("x\tf(x)\tf'(x)\tf''(x)")
    print("-\t----\t-----\t------")
    for i in range(0, 10):
        print(f"{i}\t{f(i)}\t{round(df(i))}\t{round(ddf(i))}")
The second file (call it gradient_descent.py; whatever its name, it must sit in the same directory as empirical_gradient.py for the import to resolve) walks downhill along the empirical gradient:

#!/usr/bin/env python

import random

from empirical_gradient import derivative


def gradient_descent(f, location, delta=1e-2, tolerance=1e-5):
    """Step against the empirical gradient until the slope is nearly flat."""
    df = derivative(f)
    while abs(df(location)) > tolerance:
        gradient = df(location)
        location -= delta * gradient
    return location


f = lambda x: (x - 1) ** 2 + 2  # minimum at x = 1, where f(1) = 2
start = random.uniform(-10, 10)
print(f"Descending from x = {start}...")

x_min = gradient_descent(f, start)  # renamed from "min" to avoid shadowing the builtin
y = f(x_min)
print(f"The minimum value of f(x) is f({round(x_min)}) = {round(y)}")