import math
import matplotlib.pyplot as plt
import numpy as np
import time
from IPython.display import clear_output
%matplotlib notebook
fig = plt.figure()
ax = fig.add_subplot(111)
plt.ion()
def get_y(x):
    # Objective f(x) = sin(x) / (x + 5), clamped to its value at x = -4 for
    # x <= -4 so iterates cannot wander toward the pole at x = -5.
    if x <= -4:
        return math.sin(-4)
    return math.sin(x) / (x + 5)
def get_dy(x):
    # Derivative of f via the quotient rule; zero on the clamped region.
    if x <= -4:
        return 0
    return ((x + 5) * math.cos(x) - math.sin(x)) / ((x + 5)**2)
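# Quick sanity check (a sketch added here, not part of the original demo): compare
# the analytic derivative get_dy against a central finite difference at a few
# points away from the clamp at x = -4. The helper name _check_dy is illustrative;
# uncomment the call to run it.
def _check_dy(points = (-3.0, 0.0, 2.5, 7.0), h = 1e-6):
    for p in points:
        approx = (get_y(p + h) - get_y(p - h)) / (2 * h)
        assert abs(approx - get_dy(p)) < 1e-4, (p, approx, get_dy(p))
#_check_dy()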
def visualize(point = 0, title = "Gradient Descent Demo"):
    # Plot f on [-5, 20) and mark the current iterate with a red dot.
    x = np.array(range(25000)) / 1000.0 - 5
    y = [get_y(xi) for xi in x]
    ax.clear()
    ax.plot(x, y)
    ax.set_xlabel("x axis")
    ax.set_ylabel("y axis")
    ax.set_title(title)
    ax.plot([point], [get_y(point)], marker='o', markersize=5, color="red")
    fig.canvas.draw()
def gradient_descent(init = 7, N = 3000, step_size = 0.01, visualize_frequency = 100):
    # Plain gradient descent: curr <- curr - step_size * f'(curr),
    # redrawing the plot every `visualize_frequency` iterations.
    curr = init
    for i in range(N):
        curr -= get_dy(curr) * step_size
        if i % visualize_frequency == 0:
            visualize(curr, "Iteration = " + str(i + 1))
            time.sleep(0.3)
    return curr
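# A non-animated variant (a sketch, not from the original notebook; the name
# gradient_descent_quiet and the tol parameter are additions): same update rule,
# but it skips plotting and stops early once the gradient magnitude is tiny.
def gradient_descent_quiet(init = 7, N = 3000, step_size = 0.01, tol = 1e-8):
    curr = init
    for i in range(N):
        grad = get_dy(curr)
        if abs(grad) < tol:
            break
        curr -= grad * step_size
    return curr
# e.g. with enough iterations, gradient_descent_quiet(init = 7, N = 20000) should
# settle near the local minimum of sin(x) / (x + 5) around x ≈ 4.6.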
#minima = gradient_descent(init = 7)
#visualize(minima, "Local Minima At " + str(minima))
# Constant where get_dy(x) ≈ -get_dy(0); used to derive the "bouncing" step size below
x = -2.4402945137518104
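# A small sketch (not in the original notebook): since get_dy(0) = 0.2, a step of
# -x / get_dy(0) moves the iterate from 0 exactly to x, and because get_dy(x) ≈ -0.2
# the next step moves it straight back to 0, which is the back-and-forth case below.
bounce_step = -x / get_dy(0)   # ≈ 12.201472568759052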
## Over-Shoot
#gradient_descent(init = 0, N = 10, step_size = 12.21, visualize_frequency = 1)
## Bouncing Back-n-Forth
#gradient_descent(init = 0, N = 50, step_size = 12.201472568759052, visualize_frequency = 1)
## Random Movements
#gradient_descent(init = 0, N = 100, step_size = 10, visualize_frequency = 1)
## Suboptimal starting position
#gradient_descent(init = 1.5, N = 20, step_size = 6, visualize_frequency = 1)
## Converge -- By Luck
gradient_descent(init = 0, N = 20, step_size = 6, visualize_frequency = 1)