my_optimizer.py
import numpy as np
import matplotlib.pyplot as plt
def objective(x, y):
    # bowl-shaped test function: f(x, y) = x**2 + y**2
    return x**2.0 + y**2.0

def objective_gradient(x, y):
    # analytic gradient of the objective: (df/dx, df/dy) = (2x, 2y)
    return 2.0 * x, 2.0 * y
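
# A minimal sanity check, not part of the original script: a central finite
# difference should agree with the analytic gradient above. The helper name
# check_gradient and its step size h are assumptions made for illustration.
def check_gradient(x, y, h=1e-6):
    # approximate each partial derivative numerically
    num_dx = (objective(x + h, y) - objective(x - h, y)) / (2 * h)
    num_dy = (objective(x, y + h) - objective(x, y - h)) / (2 * h)
    # compare against the analytic gradient
    return np.allclose((num_dx, num_dy), objective_gradient(x, y), atol=1e-4)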
def show_3D():
    # define range for input
    r_min, r_max = -1.0, 1.0
    # sample input range uniformly at 0.1 increments
    xaxis = np.arange(r_min, r_max, 0.1)
    yaxis = np.arange(r_min, r_max, 0.1)
    # create a mesh from the axes
    x, y = np.meshgrid(xaxis, yaxis)
    # compute targets
    results = objective(x, y)
    # create a surface plot with the jet color scheme
    figure = plt.figure()
    # figure.gca(projection='3d') was removed in Matplotlib 3.6; add_subplot is the replacement
    axis = figure.add_subplot(projection='3d')
    axis.plot_surface(x, y, results, cmap='jet')
    # show the plot
    plt.show()
def show_2D():
    # define range for input
    bounds = np.asarray([[-1.0, 1.0], [-1.0, 1.0]])
    # sample input range uniformly at 0.1 increments
    xaxis = np.arange(bounds[0, 0], bounds[0, 1], 0.1)
    yaxis = np.arange(bounds[1, 0], bounds[1, 1], 0.1)
    # create a mesh from the axes
    x, y = np.meshgrid(xaxis, yaxis)
    # compute targets
    results = objective(x, y)
    # create a filled contour plot with 50 levels and jet color scheme
    plt.contourf(x, y, results, levels=50, cmap='jet')
    # show the plot
    plt.show()
def test_func(x):
    # 1-D test function: f(x) = x**2
    return x**2

def test_gradient(x):
    # derivative of test_func: f'(x) = 2*x
    return 2*x
def gradient_descent(gradient, start, learning_rate, n_iterations, tolerance=1e-02):
    # plot the base function in blue for reference
    x_axis = np.arange(-100, 101, 1)
    y_axis = test_func(x_axis)
    plt.plot(x_axis, y_axis, c="blue", linestyle=':')
    vector = start
    changed_x = []
    changed_y = []
    for i in range(n_iterations):
        # step in the direction of steepest descent
        diff = -learning_rate*gradient(vector)
        # stop early once the update is smaller than the tolerance
        if np.all(np.abs(diff) <= tolerance):
            break
        print("Iteration {} | Past: {}, New: {}".format(i, vector, vector+diff))
        vector += diff
        print("(x, y): ({}, {})".format(vector, test_func(vector)))
        changed_x.append(vector)
        changed_y.append(test_func(vector))
    # debug output: the sampled curve used for the blue reference plot
    print(x_axis)
    print(test_func(x_axis))
    # plot the algorithm's progress in red
    plt.scatter(changed_x, changed_y, 10, c='red')
    return vector
if __name__ == '__main__':
    # the lambda is the same derivative as test_gradient
    print(gradient_descent(lambda v: 2*v, 100, learning_rate=0.05, n_iterations=100))
    plt.show()
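
# A minimal sketch, not part of the original script: the same update rule
# applied to the 2-D bowl through objective_gradient. The helper name
# gradient_descent_2d and its default parameters are assumptions made
# for illustration, not the author's API.
def gradient_descent_2d(start, learning_rate=0.1, n_iterations=50, tolerance=1e-02):
    point = np.asarray(start, dtype=float)
    for _ in range(n_iterations):
        # evaluate the analytic gradient at the current point
        gx, gy = objective_gradient(point[0], point[1])
        step = -learning_rate * np.array([gx, gy])
        # stop early once the update is smaller than the tolerance
        if np.all(np.abs(step) <= tolerance):
            break
        point += step
    return point

# e.g. gradient_descent_2d([0.8, -0.9]) should approach the minimum at (0, 0)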