main.py
# Interactive driver: minimize a user-entered function with either gradient
# descent with line search or Newton's method.
from cvx_optimization_iterative_methods import NewtonMethod, GradientDescentWithLineSearch

while True:
    # Ask which method to run; any other input exits the program.
    print('Enter the method you want to use:\n\n(1) gradient descent\n(2) Newton method')
    option = input()
    if option not in ('1', '2'):
        exit(0)

    if option == '1':
        print('Gradient Descent with line search:\n')
        print("Enter the number of the function's variables:")
        n = int(input())
        if n < 1:
            raise Exception('Number of variables must be at least one.')
        print("Enter the function's symbols:")
        function_symbols = [input() for _ in range(n)]
        print('Enter the function:')
        function = input()
        gd = GradientDescentWithLineSearch(function_symbols, function)
        print('Enter the initial point in the same order as the variables:')
        initial_point = [float(input()) for _ in range(n)]
        print(f'Entered function: "f = {function}" at initial point: {initial_point}')
        print('Starting gradient descent with line search:')
        gd.gradient_descent(initial_point)
        print('\n')
    elif option == '2':
        print('Newton method:\n')
        print("Enter the number of the function's variables:")
        n = int(input())
        if n < 1:
            raise Exception('Number of variables must be at least one.')
        print("Enter the function's symbols:")
        function_symbols = [input() for _ in range(n)]
        print('Enter the function:')
        function = input()
        newton = NewtonMethod(function_symbols, function)
        print('Enter the initial point in the same order as the variables:')
        initial_point = [float(input()) for _ in range(n)]
        print(f'Entered function: "f = {function}" at initial point: {initial_point}')
        print('Starting Newton optimization method:')
        newton.newton_opt(initial_point)
        print('\n')
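
# Example session (a sketch, not captured output; it assumes the function string
# is a sympy-style expression in the entered symbols, which this script does not
# itself verify, and the iterate output is produced by the imported
# GradientDescentWithLineSearch class, not by this file):
#
#   Enter the method you want to use:
#
#   (1) gradient descent
#   (2) Newton method
#   1
#   Gradient Descent with line search:
#
#   Enter the number of the function's variables:
#   2
#   Enter the function's symbols:
#   x
#   y
#   Enter the function:
#   x**2 + y**2
#   Enter the initial point in the same order as the variables:
#   1.0
#   1.0
#   Entered function: "f = x**2 + y**2" at initial point: [1.0, 1.0]
#   Starting gradient descent with line search:
#   ... iterates printed by GradientDescentWithLineSearch ...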