-
Notifications
You must be signed in to change notification settings - Fork 42
/
Copy pathmeta_optimization.py
62 lines (44 loc) · 1.46 KB
/
meta_optimization.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
import numpy as np
from hyperactive import Hyperactive
from hyperactive.optimizers import BayesianOptimizer
from gradient_free_optimizers import RandomRestartHillClimbingOptimizer
def meta_opt(opt_para, n_runs=33):
    """Meta-objective: score one hill-climbing parameter configuration.

    Runs ``RandomRestartHillClimbingOptimizer`` on the negated Ackley
    function ``n_runs`` times (one run per random seed) and returns the
    sum of the per-run best scores, which Hyperactive then maximizes.

    Parameters
    ----------
    opt_para : dict
        Candidate optimizer parameters with keys ``"epsilon"``,
        ``"n_neighbours"`` and ``"n_iter_restart"``.
    n_runs : int, optional
        Number of independently seeded runs to aggregate over.
        Defaults to 33, matching the original behavior.

    Returns
    -------
    float
        Summed best score across all runs (higher is better).
    """

    # The objective and its search space do not depend on the seed, so
    # build them once instead of once per loop iteration.
    def ackley_function(para):
        x = para["x"]
        y = para["y"]
        loss1 = -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
        loss2 = -np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
        loss3 = np.exp(1)
        loss4 = 20
        loss = loss1 + loss2 + loss3 + loss4
        # Ackley is a minimization benchmark; negate it because the
        # optimizer maximizes, so its optimum stays at (0, 0).
        return -loss

    dim_size = np.arange(-6, 6, 0.01)
    search_space = {
        "x": dim_size,
        "y": dim_size,
    }

    scores = []
    for seed in range(n_runs):
        opt = RandomRestartHillClimbingOptimizer(
            search_space,
            random_state=seed,  # vary the seed so runs are independent
            epsilon=opt_para["epsilon"],
            n_neighbours=opt_para["n_neighbours"],
            n_iter_restart=opt_para["n_iter_restart"],
        )
        opt.search(
            ackley_function,
            n_iter=100,
            verbosity=False,
        )
        scores.append(opt.best_score)

    # Same value as np.array(scores).sum(), without the array copy.
    return sum(scores)
# Outer (meta-level) search space: the hyper-parameter ranges of the
# inner hill-climbing optimizer that meta_opt evaluates.
search_space = {
    "epsilon": list(np.arange(0.01, 0.1, 0.01)),
    "n_neighbours": list(range(1, 10)),
    "n_iter_restart": list(range(2, 12)),
}

# Drive the meta-optimization with Bayesian optimization over the
# hyper-parameter space defined above.
hyper = Hyperactive()
hyper.add_search(
    meta_opt,
    search_space,
    n_iter=120,
    optimizer=BayesianOptimizer(),
)
hyper.run()