Multiple-Input, Single-Output (MISO) Optimization

In the following, we demonstrate a multi-input, single-output optimization of the Hartmann6 function, a 6-dimensional benchmark with a single global minimum of f(x*) ≈ -3.32 at x* ≈ (0.202, 0.150, 0.477, 0.275, 0.312, 0.657).
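
For reference, the function is defined as

    f(x) = -\sum_{j=1}^{4} \alpha_j \exp\left( -\sum_{i=1}^{6} A_{ji} (x_i - P_{ji})^2 \right), \qquad x \in [0, 1]^6,

where \alpha, A, and P are the fixed coefficient arrays that appear in the code below.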

The code repeats the optimization three times and reports the mean, standard deviation, minimum, and maximum of the best value found at each step.

import torch
import pandas as pd

from odyssey.mission import Mission # Mission
from odyssey.objective import Objective # Objective
from odyssey.navigators import SingleGP_Navigator # Navigator
from odyssey.navigators.sampler_navigators import Sobol_Navigator # Initial Sampler
from odyssey.navigators import UpperConfidenceBound, ExpectedImprovement # Acquisition functions (UpperConfidenceBound is imported as an alternative but not used below)

def hartmann6(X):
    """Hartmann6 function (6-dimensional with 1 global minimum) using PyTorch."""
    alpha = torch.tensor([1.0, 1.2, 3.0, 3.2])
    A = torch.tensor([
        [10, 3, 17, 3.5, 1.7, 8],
        [0.05, 10, 17, 0.1, 8, 14],
        [3, 3.5, 1.7, 10, 17, 8],
        [17, 8, 0.05, 10, 0.1, 14],
    ])
    P = 10 ** (-4) * torch.tensor([
        [1312, 1696, 5569, 124, 8283, 5886],
        [2329, 4135, 8307, 3736, 1004, 9991],
        [2348, 1451, 3522, 2883, 3047, 6650],
        [4047, 8828, 8732, 5743, 1091, 381],
    ])

    y = 0.0
    for j, alpha_j in enumerate(alpha):
        t = torch.sum(A[j] * (X - P[j]) ** 2)
        y -= alpha_j * torch.exp(-t)

    return y
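
# Optional sanity check (an addition to the original example): evaluating the
# function at the minimizer quoted above should give roughly -3.32.
x_star = torch.tensor([0.202, 0.150, 0.477, 0.275, 0.312, 0.657])
print(f"f(x*) = {hartmann6(x_star).item():.4f}")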

num_init_design = 10
num_iter = 50

all_best_ys = []  # best-so-far trace for each repeat

for _ in range(3):  # repeat the optimization three times
    objective = Objective(hartmann6)

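    # Hartmann6 is defined on the unit hypercube [0, 1]^6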
    param_space = [(0.0, 1.0) for _ in range(6)]

    mission = Mission(
        name='miso_test',
        funcs=[objective],
        maneuvers=["descend"],
        envelope=param_space,
    )

    # Set up the navigator
    navigator = SingleGP_Navigator(
        mission=mission,
        num_init_design=num_init_design,
        input_scaling=False,
        data_standardization=False,
        init_method=Sobol_Navigator(mission=mission),
        acq_function_type=ExpectedImprovement,
        acq_function_params={'best_f': 0.},
    )

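    # Each step below follows the navigator's ask/tell cycle as used in this
    # example: trajectory() proposes the next point, probe() evaluates the
    # objective there, relay() passes the observation back, and upgrade()
    # updates the navigator before the next proposal.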
    # BO loop
    best_ys = []
    best_y = -1E9
    opt_samples = len(mission.display_X) - num_init_design
    while opt_samples < num_iter:
        trajectory = navigator.trajectory()
        observation = navigator.probe(trajectory, init=False)
        print(len(mission.display_X) - num_init_design, trajectory, observation)
        navigator.relay(trajectory, observation)
        navigator.upgrade()

        y = -observation.item()  # negate so the best value is tracked as a positive score (cf. the table below)
        if y > best_y:
            best_y = y
        print(f"best y = {best_y}")
        best_ys.append(best_y)
        opt_samples = len(mission.display_X) - num_init_design

    all_best_ys.append(torch.tensor(best_ys))

stacked_Y = torch.stack(all_best_ys)
mean_y = torch.mean(stacked_Y, dim=0)
std_y = torch.std(stacked_Y, dim=0)
min_y = torch.min(stacked_Y, dim=0).values
max_y = torch.max(stacked_Y, dim=0).values


# Create a DataFrame with the iteration number and summary statistics
df = pd.DataFrame({
    'iteration': range(1, num_iter + 1),  # iteration numbers start from 1
    'average_y': mean_y.flatten().tolist(),  # convert tensors to Python lists
    'std_dev': std_y.flatten().tolist(),
    'min_y': min_y.flatten().tolist(),
    'max_y': max_y.flatten().tolist(),
})

print(df)
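
To visualize the convergence, the summary statistics can be plotted straight from the DataFrame. The sketch below is a minimal example and assumes matplotlib is installed (it is not used elsewhere in this example).

import matplotlib.pyplot as plt

plt.plot(df['iteration'], df['average_y'], label='mean best value')
plt.fill_between(
    df['iteration'],
    df['average_y'] - df['std_dev'],
    df['average_y'] + df['std_dev'],
    alpha=0.3,
    label='+/- 1 std. dev.',
)
plt.axhline(3.32, linestyle='--', color='grey', label='negated global optimum')
plt.xlabel('BO iteration')
plt.ylabel('best value found')
plt.legend()
plt.show()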

The final results will look similar to the following; note that the mean best value climbs towards the negated global optimum of about 3.32:

    iteration  average_y   std_dev     min_y     max_y
0           1   1.586762  0.751536  0.928896  2.405812
1           2   1.919769  0.621179  1.474741  2.629444
2           3   2.075202  0.612735  1.655122  2.778288
3           4   2.148868  0.546707  1.792197  2.778288
4           5   2.317580  0.470392  1.876119  2.812358
5           6   2.538105  0.335098  2.164590  2.812358
6           7   2.731283  0.261033  2.462628  2.983960
7           8   2.806066  0.156982  2.686976  2.983960
8           9   2.890985  0.185828  2.686976  3.050586
9          10   2.905339  0.204995  2.686976  3.093648
10         11   2.944499  0.253593  2.695581  3.202522
11         12   3.049329  0.222026  2.832777  3.276452
12         13   3.049329  0.222026  2.832777  3.276452
13         14   3.091559  0.200874  2.919533  3.312315
14         15   3.100904  0.193038  2.934023  3.312315
15         16   3.102362  0.192549  2.934023  3.312315
16         17   3.105441  0.192583  2.937678  3.315735
17         18   3.111389  0.184954  2.955457  3.315735
18         19   3.115784  0.180052  2.966477  3.315735
19         20   3.116531  0.179126  2.968718  3.315735
20         21   3.117735  0.180822  2.968718  3.318900
21         22   3.120218  0.178086  2.975520  3.319104
22         23   3.122956  0.174877  2.983333  3.319104
23         24   3.126223  0.171012  2.993136  3.319104
24         25   3.131209  0.165489  3.007126  3.319104
25         26   3.138320  0.157770  3.028456  3.319104
26         27   3.150026  0.148558  3.060911  3.321522
27         28   3.161260  0.139437  3.068032  3.321558
28         29   3.171317  0.133127  3.068032  3.321559
29         30   3.179919  0.129108  3.068819  3.321559
30         31   3.182996  0.128312  3.069028  3.321971
31         32   3.190621  0.126485  3.069639  3.321971
32         33   3.192119  0.126328  3.069639  3.321971
33         34   3.193183  0.125814  3.070570  3.321971
34         35   3.193474  0.125389  3.071444  3.321971
35         36   3.195680  0.124851  3.072320  3.321971
36         37   3.196033  0.124328  3.073381  3.321971
37         38   3.196417  0.123760  3.074532  3.321971
38         39   3.196816  0.123172  3.075727  3.321971
39         40   3.197420  0.122282  3.077541  3.321971
40         41   3.198076  0.121319  3.079507  3.321971
41         42   3.199214  0.120168  3.081813  3.321971
42         43   3.199971  0.119061  3.084084  3.321971
43         44   3.200544  0.118226  3.085803  3.321971
44         45   3.200600  0.118145  3.085970  3.321971
45         46   3.200626  0.118107  3.086048  3.321971
46         47   3.200929  0.117666  3.086957  3.321971
47         48   3.200929  0.117666  3.086957  3.321971
48         49   3.202110  0.115955  3.090501  3.321971
49         50   3.202916  0.114795  3.092917  3.321971