Multi-objective Bayesian Optimization
TNK function $n=2$ variables: $x_i \in [0, \pi], i=1,2$
Objectives:
- $f_i(x) = x_i$
Constraints:
- $g_1(x) = -x_1^2 -x_2^2 + 1 + 0.1 \cos\left(16 \arctan \frac{x_1}{x_2}\right) \le 0$
- $g_2(x) = (x_1 - 1/2)^2 + (x_2-1/2)^2 \le 0.5$
In [1]:
Copied!
# Test-mode configuration: shrink the optimization budget when the
# SMOKE_TEST environment variable is set (used by CI).
import os

SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
N_STEPS = 1 if SMOKE_TEST else 30

# Suppress library warnings for cleaner notebook output.
import warnings

warnings.filterwarnings("ignore")

import numpy as np
import pandas as pd
import torch

from xopt import Evaluator, Xopt
from xopt.generators.bayesian import MOBOGenerator
from xopt.resources.test_functions.tnk import evaluate_TNK, tnk_vocs

# Wrap the TNK test problem in an Xopt evaluator and inspect its
# variables/objectives/constraints specification (VOCS).
evaluator = Evaluator(function=evaluate_TNK)
print(tnk_vocs.dict())
# Test-mode configuration: shrink the optimization budget when the
# SMOKE_TEST environment variable is set (used by CI).
import os

SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
N_STEPS = 1 if SMOKE_TEST else 30

# Suppress library warnings for cleaner notebook output.
import warnings

warnings.filterwarnings("ignore")

import numpy as np
import pandas as pd
import torch

from xopt import Evaluator, Xopt
from xopt.generators.bayesian import MOBOGenerator
from xopt.resources.test_functions.tnk import evaluate_TNK, tnk_vocs

# Wrap the TNK test problem in an Xopt evaluator and inspect its
# variables/objectives/constraints specification (VOCS).
evaluator = Evaluator(function=evaluate_TNK)
print(tnk_vocs.dict())
{'variables': {'x1': [0.0, 3.14159], 'x2': [0.0, 3.14159]}, 'constraints': {'c1': ['GREATER_THAN', 0.0], 'c2': ['LESS_THAN', 0.5]}, 'objectives': {'y1': 'MINIMIZE', 'y2': 'MINIMIZE'}, 'constants': {'a': 'dummy_constant'}, 'observables': []}
In [2]:
Copied!
# Configure the multi-objective Bayesian optimization generator.
# The reference point bounds the hypervolume computation for EHVI-style
# acquisition; both objectives are minimized toward it.
generator = MOBOGenerator(vocs=tnk_vocs, reference_point={"y1": 1.5, "y2": 1.5})
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS

X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs)

# Seed the model with two hand-picked evaluations, then run the loop.
X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]}))

for step in range(N_STEPS):
    print(step)
    X.step()
# Configure the multi-objective Bayesian optimization generator.
# The reference point bounds the hypervolume computation for EHVI-style
# acquisition; both objectives are minimized toward it.
generator = MOBOGenerator(vocs=tnk_vocs, reference_point={"y1": 1.5, "y2": 1.5})
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS

X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs)

# Seed the model with two hand-picked evaluations, then run the loop.
X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]}))

for step in range(N_STEPS):
    print(step)
    X.step()
0
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
In [3]:
Copied!
# Display the accumulated evaluation history stored on the generator.
X.generator.data
X.generator.data
Out[3]:
x1 | x2 | a | y1 | y2 | c1 | c2 | xopt_runtime | xopt_error | |
---|---|---|---|---|---|---|---|---|---|
0 | 1.000000 | 0.750000 | dummy_constant | 1.000000 | 0.750000 | 0.626888 | 0.312500 | 0.000037 | False |
1 | 0.750000 | 1.000000 | dummy_constant | 0.750000 | 1.000000 | 0.626888 | 0.312500 | 0.000007 | False |
2 | 2.506948 | 1.171228 | dummy_constant | 2.506948 | 1.171228 | 6.580710 | 4.478386 | 0.000027 | False |
3 | 0.375852 | 0.146844 | dummy_constant | 0.375852 | 0.146844 | -0.931975 | 0.140132 | 0.000026 | False |
4 | 0.045862 | 0.053405 | dummy_constant | 0.045862 | 0.053405 | -1.030033 | 0.405689 | 0.000027 | False |
5 | 0.000000 | 0.446234 | dummy_constant | 0.000000 | 0.446234 | -0.900875 | 0.252891 | 0.000026 | False |
6 | 0.012219 | 0.939585 | dummy_constant | 0.012219 | 0.939585 | -0.214875 | 0.431165 | 0.000028 | False |
7 | 0.215979 | 1.048498 | dummy_constant | 0.215979 | 1.048498 | 0.245404 | 0.381518 | 0.000028 | False |
8 | 1.009845 | 0.347483 | dummy_constant | 1.009845 | 0.347483 | 0.084887 | 0.283203 | 0.000039 | False |
9 | 0.570812 | 0.835164 | dummy_constant | 0.570812 | 0.835164 | 0.121917 | 0.117349 | 0.000033 | False |
10 | 0.082674 | 1.028339 | dummy_constant | 0.082674 | 1.028339 | 0.035987 | 0.453303 | 0.000027 | False |
11 | 0.815941 | 0.649928 | dummy_constant | 0.815941 | 0.649928 | 0.111309 | 0.122297 | 0.000027 | False |
12 | 0.012334 | 0.856008 | dummy_constant | 0.012334 | 0.856008 | -0.364453 | 0.364559 | 0.000027 | False |
13 | 0.300986 | 0.915768 | dummy_constant | 0.300986 | 0.915768 | -0.106786 | 0.212470 | 0.000029 | False |
14 | 0.208836 | 3.018894 | dummy_constant | 0.208836 | 3.018894 | 8.112425 | 6.429603 | 0.000026 | False |
15 | 0.909644 | 0.485959 | dummy_constant | 0.909644 | 0.485959 | 0.063257 | 0.168005 | 0.000028 | False |
16 | 1.066541 | 0.221608 | dummy_constant | 1.066541 | 0.221608 | 0.285693 | 0.398471 | 0.000028 | False |
17 | 0.570435 | 0.179447 | dummy_constant | 0.570435 | 0.179447 | -0.658735 | 0.107715 | 0.000027 | False |
18 | 0.983754 | 0.041109 | dummy_constant | 0.983754 | 0.041109 | -0.109031 | 0.444599 | 0.000026 | False |
19 | 0.374073 | 0.937420 | dummy_constant | 0.374073 | 0.937420 | -0.079153 | 0.207194 | 0.000028 | False |
20 | 0.848629 | 0.551869 | dummy_constant | 0.848629 | 0.551869 | 0.122751 | 0.124233 | 0.000027 | False |
21 | 0.089847 | 0.861583 | dummy_constant | 0.089847 | 0.861583 | -0.240446 | 0.298968 | 0.000026 | False |
22 | 0.689340 | 0.744406 | dummy_constant | 0.689340 | 0.744406 | -0.052392 | 0.095584 | 0.000025 | False |
23 | 0.514071 | 0.538306 | dummy_constant | 0.514071 | 0.538306 | -0.539247 | 0.001665 | 0.000029 | False |
24 | 0.327539 | 0.436737 | dummy_constant | 0.327539 | 0.436737 | -0.637567 | 0.033745 | 0.000028 | False |
25 | 0.764351 | 0.541962 | dummy_constant | 0.764351 | 0.541962 | -0.031723 | 0.071642 | 0.000028 | False |
26 | 1.038403 | 0.072073 | dummy_constant | 1.038403 | 0.072073 | 0.038896 | 0.473000 | 0.000027 | False |
27 | 0.350563 | 0.796910 | dummy_constant | 0.350563 | 0.796910 | -0.336058 | 0.110487 | 0.000027 | False |
28 | 1.338238 | 0.015799 | dummy_constant | 1.338238 | 0.015799 | 0.692909 | 0.937093 | 0.000025 | False |
29 | 0.085330 | 3.133850 | dummy_constant | 0.085330 | 3.133850 | 8.737635 | 7.109118 | 0.000028 | False |
30 | 0.046127 | 0.776113 | dummy_constant | 0.046127 | 0.776113 | -0.453706 | 0.282239 | 0.000029 | False |
31 | 0.492104 | 0.901035 | dummy_constant | 0.492104 | 0.901035 | 0.068400 | 0.160891 | 0.000027 | False |
Plot results
In [4]:
Copied!
import matplotlib.pyplot as plt

# Visualize the TNK feasible region together with all evaluated points.
fig, ax = plt.subplots()

# Boundary of the first constraint in polar form:
# r(theta) = sqrt(1 + 0.1 * cos(16 * theta)).
theta = np.linspace(0, np.pi / 2)
r = np.sqrt(1 + 0.1 * np.cos(16 * theta))
x_1 = r * np.sin(theta)
x_2_lower = r * np.cos(theta)
z = np.zeros_like(x_1)

# White-out the region excluded by the first constraint.
ax.fill_between(x_1, z, x_2_lower, fc="white")

# Second constraint: feasible points lie inside a circle of radius
# sqrt(0.5) centered at (0.5, 0.5).
circle = plt.Circle(
    (0.5, 0.5), 0.5 ** 0.5, color="r", alpha=0.25, zorder=0, label="Valid Region"
)
ax.add_patch(circle)

# Overlay evaluated samples, colored by feasibility.
history = pd.concat(
    [X.data, tnk_vocs.feasibility_data(X.data)], axis=1, ignore_index=False
)
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")

ax.set_xlim(0, 3.14)
ax.set_ylim(0, 3.14)
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_aspect("equal")
import matplotlib.pyplot as plt

# Visualize the TNK feasible region together with all evaluated points.
fig, ax = plt.subplots()

# Boundary of the first constraint in polar form:
# r(theta) = sqrt(1 + 0.1 * cos(16 * theta)).
theta = np.linspace(0, np.pi / 2)
r = np.sqrt(1 + 0.1 * np.cos(16 * theta))
x_1 = r * np.sin(theta)
x_2_lower = r * np.cos(theta)
z = np.zeros_like(x_1)

# White-out the region excluded by the first constraint.
ax.fill_between(x_1, z, x_2_lower, fc="white")

# Second constraint: feasible points lie inside a circle of radius
# sqrt(0.5) centered at (0.5, 0.5).
circle = plt.Circle(
    (0.5, 0.5), 0.5 ** 0.5, color="r", alpha=0.25, zorder=0, label="Valid Region"
)
ax.add_patch(circle)

# Overlay evaluated samples, colored by feasibility.
history = pd.concat(
    [X.data, tnk_vocs.feasibility_data(X.data)], axis=1, ignore_index=False
)
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")

ax.set_xlim(0, 3.14)
ax.set_ylim(0, 3.14)
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_aspect("equal")
Plot path through input space
In [5]:
Copied!
# Trace the order in which points were sampled through the input space.
ax = history.plot("x1", "x2")
ax.set(xlim=(0, 3.14), ylim=(0, 3.14))
ax.set_aspect("equal")
# Trace the order in which points were sampled through the input space.
ax = history.plot("x1", "x2")
ax.set(xlim=(0, 3.14), ylim=(0, 3.14))
ax.set_aspect("equal")
In [6]:
Copied!
# Plot the acquisition function and the model-estimated feasible region,
# then mark the next candidate proposed by the generator.
from xopt.generators.bayesian.objectives import feasibility

bounds = X.generator.vocs.bounds
model = X.generator.model

# Build an n x n evaluation mesh over the input domain.
n = 200
x = torch.linspace(*bounds.T[0], n)
y = torch.linspace(*bounds.T[1], n)
# indexing="ij" matches the legacy torch.meshgrid default and avoids the
# deprecation warning emitted when the argument is omitted.
xx, yy = torch.meshgrid(x, y, indexing="ij")
pts = torch.hstack([ele.reshape(-1, 1) for ele in (xx, yy)]).double()
xx, yy = xx.numpy(), yy.numpy()

# Evaluate the acquisition function on the mesh; no gradients needed.
acq_func = X.generator.get_acquisition(model)
with torch.no_grad():
    acq_pts = pts.unsqueeze(1)
    acq = acq_func(acq_pts)

fig, ax = plt.subplots()
c = ax.pcolor(xx, yy, acq.reshape(n, n), cmap="Blues")
fig.colorbar(c)
ax.set_title("Acquisition function")
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")
ax.plot(*history[["x1", "x2"]].to_numpy()[-1].T, "+")

# Probability of feasibility under the constraint models.
feas = feasibility(pts.unsqueeze(1), model, tnk_vocs).flatten()
fig2, ax2 = plt.subplots()
c = ax2.pcolor(xx, yy, feas.reshape(n, n))
fig2.colorbar(c)
ax2.set_title("Feasible Region")

# Generate one candidate and mark it on the acquisition plot.
candidate = pd.DataFrame(X.generator.generate(1), index=[0])
print(candidate[["x1", "x2"]].to_numpy())
ax.plot(*candidate[["x1", "x2"]].to_numpy()[0], "o")
# Plot the acquisition function and the model-estimated feasible region,
# then mark the next candidate proposed by the generator.
from xopt.generators.bayesian.objectives import feasibility

bounds = X.generator.vocs.bounds
model = X.generator.model

# Build an n x n evaluation mesh over the input domain.
n = 200
x = torch.linspace(*bounds.T[0], n)
y = torch.linspace(*bounds.T[1], n)
# indexing="ij" matches the legacy torch.meshgrid default and avoids the
# deprecation warning emitted when the argument is omitted.
xx, yy = torch.meshgrid(x, y, indexing="ij")
pts = torch.hstack([ele.reshape(-1, 1) for ele in (xx, yy)]).double()
xx, yy = xx.numpy(), yy.numpy()

# Evaluate the acquisition function on the mesh; no gradients needed.
acq_func = X.generator.get_acquisition(model)
with torch.no_grad():
    acq_pts = pts.unsqueeze(1)
    acq = acq_func(acq_pts)

fig, ax = plt.subplots()
c = ax.pcolor(xx, yy, acq.reshape(n, n), cmap="Blues")
fig.colorbar(c)
ax.set_title("Acquisition function")
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")
ax.plot(*history[["x1", "x2"]].to_numpy()[-1].T, "+")

# Probability of feasibility under the constraint models.
feas = feasibility(pts.unsqueeze(1), model, tnk_vocs).flatten()
fig2, ax2 = plt.subplots()
c = ax2.pcolor(xx, yy, feas.reshape(n, n))
fig2.colorbar(c)
ax2.set_title("Feasible Region")

# Generate one candidate and mark it on the acquisition plot.
candidate = pd.DataFrame(X.generator.generate(1), index=[0])
print(candidate[["x1", "x2"]].to_numpy())
ax.plot(*candidate[["x1", "x2"]].to_numpy()[0], "o")
[[1.0521425 0.08859294]]
Out[6]:
[<matplotlib.lines.Line2D at 0x7f29504f6df0>]
In [7]:
Copied!
%%time
# Measure the wall-clock cost of generating a single candidate point.
candidate = X.generator.generate(1)
%%time
# Measure the wall-clock cost of generating a single candidate point.
candidate = X.generator.generate(1)
CPU times: user 804 ms, sys: 35.7 ms, total: 840 ms Wall time: 421 ms