 
 import numpy as np
 from scipy.optimize import OptimizeResult
+import tensorflow as tf
+from gpflow.gpr import GPR
 
 from .acquisition import Acquisition, MCMCAcquistion
-from .optim import Optimizer, SciPyOptimizer
-from .objective import ObjectiveWrapper
 from .design import Design, EmptyDesign
+from .objective import ObjectiveWrapper
+from .optim import Optimizer, SciPyOptimizer
 from .pareto import non_dominated_sort
+from .models import ModelWrapper
+
+
+def jitchol_callback(models):
+    """
+    Increase the likelihood variance in case of Cholesky failures.
+
+    This is similar to the use of jitchol in GPy. Default callback for BayesianOptimizer.
+    Only usable on GPR models; other model types are ignored.
+    """
+    for m in np.atleast_1d(models):
+        if isinstance(m, ModelWrapper):
+            jitchol_callback(m.wrapped)  # pragma: no cover
+
+        if not isinstance(m, GPR):
+            continue
+
+        # Retry optimization with increasing jitter: 0, then 1e-6 up to 1e-2 times the
+        # mean of the kernel diagonal. On failure, restore the saved state and retry.
+        s = m.get_free_state()
+        eKdiag = np.mean(np.diag(m.kern.compute_K_symm(m.X.value)))
+        for e in [0] + [10 ** ex for ex in range(-6, -1)]:
+            try:
+                m.likelihood.variance = m.likelihood.variance.value + e * eKdiag
+                m.optimize(maxiter=5)
+                break
+            except tf.errors.InvalidArgumentError:  # pragma: no cover
+                m.set_state(s)
 
 
 class BayesianOptimizer(Optimizer):
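
A minimal sketch of how the new callback could be exercised on a model directly; the data, kernel choice and the gpflowopt.bo import path below are assumptions for illustration, not part of this commit:

    import numpy as np
    import gpflow
    from gpflowopt.bo import jitchol_callback  # import path assumed

    X = np.random.rand(10, 2)
    Y = np.random.rand(10, 1)
    model = gpflow.gpr.GPR(X, Y, gpflow.kernels.RBF(2))

    # On a Cholesky failure inside optimize(), the saved state is restored and the fit
    # is retried with the likelihood variance inflated by 0, 1e-6, ..., 1e-2 times the
    # mean kernel diagonal.
    jitchol_callback(model)
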
@@ -32,7 +60,8 @@ class BayesianOptimizer(Optimizer):
     Additionally, it is configured with a separate optimizer for the acquisition function.
     """
 
-    def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=True, hyper_draws=None):
+    def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=True, hyper_draws=None,
+                 callback=jitchol_callback):
         """
         :param Domain domain: The optimization space.
         :param Acquisition acquisition: The acquisition function to optimize over the domain.
@@ -51,6 +80,12 @@ def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=Tr
         are obtained using Hamiltonian MC.
         (see `GPflow documentation <https://gpflow.readthedocs.io/en/latest//>`_ for details) for each model.
         The acquisition score is computed for each draw, and averaged.
+        :param callable callback: (optional) this function or object is called after the data of all models
+            has been updated. It receives the models as retrieved by acquisition.models as its argument,
+            without the wrapping models that handle any scaling. This allows custom model optimization
+            strategies to be implemented. All manipulations of GPflow models are permitted. Combined with
+            the optimize_restarts parameter of :class:`~.Acquisition` this enables several scenarios: do
+            the optimization manually from the callback (optimize_restarts equals 0), or choose the
+            starting point followed by some random restarts (optimize_restarts > 0).
5489 """
5590 assert isinstance (acquisition , Acquisition )
5691 assert hyper_draws is None or hyper_draws > 0
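
As a sketch of the manual-optimization scenario described above, a custom callback could take over hyperparameter optimization entirely; the names my_callback, domain and acquisition are placeholders, and the acquisition is assumed to be constructed with optimize_restarts=0:

    def my_callback(models):
        # Receives the unwrapped GPflow models whenever new data triggers a re-setup.
        for m in np.atleast_1d(models):
            m.optimize(maxiter=100)

    opt = BayesianOptimizer(domain, acquisition, callback=my_callback)
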
@@ -69,6 +104,8 @@ def __init__(self, domain, acquisition, optimizer=None, initial=None, scaling=Tr
         initial = initial or EmptyDesign(domain)
         self.set_initial(initial.generate())
 
+        self._model_callback = callback
+
     @Optimizer.domain.setter
     def domain(self, dom):
         assert self.domain.size == dom.size
@@ -86,6 +123,8 @@ def _update_model_data(self, newX, newY):
         assert self.acquisition.data[0].shape[1] == newX.shape[-1]
         assert self.acquisition.data[1].shape[1] == newY.shape[-1]
         assert newX.shape[0] == newY.shape[0]
+        if newX.size == 0:
+            return
         X = np.vstack((self.acquisition.data[0], newX))
         Y = np.vstack((self.acquisition.data[1], newY))
         self.acquisition.set_data(X, Y)
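
The new guard makes adding zero points a no-op, which presumably avoids re-triggering acquisition setup when e.g. an EmptyDesign is evaluated; a hypothetical illustration for an optimizer opt over a 2-dimensional domain with a 1-dimensional objective:

    opt._update_model_data(np.empty((0, 2)), np.empty((0, 1)))  # returns early, data unchanged
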
@@ -174,7 +213,6 @@ def _optimize(self, fx, n_iter):
         :param n_iter: number of iterations to run
         :return: OptimizeResult object
         """
-
         assert isinstance(fx, ObjectiveWrapper)
 
         # Evaluate and add the initial design (if any)
@@ -190,6 +228,10 @@ def inverse_acquisition(x):
 
         # Optimization loop
         for i in range(n_iter):
+            # If a callback is specified and acquisition has the setup flag enabled (indicating an
+            # upcoming compilation), run the callback.
+            if self._model_callback and self.acquisition._needs_setup:
+                self._model_callback([m.wrapped for m in self.acquisition.models])
             result = self.optimizer.optimize(inverse_acquisition)
             self._update_model_data(result.x, fx(result.x))
 
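
Putting the pieces together, an end-to-end sketch in the spirit of the GPflowOpt quickstart; the domain, design, kernel and acquisition choices are illustrative, with the new jitchol_callback applied by default:

    import numpy as np
    import gpflow
    import gpflowopt

    def fx(X):
        # Toy objective: row-wise sum of squares.
        X = np.atleast_2d(X)
        return np.sum(np.square(X), axis=1, keepdims=True)

    domain = gpflowopt.domain.ContinuousParameter('x1', -2, 2) + \
             gpflowopt.domain.ContinuousParameter('x2', -2, 2)

    X = gpflowopt.design.LatinHyperCube(11, domain).generate()
    Y = fx(X)
    model = gpflow.gpr.GPR(X, Y, gpflow.kernels.Matern52(2, ARD=True))
    acquisition = gpflowopt.acquisition.ExpectedImprovement(model)

    # callback defaults to jitchol_callback, guarding model fits against Cholesky failures
    optimizer = gpflowopt.BayesianOptimizer(domain, acquisition)
    result = optimizer.optimize(fx, n_iter=15)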