42 changes: 37 additions & 5 deletions bayes_opt/bayesian_optimization.py
@@ -132,6 +132,7 @@ def __init__(self,
         except (AttributeError, TypeError):
             raise TypeError('The transformer must be an instance of '
                             'DomainTransformer')
+        self._dummies = []

         super(BayesianOptimization, self).__init__(events=DEFAULT_EVENTS)

@@ -144,20 +145,51 @@ def constraint(self):
         if self.is_constrained:
             return self._space.constraint
         return None
+
+    @property
+    def res(self):
+        return [r for r in self._space.res() if r["params"] not in self._dummies]

     @property
     def max(self):
-        return self._space.max()
+        results = [r for r in self.res if r.get("allowed", True)]
+        if not results:
+            return {"target": None, "params": None, "constraint": None}
+        return sorted(results, key=lambda x: x["target"])[-1]

-    @property
-    def res(self):
-        return self._space.res()

     def register(self, params, target):
         """Expect observation with known target"""
-        self._space.register(params, target)
+        if params in self._dummies:
+            self._space.update(params, target)
+            self._dummies.remove(params)
+        else:
+            self._space.register(params, target)
         self.dispatch(Events.OPTIMIZATION_STEP)

+    def register_dummy(self, params, default_value=0):
+        """
+        Register a dummy observation to act as a placeholder.
+        Use register() to update it once the true value is known.
+        Useful for parallel evaluations: pending points steer the
+        optimizer away from suggesting near-duplicate candidates.
+        The target is temporarily set to that of the nearest
+        registered point.
+
+        Parameters
+        ----------
+        params: dict or list
+            The parameters for which to register the observation.
+
+        default_value: float, optional (default=0)
+            Target value used when no nearest point is available,
+            e.g. while the space is still empty. A reasonable choice
+            is the expected maximum of the target.
+        """
+        try:
+            closest = self._space.get_closest(params, self._dummies)
+            closest_value = self._space.probe(closest)
+        except ValueError:
+            closest_value = default_value
+        self._dummies.append(params)
+        self._space.register(params, closest_value)

     def probe(self, params, lazy=True):
         """
         Evaluates the function on the given points. Useful to guide the optimizer.
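Taken together, these additions support an ask/tell loop with pending evaluations. Below is a minimal sketch of the intended workflow, assuming the UtilityFunction-based suggest() API of bayes_opt 1.x; black_box is a hypothetical stand-in for an expensive objective running on workers.

from bayes_opt import BayesianOptimization, UtilityFunction

def black_box(x):
    # Hypothetical stand-in objective; in practice this would be an
    # expensive evaluation dispatched to a worker.
    return -x ** 2

optimizer = BayesianOptimization(f=None, pbounds={"x": (-2, 2)}, random_state=1)
utility = UtilityFunction(kind="ucb", kappa=2.5, xi=0.0)

# Ask for several points up front; registering each as a dummy keeps the
# acquisition function from re-suggesting points that are still pending.
pending = []
for _ in range(3):
    point = optimizer.suggest(utility)
    optimizer.register_dummy(point, default_value=0)
    pending.append(point)

# As the (possibly parallel) evaluations complete, replace each placeholder
# with its true target; register() detects the dummy and updates in place.
for point in pending:
    optimizer.register(point, black_box(**point))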
20 changes: 20 additions & 0 deletions bayes_opt/target_space.py
@@ -166,6 +166,26 @@ def register(self, params, target):
         self._params = np.concatenate([self._params, x.reshape(1, -1)])
         self._target = np.concatenate([self._target, [target]])

+    def update(self, params, target):
+        """Update an existing point with a new target value."""
+        x = self._as_array(params)
+        if x not in self:
+            raise KeyError("Can't find previous observation for {}, "
+                           "please use register() instead.".format(x))
+        # Locate the row holding this point and overwrite its target
+        idx = np.where((self._params == x).all(axis=1))[0][0]
+        self._target[idx] = target
+        self._cache[_hashable(x.ravel())] = target

+    def get_closest(self, params, exclude=None):
+        """Return the closest already-registered params to ``params``."""
+        x = self._as_array(params)
+        registered_params = self._params
+        if exclude:
+            exclude = np.array([self._as_array(e) for e in exclude])
+            # Drop rows that match one of the excluded points.
+            registered_params = np.array(
+                [p for p in registered_params
+                 if not (exclude == p).all(axis=1).any()])
+        idx = np.argmin(np.linalg.norm(registered_params - x, axis=1))
+        return registered_params[idx]

     def probe(self, params):
         """
         Evaluates a single point x, to obtain the value y and then records them
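To see the two new TargetSpace helpers in isolation, here is a sketch; note TargetSpace is internal API, constructed here directly with target_func=None purely for illustration.

from bayes_opt.target_space import TargetSpace

space = TargetSpace(target_func=None, pbounds={"x": (-1, 1)})
space.register({"x": 0.5}, 1.0)
space.register({"x": -0.5}, 2.0)

closest = space.get_closest({"x": 0.4})
print(closest)                       # -> [0.5], the nearest registered point

space.update({"x": 0.5}, 3.0)        # overwrite its target in place
try:
    space.update({"x": 0.9}, 0.0)    # never registered
except KeyError:
    pass                             # update() refuses unknown points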
28 changes: 28 additions & 0 deletions tests/test_dummy.py
@@ -0,0 +1,28 @@
+from bayes_opt.bayesian_optimization import BayesianOptimization
+import pytest
+
+
+def test_register_dummy():
+    optimizer = BayesianOptimization(f=None, pbounds={"x": (-1, 1)}, random_state=42)
+
+    optimizer.register_dummy({"x": 0}, default_value=0)
+    optimizer.register({"x": 0}, 1)
+    with pytest.raises(KeyError):
+        optimizer.register({"x": 0}, 1)
+
+
+def test_res_with_dummy():
+    optimizer = BayesianOptimization(f=None, pbounds={"x": (-1, 1)}, random_state=42)
+
+    optimizer.register_dummy({"x": 0}, default_value=0)
+    optimizer.register({"x": -1}, -1)
+    assert len(optimizer.res) == 1
+
+
+def test_max_with_dummy():
+    optimizer = BayesianOptimization(f=None, pbounds={"x": (-1, 1)}, random_state=42)
+
+    optimizer.register_dummy({"x": 0}, default_value=0)
+    optimizer.register({"x": -1}, -1)
+    best = optimizer.max
+    assert best["params"]["x"] == -1
+    assert best["target"] == -1