update tests, fix a bug and address typos
roussel-ryan committed Oct 29, 2024
1 parent d00a5a0 commit d55eedf
Showing 5 changed files with 38 additions and 23 deletions.
xopt/generators/bayesian/bayesian_generator.py (24 changes: 14 additions & 10 deletions)
@@ -411,10 +411,18 @@ def propose_candidates(self, model, n_candidates=1):
         # get initial candidates to start acquisition function optimization
         initial_points = self._get_initial_conditions(n_candidates)
 
-        # get candidates
-        candidates = self.numerical_optimizer.optimize(
-            acq_funct, bounds, n_candidates, batch_initial_conditions=initial_points
-        )
+        # get candidates -- grid optimizer does not support batch_initial_conditions
+        if isinstance(self.numerical_optimizer, GridOptimizer):
+            candidates = self.numerical_optimizer.optimize(
+                acq_funct, bounds, n_candidates
+            )
+        else:
+            candidates = self.numerical_optimizer.optimize(
+                acq_funct,
+                bounds,
+                n_candidates,
+                batch_initial_conditions=initial_points
+            )
         return candidates
 
     def get_training_data(self, data: pd.DataFrame) -> pd.DataFrame:
@@ -799,7 +807,7 @@ def _get_scaled_data(self):
         weights = set_botorch_weights(self.vocs).to(**self._tkwargs)[
             : self.vocs.n_objectives
         ]
-        return variable_data, objective_data * weights
+        return variable_data, objective_data * weights, weights
 
     def calculate_hypervolume(self):
         """compute hypervolume given data"""
@@ -814,7 +822,7 @@ def calculate_hypervolume(self):
 
     def get_pareto_front(self):
         """compute the pareto front x/y values given data"""
-        variable_data, objective_data = self._get_scaled_data()
+        variable_data, objective_data, weights = self._get_scaled_data()
         obj_data = torch.vstack(
             (self.torch_reference_point.unsqueeze(0), objective_data)
         )
@@ -826,10 +834,6 @@ def get_pareto_front(self):
         )
         non_dominated = is_non_dominated(obj_data)
 
-        weights = set_botorch_weights(self.vocs).to(**self._tkwargs)[
-            : self.vocs.n_objectives
-        ]
-
         # note need to undo weights for real number output
         # only return values if non nan values exist
         if torch.all(torch.isnan(var_data[non_dominated])):
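Note on the change to propose_candidates above: only optimizers that accept warm-start points receive batch_initial_conditions, while the grid search is called without them. A minimal, self-contained sketch of that dispatch pattern follows; the toy optimizer classes and the quadratic test function are illustrative stand-ins, not the xopt implementation.

import torch


class ToyGridOptimizer:
    """Stand-in for an optimizer that evaluates a fixed mesh (no warm starts)."""

    def optimize(self, function, bounds, n_candidates=1):
        grid = torch.linspace(float(bounds[0, 0]), float(bounds[1, 0]), 11).unsqueeze(-1)
        best = torch.argsort(function(grid), descending=True)[:n_candidates]
        return grid[best]


class ToyGradientOptimizer:
    """Stand-in for an optimizer that can warm-start from initial conditions."""

    def optimize(self, function, bounds, n_candidates=1, batch_initial_conditions=None):
        if batch_initial_conditions is not None:
            return batch_initial_conditions  # a real optimizer would refine these points
        return torch.rand(n_candidates, 1)


def propose(optimizer, function, bounds, n_candidates=1, initial_points=None):
    # mirror the branch added in propose_candidates: the grid optimizer
    # never receives batch_initial_conditions
    if isinstance(optimizer, ToyGridOptimizer):
        return optimizer.optimize(function, bounds, n_candidates)
    return optimizer.optimize(
        function, bounds, n_candidates, batch_initial_conditions=initial_points
    )


bounds = torch.tensor([[0.0], [1.0]])
acq = lambda x: -((x - 0.3) ** 2).squeeze(-1)
print(propose(ToyGridOptimizer(), acq, bounds))
print(propose(ToyGradientOptimizer(), acq, bounds, initial_points=torch.tensor([[0.5]])))
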
xopt/generators/bayesian/mobo.py (2 changes: 1 addition & 1 deletion)
@@ -21,7 +21,7 @@ class MOBOGenerator(MultiObjectiveBayesianGenerator):
     supports_batch_generation: bool = True
     use_pf_as_initial_points: bool = Field(
         False,
-        description="flag to specify if pf front points are to be used during "
+        description="flag to specify if pareto front points are to be used during "
         "optimization of the acquisition function",
     )
     __doc__ = """Implements Multi-Objective Bayesian Optimization using the Expected
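For context on the corrected description: when use_pf_as_initial_points is true, the current Pareto-front points are used during optimization of the acquisition function. A hedged usage sketch is below; the MOBOGenerator calls mirror the test updated in this commit, while the VOCS layout and reference-point format are assumptions about typical xopt usage rather than part of this diff.

import pandas as pd
from xopt.vocs import VOCS
from xopt.generators.bayesian.mobo import MOBOGenerator

# assumed problem definition; only the column names appear in the test diff
vocs = VOCS(
    variables={"x1": [0.0, 1.0], "x2": [0.0, 1.0]},
    objectives={"y1": "MINIMIZE", "y2": "MINIMIZE"},
    constraints={"c1": ["GREATER_THAN", 0.5]},
)

gen = MOBOGenerator(
    vocs=vocs,
    reference_point={"y1": 10.0, "y2": 10.0},  # assumed objective -> value format
    use_pf_as_initial_points=True,  # seed acquisition optimization with PF points
)
gen.numerical_optimizer.max_time = 1.0  # bound the optimizer runtime, as in the test

gen.add_data(
    pd.DataFrame(
        {
            "x1": [0.1, 0.2, 0.4],
            "x2": [0.1, 0.2, 0.3],
            "y1": [1.0, 2.0, 1.0],
            "y2": [0.5, 0.1, 1.0],
            "c1": [1.0, 1.0, 1.0],
        }
    )
)
candidate = gen.generate(1)  # initial conditions drawn from the Pareto front
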
xopt/numerical_optimizer.py (2 changes: 1 addition & 1 deletion)
@@ -70,7 +70,7 @@ class GridOptimizer(NumericalOptimizer):
         10, description="number of grid points per axis used for optimization"
     )
 
-    def optimize(self, function, bounds, n_candidates=1, **kwargs):
+    def optimize(self, function, bounds, n_candidates=1):
         assert isinstance(bounds, Tensor)
         # create mesh
         if len(bounds) != 2:
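A rough sketch of what a grid-based numerical optimizer like the one above does: build a mesh with a fixed number of points per axis over the bounds, evaluate the acquisition function on every node, and keep the best ones. This is an illustrative re-implementation under those assumptions, not the xopt GridOptimizer code.

import torch


def grid_optimize(function, bounds, n_candidates=1, n_grid_points=10):
    """Evaluate `function` on an n_grid_points-per-axis mesh over `bounds`
    (shape 2 x d) and return the best `n_candidates` grid points."""
    assert isinstance(bounds, torch.Tensor) and bounds.shape[0] == 2
    dim = bounds.shape[1]
    axes = [
        torch.linspace(float(bounds[0, i]), float(bounds[1, i]), n_grid_points)
        for i in range(dim)
    ]
    mesh = torch.cartesian_prod(*axes).reshape(-1, dim)
    # botorch-style acquisition functions expect a batch dimension: (N, 1, d)
    values = function(mesh.unsqueeze(1)).reshape(-1)
    best = torch.argsort(values, descending=True)[:n_candidates]
    return mesh[best]


# toy acquisition function peaked at (0.25, 0.75)
acq = lambda x: -((x - torch.tensor([0.25, 0.75])) ** 2).sum(dim=-1)
print(grid_optimize(acq, torch.tensor([[0.0, 0.0], [1.0, 1.0]]), n_candidates=2))
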
xopt/tests/generators/bayesian/test_mobo.py (23 changes: 17 additions & 6 deletions)
@@ -168,6 +168,7 @@ def test_initial_conditions(self):
             reference_point=reference_point,
             use_pf_as_initial_points=True,
         )
+        gen.numerical_optimizer.max_time = 1.0
         gen.add_data(test_data)
         initial_points = gen._get_initial_conditions()
 
@@ -196,11 +197,11 @@ def test_initial_conditions(self):
         vocs.constraints = {"c1": ["GREATER_THAN", 0.5]}
         test_data = pd.DataFrame(
             {
-                "x1": [0.1, 0.2, 0.4, 0.4],
-                "x2": [0.1, 0.2, 0.3, 0.2],
-                "y1": [1.0, 2.0, 1.0, 0.0],
-                "y2": [0.5, 0.1, 1.0, 1.5],
-                "c1": [1.0, 1.0, 1.0, 1.0],
+                "x1": [0.1, 0.2, 0.4, 0.4, 0.15],
+                "x2": [0.1, 0.2, 0.3, 0.2, 0.15],
+                "y1": [1.0, 2.0, 1.0, 0.0, 1.5],
+                "y2": [0.5, 0.1, 1.0, 1.5, 0.25],
+                "c1": [1.0, 1.0, 1.0, 1.0, 0.0],
             }
         )
         gen = MOBOGenerator(
@@ -209,7 +210,17 @@
             use_pf_as_initial_points=True,
         )
         gen.add_data(test_data)
-        gen._get_initial_conditions()
+        gen.numerical_optimizer.max_time = 1.0
+
+        # make sure that no invalid points make it into the initial conditions
+        ic = gen._get_initial_conditions()
+        assert not torch.allclose(
+            ic[:4],
+            torch.tensor(
+                ((0.1, 0.1), (0.2, 0.2), (0.4, 0.2), (0.15, 0.15))
+            ).reshape(4, 1, 2).double()
+        )
+
         gen.generate(1)
 
     def test_log_mobo(self):
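For intuition on the new assertion: the test adds an infeasible sample at (0.15, 0.15) (its c1 value violates the GREATER_THAN 0.5 constraint) and checks that it does not show up among the Pareto-front seed points. The sketch below reproduces that filtering with botorch's is_non_dominated, assuming both objectives are minimized; it is illustrative, not the generator's exact code path.

import torch
from botorch.utils.multi_objective.pareto import is_non_dominated

# variables, objectives, and constraint values from the updated test data
X = torch.tensor([[0.1, 0.1], [0.2, 0.2], [0.4, 0.3], [0.4, 0.2], [0.15, 0.15]])
Y = torch.tensor([[1.0, 0.5], [2.0, 0.1], [1.0, 1.0], [0.0, 1.5], [1.5, 0.25]])
C = torch.tensor([1.0, 1.0, 1.0, 1.0, 0.0])

feasible = C > 0.5                       # c1 GREATER_THAN 0.5 constraint
pareto = is_non_dominated(-Y[feasible])  # negate: is_non_dominated assumes maximization
print(X[feasible][pareto])               # (0.15, 0.15) was already filtered out here
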
xopt/vocs.py (10 changes: 5 additions & 5 deletions)
@@ -561,12 +561,12 @@ def extract_data(self, data: pd.DataFrame, return_raw=False, return_valid=False)
         observable_data = self.observable_data(data, "")
 
         if return_valid:
-            feasable_status = self.feasibility_data(data)["feasible"]
+            feasible_status = self.feasibility_data(data)["feasible"]
             return (
-                variable_data[feasable_status],
-                objective_data[feasable_status],
-                constraint_data[feasable_status],
-                observable_data[feasable_status],
+                variable_data[feasible_status],
+                objective_data[feasible_status],
+                constraint_data[feasible_status],
+                observable_data[feasible_status],
             )
 
         return variable_data, objective_data, constraint_data, observable_data
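The typo fix above only renames the mask; the behavior of extract_data(..., return_valid=True) is to filter every returned block by per-row feasibility. A minimal pandas illustration of that masking follows, using a toy stand-in for VOCS.feasibility_data rather than the xopt implementation.

import pandas as pd

data = pd.DataFrame(
    {
        "x1": [0.1, 0.2, 0.15],
        "y1": [1.0, 2.0, 1.5],
        "c1": [1.0, 1.0, 0.0],
    }
)

# stand-in for feasibility_data(data)["feasible"] with a c1 GREATER_THAN 0.5 constraint
feasible_status = data["c1"] > 0.5

variable_data = data[["x1"]][feasible_status]
objective_data = data[["y1"]][feasible_status]
print(variable_data)
print(objective_data)
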
