Update nonlinear_constraint_is_feasible to return a boolean tensor
Summary:
`nonlinear_constraint_is_feasible` checks whether the constraint is feasible for the given (batch of) candidates. Previously, it returned a single boolean that was False if any element of the batch was infeasible. This diff updates it to return a boolean tensor that indicates whether each batch is feasible. Call sites are updated to comply with the new behavior.

I'll use this in a follow-up diff to introduce a helper that evaluates the feasibility of all forms of constraints.
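As a hedged illustration of the new contract (the constraint callable and tensors below are invented for this example; they are not part of the diff — BoTorch's convention is that a constraint value >= 0 means feasible):

import torch
from botorch.optim.parameter_constraints import nonlinear_constraint_is_feasible


def con(x: torch.Tensor) -> torch.Tensor:
    # Intra-point constraint: a point is feasible iff 3 - sum(x) >= 0.
    return 3.0 - x.sum()


X = torch.tensor(
    [
        [[1.0, 1.0], [1.0, 1.0]],  # all points satisfy the constraint
        [[1.0, 1.0], [1.0, 3.5]],  # second point violates it
    ]
)  # batch x q x d
feasible = nonlinear_constraint_is_feasible(con, is_intrapoint=True, x=X)
print(feasible)  # tensor([ True, False]) -- one flag per batch instead of a single bool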

Differential Revision: D69209007
saitcakmak authored and facebook-github-bot committed Feb 5, 2025
1 parent c0db823 commit 250b262
Showing 3 changed files with 27 additions and 20 deletions.
14 changes: 9 additions & 5 deletions botorch/generation/gen.py
@@ -265,17 +265,21 @@ def f(x):
     # so it shouldn't be an issue given enough restarts.
     if nonlinear_inequality_constraints:
         for con, is_intrapoint in nonlinear_inequality_constraints:
-            if not nonlinear_constraint_is_feasible(
-                con, is_intrapoint=is_intrapoint, x=candidates
-            ):
-                candidates = torch.from_numpy(x0).to(candidates).reshape(shapeX)
+            if not (
+                feasible := nonlinear_constraint_is_feasible(
+                    con, is_intrapoint=is_intrapoint, x=candidates
+                )
+            ).all():
+                # Replace the infeasible batches with feasible ICs.
+                candidates[~feasible] = (
+                    torch.from_numpy(x0).to(candidates).reshape(shapeX)[~feasible]
+                )
                 warnings.warn(
                     "SLSQP failed to converge to a solution that satisfies the "
                     "non-linear constraints. Returning the feasible starting point.",
                     OptimizationWarning,
                     stacklevel=2,
                 )
                 break
 
     clamped_candidates = columnwise_clamp(
         X=candidates, lower=lower_bounds, upper=upper_bounds, raise_on_violation=True
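The gen.py change above keeps feasible SLSQP solutions and only swaps out the infeasible batches. A self-contained sketch of that boolean-mask replacement pattern (the tensors, the `fallback` name, and the toy feasibility rule are assumptions for illustration, not BoTorch code):

import torch

candidates = torch.tensor([[[0.2, 0.7]], [[1.4, 1.6]]])  # b x q x d, optimizer output
fallback = torch.tensor([[[0.1, 0.1]], [[0.3, 0.2]]])  # known-feasible starting points

# Toy per-batch feasibility: every point's coordinates must sum to at most 1.
if not (feasible := (candidates.sum(dim=-1) <= 1.0).all(dim=-1)).all():
    # Only the infeasible batches are replaced; feasible ones keep their solution.
    candidates[~feasible] = fallback[~feasible]

print(candidates)  # batch 0 is untouched, batch 1 now holds the fallback values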
21 changes: 11 additions & 10 deletions botorch/optim/parameter_constraints.py
@@ -512,7 +512,7 @@ def f_grad(X):
 
 def nonlinear_constraint_is_feasible(
     nonlinear_inequality_constraint: Callable, is_intrapoint: bool, x: Tensor
-) -> bool:
+) -> Tensor:
     """Checks if a nonlinear inequality constraint is fulfilled.
 
     Args:
@@ -522,23 +522,24 @@ def nonlinear_constraint_is_feasible(
             is applied pointwise and is broadcasted over the q-batch. Else, the
             constraint has to be evaluated over the whole q-batch and is an
             inter-point constraint.
-        x: Tensor of shape (b x q x d).
+        x: Tensor of shape (batch x q x d).
 
     Returns:
-        bool: True if the constraint is fulfilled, else False.
+        A boolean (cpu) tensor of shape (batch) indicating if the constraint is
+        satisfied by the corresponding batch of `x`.
     """
 
     def check_x(x: Tensor) -> bool:
         return _arrayify(nonlinear_inequality_constraint(x)).item() >= NLC_TOL
 
-    for x_ in x:
+    x_flat = x.view(-1, *x.shape[-2:])
+    is_feasible = torch.ones(x_flat.shape[0], dtype=torch.bool)
+    for i, x_ in enumerate(x_flat):
         if is_intrapoint:
-            if not all(check_x(x__) for x__ in x_):
-                return False
+            is_feasible[i] &= all(check_x(x__) for x__ in x_)
         else:
-            if not check_x(x_):
-                return False
-    return True
+            is_feasible[i] &= check_x(x_)
+    return is_feasible.view(x.shape[:-2])
 
 
 def make_scipy_nonlinear_inequality_constraints(
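The new implementation flattens all leading batch dimensions, evaluates each (q x d) batch, and restores the batch shape at the end. A small standalone sketch of that flatten-then-restore idiom (the feasibility rule below is a stand-in, not the constraint machinery above):

import torch

x = torch.rand(2, 3, 4, 5)  # (2 x 3) batch shape, q = 4, d = 5
x_flat = x.view(-1, *x.shape[-2:])  # collapse batch dims -> shape (6, 4, 5)

is_feasible = torch.ones(x_flat.shape[0], dtype=torch.bool)
for i, x_ in enumerate(x_flat):  # x_ has shape q x d
    # Stand-in check: every point in the q-batch must have a non-negative sum.
    is_feasible[i] &= bool((x_.sum(dim=-1) >= 0).all())

print(is_feasible.view(x.shape[:-2]).shape)  # torch.Size([2, 3]) -- batch shape restored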
@@ -589,7 +590,7 @@ def make_scipy_nonlinear_inequality_constraints(
         nlc, is_intrapoint = constraint
         if not nonlinear_constraint_is_feasible(
             nlc, is_intrapoint=is_intrapoint, x=x0.reshape(shapeX)
-        ):
+        ).all():
             raise ValueError(
                 "`batch_initial_conditions` must satisfy the non-linear inequality "
                 "constraints."
12 changes: 7 additions & 5 deletions test/optim/test_parameter_constraints.py
@@ -358,15 +358,16 @@ def nlc(x):
                 ),
             )
         )
-        self.assertFalse(
+        self.assertEqual(
             nonlinear_constraint_is_feasible(
                 nlc,
                 True,
                 torch.tensor(
                     [[[1.5, 1.5], [1.5, 1.5]], [[1.5, 1.5], [1.5, 3.5]]],
                     device=self.device,
                 ),
-            )
+            ).tolist(),
+            [True, False],
         )
         self.assertTrue(
             nonlinear_constraint_is_feasible(
@@ -381,22 +382,23 @@ def nlc(x):
                 [[[1.0, 1.0], [1.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]],
                 device=self.device,
             ),
-        )
+        ).all()
         )
         self.assertFalse(
             nonlinear_constraint_is_feasible(
                 nlc, False, torch.tensor([[[1.5, 1.5], [1.5, 1.5]]], device=self.device)
             )
         )
-        self.assertFalse(
+        self.assertEqual(
             nonlinear_constraint_is_feasible(
                 nlc,
                 False,
                 torch.tensor(
                     [[[1.0, 1.0], [1.0, 1.0]], [[1.5, 1.5], [1.5, 1.5]]],
                     device=self.device,
                 ),
-            )
+            ).tolist(),
+            [True, False],
         )
 
     def test_generate_unfixed_nonlin_constraints(self):
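The test updates follow from the new return type: a multi-element boolean tensor cannot be truth-tested directly, so the assertions either compare element-wise lists or reduce with `.all()`. A tiny standalone unittest sketch of both patterns (illustrative values only, not BoTorch's test fixtures):

import unittest

import torch


class FeasibilityAssertions(unittest.TestCase):
    def test_boolean_tensor_assertions(self) -> None:
        feasible = torch.tensor([True, False])  # per-batch flags
        # bool() on a multi-element tensor raises, so compare element-wise instead.
        self.assertEqual(feasible.tolist(), [True, False])
        self.assertFalse(feasible.all())  # .all() reduces to a single flag


if __name__ == "__main__":
    unittest.main()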
