Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
57fbe94
dont raise_validation_error in cone examples
Osburg Feb 5, 2025
03ca220
formatting
Osburg Feb 5, 2025
c394769
add support for full evaluation of objectives in pytorch
Osburg Feb 16, 2025
847700b
completely switch to pytorch autograd and simplify code
Osburg Feb 17, 2025
679bd4f
make nonlinear constraints work without torch
Osburg Feb 22, 2025
85dc312
Merge branch 'main' into doe_pytorch_based_model_evaluation
Osburg Mar 12, 2025
450cdb8
remove outdated tests and fix others
Osburg Mar 12, 2025
c99a35a
add missing test
Osburg Mar 12, 2025
b016d66
Merge branch 'main' into doe_pytorch_based_model_evaluation
Osburg Mar 12, 2025
8e148c8
add support for exact (dense) hessians in doe, use cyipopt.Problem in…
Osburg Mar 30, 2025
13dd4ad
fix tests
Osburg Mar 30, 2025
156f73b
Merge branch 'main' into doe_pytorch_based_model_evaluation
Osburg Mar 30, 2025
e11ee74
fix tests
Osburg Mar 30, 2025
65c32fd
fix notebooks
Osburg Mar 30, 2025
829c313
next try :')
Osburg Mar 30, 2025
69a904a
lint
Osburg Mar 30, 2025
88ba22d
add fallback to scipy.minimize if cyipopt is not available
Osburg Apr 1, 2025
8f72707
use _minimize in IOptimalityCriterion
Osburg Apr 1, 2025
da2ab5a
make use_cyipopt user-definable, pass jacobian to scipy interface + s…
Osburg Apr 4, 2025
4ac0e9b
Add chemistry focus to documentation (#555)
TobyBoyne Apr 1, 2025
7d6fc73
Refactor ACQF Optimization (#535)
jduerholt Apr 2, 2025
0cf5326
Use Seed Sequences for random number generation (#566)
TobyBoyne Apr 2, 2025
4938c19
fix broken link in tutorials README.md (#564)
R-M-Lee Apr 4, 2025
d76fafa
Reintroduce sampling parameter into DoEStrategy to allow for seeding …
dlinzner-bcs Apr 4, 2025
e738982
run python 3.10 tests (#570)
bertiqwerty Apr 4, 2025
232b62b
fix test_functional_constraint()
Osburg Apr 6, 2025
3f42be3
Merge branch 'main' into doe_pytorch_based_model_evaluation
Osburg Apr 6, 2025
b9a8848
revert dumb changes that I made
Osburg Apr 6, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion bofire/data_models/constraints/interpoint.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import math
from typing import Annotated, Literal, Optional
from typing import Annotated, Dict, Literal, Optional, Union

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -83,3 +83,6 @@ def __call__(self, experiments: pd.DataFrame) -> pd.Series:

def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
    """Jacobian of the constraint w.r.t. the experiments.

    Raises:
        NotImplementedError: always; jacobians are not available for
            interpoint constraints yet.
    """
    raise NotImplementedError("Method `jacobian` currently not implemented.")

def hessian(self, experiments: pd.DataFrame) -> Dict[Union[str, int], pd.DataFrame]:
    """Hessian of the constraint w.r.t. the experiments.

    Raises:
        NotImplementedError: always; Hessians are not available for
            interpoint constraints yet.
    """
    raise NotImplementedError("Method `hessian` currently not implemented.")
5 changes: 4 additions & 1 deletion bofire/data_models/constraints/linear.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Annotated, List, Literal, Tuple
from typing import Annotated, Dict, List, Literal, Tuple, Union

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -64,6 +64,9 @@ def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
columns=[f"dg/d{name}" for name in self.features],
)

def hessian(self, experiments: pd.DataFrame) -> Dict[Union[int, str], float]:
    """Return the (identically zero) Hessian of the linear constraint.

    A linear constraint has vanishing second derivatives, so the Hessian
    collapses to the scalar 0.0 for every experiment.

    Args:
        experiments: Dataframe with experiments to evaluate the Hessian on.

    Returns:
        A dict mapping each row index label of `experiments` to 0.0.
    """
    # Key by the dataframe's actual index labels (may be int or str, matching
    # the Dict[Union[int, str], float] annotation and the convention of
    # NonlinearConstraint.hessian) instead of assuming a 0..n-1 RangeIndex.
    return {idx: 0.0 for idx in experiments.index}


class LinearEqualityConstraint(LinearConstraint, EqualityConstraint):
"""Linear equality constraint of the form `coefficients * x = rhs`.
Expand Down
5 changes: 4 additions & 1 deletion bofire/data_models/constraints/nchoosek.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Literal
from typing import Dict, Literal, Union

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -119,3 +119,6 @@ def is_fulfilled(self, experiments: pd.DataFrame, tol: float = 1e-6) -> pd.Serie

def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
    """Jacobian of the constraint w.r.t. the experiments.

    Raises:
        NotImplementedError: always; NChooseK constraints are not
            differentiable.
    """
    raise NotImplementedError("Jacobian not implemented for NChooseK constraints.")

def hessian(self, experiments: pd.DataFrame) -> Dict[Union[str, int], pd.DataFrame]:
    """Hessian of the constraint w.r.t. the experiments.

    Raises:
        NotImplementedError: always; NChooseK constraints are not
            differentiable.
    """
    raise NotImplementedError("Hessian not implemented for NChooseK constraints.")
132 changes: 127 additions & 5 deletions bofire/data_models/constraints/nonlinear.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import inspect
import warnings
from typing import Callable, Literal, Optional, Union
from typing import Callable, Dict, Literal, Optional, Union

import numpy as np
import pandas as pd
Expand All @@ -9,6 +9,7 @@

try:
import torch
from torch.autograd.functional import hessian as torch_hessian
from torch.autograd.functional import jacobian as torch_jacobian

torch_tensor = torch.tensor
Expand All @@ -21,6 +22,7 @@ def error_func(*args, **kwargs):
torch_jacobian = error_func
torch_tensor = error_func
torch_diag = error_func
torch_hessian = error_func

from bofire.data_models.constraints.constraint import (
EqualityConstraint,
Expand All @@ -47,6 +49,9 @@ class NonlinearConstraint(IntrapointConstraint):
jacobian_expression: Optional[Union[str, Callable]] = Field(
default=None, validate_default=True
)
hessian_expression: Optional[Union[str, Callable]] = Field(
default=None, validate_default=True
)

def validate_inputs(self, inputs: Inputs):
if self.features is not None:
Expand Down Expand Up @@ -100,12 +105,53 @@ def set_jacobian_expression(cls, jacobian_expression, info) -> Union[str, Callab

return jacobian_expression

@field_validator("hessian_expression")
@classmethod
def set_hessian_expression(cls, hessian_expression, info) -> Union[str, Callable]:
    """Derive a symbolic Hessian expression from `expression` when none is given.

    If `hessian_expression` is None and both `features` and `expression`
    have already been validated, the string `expression` is differentiated
    twice with sympy and returned as a string of the form
    "[[...], [...], ...]" — a matrix of all second partial derivatives
    over the constraint's features. In every other case the incoming value
    is returned unchanged.
    """
    if (
        hessian_expression is None
        and "features" in info.data.keys()
        and "expression" in info.data.keys()
    ):
        try:
            # sympy is an optional dependency, only needed to derive the
            # Hessian symbolically from the expression string.
            import sympy
        except ImportError as e:
            warnings.warn(e.msg)
            warnings.warn("please run `pip install sympy` for this functionality.")
            # Without sympy we cannot auto-derive anything; keep the None.
            return hessian_expression
        if info.data["features"] is not None:
            if isinstance(info.data["expression"], str):
                # Differentiate w.r.t. every pair of feature names; the inner
                # join builds one row, the outer join stacks the rows.
                return (
                    "["
                    + ", ".join(
                        [
                            "["
                            + ", ".join(
                                [
                                    str(
                                        sympy.S(info.data["expression"])
                                        .diff(key1)
                                        .diff(key2)
                                    )
                                    for key1 in info.data["features"]
                                ]
                            )
                            + "]"
                            for key2 in info.data["features"]
                        ]
                    )
                    + "]"
                )

    return hessian_expression

def __call__(self, experiments: pd.DataFrame) -> pd.Series:
if isinstance(self.expression, str):
return experiments.eval(self.expression)
elif isinstance(self.expression, Callable):
func_input = {
col: torch_tensor(experiments[col]) for col in experiments.columns
col: torch_tensor(experiments[col], requires_grad=False)
for col in experiments.columns
}
return pd.Series(self.expression(**func_input).cpu().numpy())

Expand All @@ -129,7 +175,10 @@ def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
elif isinstance(self.jacobian_expression, Callable):
args = inspect.getfullargspec(self.jacobian_expression).args

func_input = {arg: torch_tensor(experiments[arg]) for arg in args}
func_input = {
arg: torch_tensor(experiments[arg], requires_grad=False)
for arg in args
}
result = self.jacobian_expression(**func_input)

return pd.DataFrame(
Expand All @@ -141,12 +190,14 @@ def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
for col in experiments.columns
]
),
index=["dg/d" + name for name in args],
index=["dg/d" + name for name in experiments.columns],
).transpose()
elif isinstance(self.expression, Callable):
args = inspect.getfullargspec(self.expression).args

func_input = tuple([torch_tensor(experiments[arg]) for arg in args])
func_input = tuple(
[torch_tensor(experiments[arg], requires_grad=False) for arg in args]
)

result = torch_jacobian(self.expression, func_input)
result = [torch_diag(result[i]).cpu().numpy() for i in range(len(args))]
Expand All @@ -160,6 +211,77 @@ def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
"The jacobian of a nonlinear constraint cannot be evaluated if jacobian_expression is None and expression is not Callable.",
)

def hessian(self, experiments: pd.DataFrame) -> Dict[Union[str, int], pd.DataFrame]:
    """
    Computes a dict of dataframes where the key dimension is the index of the experiments dataframe
    and the value is the hessian matrix of the constraint evaluated at the corresponding experiment.

    Args:
        experiments (pd.DataFrame): Dataframe to evaluate the constraint Hessian on.

    Returns:
        Dict[pd.DataFrame]: Dictionary of dataframes where the key is the index of the experiments dataframe
        and the value is the Hessian matrix of the constraint evaluated at the corresponding experiment.
    """
    if self.hessian_expression is not None:
        if isinstance(self.hessian_expression, str):
            # A string expression encodes a nested list "[[...], ...]" of
            # second derivatives; pandas evaluates each entry column-wise.
            res = experiments.eval(self.hessian_expression)
        else:
            if not isinstance(self.hessian_expression, Callable):
                raise ValueError(
                    "The hessian_expression must be a string or a callable.",
                )
            args = inspect.getfullargspec(self.hessian_expression).args

            # requires_grad=False: only values are needed here, no autograd
            # graph has to be built for an explicit Hessian expression.
            func_input = {
                arg: torch_tensor(experiments[arg], requires_grad=False)
                for arg in args
            }
            res = self.hessian_expression(**func_input)
        # Broadcast scalar entries (constant second derivatives) to one value
        # per experiment so every matrix entry has length n_experiments.
        for i, _ in enumerate(res):
            for j, entry in enumerate(res[i]):
                if not hasattr(entry, "__iter__"):
                    res[i][j] = pd.Series(np.repeat(entry, experiments.shape[0]))
        res = np.array(res)
        names = self.features or [f"x{i}" for i in range(experiments.shape[1])]

        # res now has shape (n_features, n_features, n_experiments); slice one
        # Hessian matrix per experiment, keyed by the dataframe's index label.
        return {
            idx: pd.DataFrame(
                res[..., i],
                columns=[f"d/d{name}" for name in names],
                index=[f"d/d{name}" for name in names],
            )
            for i, idx in enumerate(experiments.index)
        }

    elif isinstance(self.expression, Callable):
        # No explicit Hessian available: fall back to torch autograd on
        # `expression`, evaluated one experiment (row) at a time.
        args = inspect.getfullargspec(self.expression).args

        func_input = {
            idx: tuple(
                [
                    torch_tensor(experiments[arg][idx], requires_grad=False)
                    for arg in args
                ]
            )
            for idx in experiments.index
        }

        names = self.features or [f"x{i}" for i in range(experiments.shape[1])]
        res = {
            idx: pd.DataFrame(
                np.array(torch_hessian(self.expression, func_input[idx])),
                columns=[f"d/d{name}" for name in names],
                index=[f"d/d{name}" for name in names],
            )
            for idx in experiments.index
        }
        return res

    raise ValueError(
        "The hessian of a nonlinear constraint cannot be evaluated if hessian_expression is None and expression is not Callable.",
    )


class NonlinearEqualityConstraint(NonlinearConstraint, EqualityConstraint):
"""Nonlinear equality constraint of the form 'expression == 0'.
Expand Down
5 changes: 5 additions & 0 deletions bofire/data_models/constraints/product.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,11 @@ def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
"Jacobian for product constraints is not yet implemented.",
)

def hessian(self, experiments: pd.DataFrame) -> List[pd.DataFrame]:
    """Hessian of the constraint w.r.t. the experiments.

    NOTE(review): sibling constraint classes annotate `hessian` as
    `Dict[Union[str, int], pd.DataFrame]`; the `List[pd.DataFrame]`
    annotation here looks inconsistent — confirm before implementing.

    Raises:
        NotImplementedError: always; Hessians for product constraints are
            not yet implemented.
    """
    raise NotImplementedError(
        "Hessian for product constraints is not yet implemented.",
    )


class ProductEqualityConstraint(ProductConstraint, EqualityConstraint):
"""Represents a product constraint of the form `sign * x1**e1 * x2**e2 * ... * xn**en == rhs`.
Expand Down
2 changes: 2 additions & 0 deletions bofire/data_models/strategies/doe.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,8 @@ class DoEStrategy(Strategy):

verbose: bool = False # get rid of this at a later stage
ipopt_options: Optional[Dict] = None
use_hessian: bool = False
use_cyipopt: Optional[bool] = None
sampling: Optional[List[List]] = None

def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
Expand Down
14 changes: 14 additions & 0 deletions bofire/strategies/doe/branch_and_bound.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,8 @@ def find_local_max_ipopt_BaB(
categorical_groups: Optional[List[List[ContinuousInput]]] = None,
discrete_variables: Optional[Dict[str, Tuple[ContinuousInput, List[float]]]] = None,
verbose: bool = False,
use_hessian: bool = False,
use_cyipopt: Optional[bool] = None,
) -> pd.DataFrame:
"""Function computing a d-optimal design" for a given domain and model.
It allows for the problem to have categorical values which is solved by Branch-and-Bound
Expand All @@ -261,6 +263,9 @@ def find_local_max_ipopt_BaB(
verbose (bool): if true, print information during the optimization process
transform_range (Optional[Bounds]): range to which the input variables are transformed.
If None is provided, the features will not be scaled. Defaults to None.
use_hessian (bool): if true, the hessian will be used in the optimization process. Defaults to False.
use_cyipopt (Optional[bool]): if true, the cyipopt solver will be used, otherwise scipy.minimize(). Defaults to None.
If None is provided, the cyipopt solver will be used if available.
Returns:
A pd.DataFrame object containing the best found input for the experiments. In general, this is only a
local optimum.
Expand Down Expand Up @@ -324,6 +329,8 @@ def find_local_max_ipopt_BaB(
sampling,
None,
partially_fixed_experiments=initial_branch,
use_hessian=use_hessian,
use_cyipopt=use_cyipopt,
)
initial_value = objective_function.evaluate(
initial_design.to_numpy().flatten(),
Expand Down Expand Up @@ -367,6 +374,8 @@ def find_local_max_ipopt_exhaustive(
categorical_groups: Optional[List[List[ContinuousInput]]] = None,
discrete_variables: Optional[Dict[str, Tuple[ContinuousInput, List[float]]]] = None,
verbose: bool = False,
use_hessian: bool = False,
use_cyipopt: Optional[bool] = None,
) -> pd.DataFrame:
"""Function computing a d-optimal design" for a given domain and model.
It allows for the problem to have categorical values which is solved by exhaustive search
Expand All @@ -392,6 +401,9 @@ def find_local_max_ipopt_exhaustive(
with key:(relaxed variable, valid values). Defaults to None
verbose (bool): if true, print information during the optimization process
transform_range (Optional[Bounds]): range to which the input variables are transformed.
use_hessian (bool): if true, the hessian will be used in the optimization process. Defaults to False.
use_cyipopt (Optional[bool]): if true, the cyipopt solver will be used, otherwise scipy.minimize(). Defaults to None.
If None is provided, the cyipopt solver will be used if available.
Returns:
A pd.DataFrame object containing the best found input for the experiments. In general, this is only a
local optimum.
Expand Down Expand Up @@ -503,6 +515,8 @@ def find_local_max_ipopt_exhaustive(
sampling,
None,
one_set_of_experiments,
use_hessian=use_hessian,
use_cyipopt=use_cyipopt,
)
domain.validate_candidates(
candidates=current_design.apply(lambda x: np.round(x, 8)),
Expand Down
Loading
Loading