38 | 38 |     Optimization with Noisy Experiments. Bayesian Analysis,
39 | 39 |     14(2), 495-519, 2019.
40 | 40 |
| 41 | +.. [Mishra2007]
| 42 | +    S. K. Mishra. Minimization of Keane's Bump Function by the Repulsive
| 43 | +    Particle Swarm and the Differential Evolution Methods (May 1, 2007).
| 44 | +    Available at SSRN: https://ssrn.com/abstract=983836.
| 45 | +
41 | 46 | .. [Gramacy2016]
42 | 47 |     R. Gramacy, G. Gray, S. Le Digabel, H. Lee, P. Ranjan, G. Wells & S. Wild.
43 | 48 |     Modeling an Augmented Lagrangian for Blackbox Constrained Optimization,
@@ -1203,3 +1208,77 @@ def evaluate_slack_true(self, X: Tensor) -> Tensor:
1203 | 1208 |             ],
1204 | 1209 |             dim=-1,
1205 | 1210 |         )
| 1211 | +
| 1212 | +
| 1213 | +class KeaneBumpFunction(ConstrainedSyntheticTestFunction):
| 1214 | +    r"""Keane Bump Function problem with constraints.
| 1215 | +
| 1216 | +    This is a challenging d-dimensional minimization problem with two
| 1217 | +    constraints that is evaluated on the domain [0, 10]^d.
| 1218 | +
| 1219 | +    There is no known global optimum for this problem, but the approximate
| 1220 | +    global optimal value for a few different dimensionalities can be found in
| 1221 | +    [Mishra2007]_.
| 1222 | +    """
| 1223 | +
| 1224 | +    num_constraints = 2
| 1225 | +    _optimal_value_lookup = {
| 1226 | +        2: -0.365,
| 1227 | +        10: -0.6737,
| 1228 | +        15: -0.781647601,
| 1229 | +        20: -0.803619104,
| 1230 | +        30: -0.818056222,
| 1231 | +        40: -0.826624404,
| 1232 | +        50: -0.83078783,
| 1233 | +    }
| 1234 | +
| 1235 | +    def __init__(
| 1236 | +        self,
| 1237 | +        dim: int,
| 1238 | +        noise_std: None | float = None,
| 1239 | +        constraint_noise_std: None | float | list[float] = None,
| 1240 | +        negate: bool = False,
| 1241 | +        bounds: list[tuple[float, float]] | None = None,
| 1242 | +        dtype: torch.dtype = torch.double,
| 1243 | +    ) -> None:
| 1244 | +        r"""
| 1245 | +        Args:
| 1246 | +            dim: The (input) dimension.
| 1247 | +            noise_std: Standard deviation of the observation noise.
| 1248 | +            constraint_noise_std: Standard deviation of the constraint noise.
| 1249 | +                If a list is provided, specifies separate noise standard
| 1250 | +                deviations for each constraint.
| 1251 | +            negate: If True, negate the function.
| 1252 | +            bounds: Custom bounds for the function specified as (lower, upper) pairs.
| 1253 | +            dtype: The dtype that is used for the bounds of the function.
| 1254 | +        """
| 1255 | +        self.dim = dim
| 1256 | +        if bounds is None:
| 1257 | +            bounds = [(0.0, 10.0) for _ in range(dim)]
| 1258 | +        if dim in self._optimal_value_lookup:
| 1259 | +            self._optimal_value = self._optimal_value_lookup[dim]
| 1260 | +        super().__init__(
| 1261 | +            noise_std=noise_std,
| 1262 | +            constraint_noise_std=constraint_noise_std,
| 1263 | +            negate=negate,
| 1264 | +            bounds=bounds,
| 1265 | +            dtype=dtype,
| 1266 | +        )
| 1267 | +
| 1268 | +    def evaluate_true(self, X: Tensor) -> Tensor:
| 1269 | +        Xcos = X.cos()
| 1270 | +        num = Xcos.pow(4).sum(dim=-1) - 2 * Xcos.pow(2).prod(dim=-1)
| 1271 | +        den = torch.sqrt(
| 1272 | +            (torch.arange(1, self.dim + 1, device=X.device) * X.pow(2)).sum(dim=-1)
| 1273 | +        )
| 1274 | +        # Clamp to avoid den=0, which happens when X=0.
| 1275 | +        return -(num / den.clamp(min=1e-3)).abs()
| 1276 | +
| 1277 | +    def evaluate_slack_true(self, X: Tensor) -> Tensor:
| 1278 | +        return torch.cat(
| 1279 | +            [
| 1280 | +                X.prod(dim=-1, keepdim=True) - 0.75,
| 1281 | +                7.5 * self.dim - X.sum(dim=-1, keepdim=True),
| 1282 | +            ],
| 1283 | +            dim=-1,
| 1284 | +        )
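
For reference, `evaluate_true` above implements the standard form of Keane's bump function, and `evaluate_slack_true` encodes its two constraints as slacks that are non-negative exactly when the constraints hold:

    f(x) = -\left| \frac{\sum_{i=1}^{d} \cos^4(x_i) - 2 \prod_{i=1}^{d} \cos^2(x_i)}{\sqrt{\sum_{i=1}^{d} i \, x_i^2}} \right|

    \text{subject to} \quad \prod_{i=1}^{d} x_i \ge 0.75, \qquad \sum_{i=1}^{d} x_i \le 7.5\, d, \qquad x \in [0, 10]^d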
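A minimal usage sketch, assuming the class is added to botorch/test_functions/synthetic.py (the file path is not shown in this hunk) and using only the methods defined in the diff:

    import torch

    from botorch.test_functions.synthetic import KeaneBumpFunction  # assumed path

    # 10-dimensional instance; _optimal_value is set from the lookup table (-0.6737).
    problem = KeaneBumpFunction(dim=10)

    # Sample a batch of 5 points from the domain [0, 10]^10.
    X = 10.0 * torch.rand(5, 10, dtype=torch.double)

    obj = problem.evaluate_true(X)          # noiseless objective, shape (5,)
    slack = problem.evaluate_slack_true(X)  # constraint slacks, shape (5, 2)
    feasible = (slack >= 0).all(dim=-1)     # feasible iff both slacks are >= 0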