Coverage for ebcpy/optimization.py: 79%

261 statements  

« prev     ^ index     » next       coverage.py v7.4.4, created at 2026-04-20 13:20 +0000

1"""Base-module for the whole optimization pacakge. 

2Used to define Base-Classes such as Optimizer and 

3Calibrator.""" 

4 

5import os 

6from pathlib import Path 

7import warnings 

8from typing import List, Tuple, Union 

9from collections import namedtuple 

10from abc import abstractmethod 

11import numpy as np 

12from ebcpy.utils import setup_logger 

13 

14 

15# pylint: disable=import-outside-toplevel 

16# pylint: disable=broad-except 

17 

18 

class Optimizer: 
    """ 
    Base class for optimization in ebcpy. All classes 
    performing optimization tasks must inherit from this 
    class. 
    The main feature of this class is the common interface 
    for different available solvers in python. This makes 
    testing different solvers and methods easier. 
    For available frameworks/solvers, check the function 
    self.optimize(). 
 
 
    :param str,Path working_directory: 
        Directory for storing all output of optimization via a logger. 
    :keyword list bounds: 
        The boundaries for the optimization variables. 
    """ 
 
    # NOTE(review): the attributes below live on the class, so all instances
    # share them until an instance rebinds its own copy. In particular the
    # mutable ones (_current_best_iterate, _obj_his) are single shared
    # objects across every instance — confirm this sharing is intended.

    # Used to display number of obj-function-calls 
    _counter = 0 
    # Used to access the current parameter set if an optimization-step fails 
    _current_iterate = np.array([]) 
    # Used to access the best iterate if an optimization step fails 
    _current_best_iterate = {"Objective": np.inf} 
    # List storing every objective value for plotting and logging. 
    # Can be used, but will enlarge runtime 
    _obj_his = [] 

46 

47 def __init__(self, working_directory: Union[Path, str] = None, **kwargs): 

48 """Instantiate class parameters""" 

49 if working_directory is None and "cd" in kwargs: 

50 warnings.warn("cd was renamed to working_directory in all classes. Use working_directory instead.", 

51 category=DeprecationWarning) 

52 self.working_directory = kwargs["cd"] 

53 elif working_directory is None: 

54 self._working_directory = None 

55 else: 

56 self.working_directory = working_directory 

57 

58 self.logger = setup_logger(working_directory=self.working_directory, name=self.__class__.__name__) 

59 # Set kwargs 

60 self.bounds = kwargs.get("bounds", None) 

61 

62 @abstractmethod 

63 def obj(self, xk, *args): 

64 """ 

65 Base objective function. Overload this function and create your own 

66 objective function. Make sure that the return value is a scalar. 

67 Furthermore, the parameter vector xk is always a numpy array. 

68 

69 :param np.array xk: 

70 Array with parameters for optimization 

71 

72 :return: float result: 

73 A scalar (float/ 1d) value for the optimization framework. 

74 """ 

75 raise NotImplementedError(f'{self.__class__.__name__}.obj function is not defined') 

76 

77 @abstractmethod 

78 def mp_obj(self, x, *args): 

79 """ 

80 Objective function for Multiprocessing. 

81 

82 :param np.array x: 

83 Array with parameters for optimization. 

84 Shape of the array is (number_of_evaluations x number_of_variables). 

85 For instance, optimizating 10 variables and evaluating 

86 900 objectives in parallel, the shape would be 900 x 10. 

87 :param int n_cpu: 

88 Number of logical Processors to run optimization on. 

89 """ 

90 raise NotImplementedError(f'{self.__class__.__name__}.obj function is not defined') 

91 

92 @property 

93 def supported_frameworks(self): 

94 """ 

95 List with all frameworks supported by this 

96 wrapper class. 

97 """ 

98 return ["scipy_minimize", 

99 "scipy_differential_evolution", 

100 "dlib_minimize", 

101 "pymoo", 

102 "bayesian_optimization"] 

103 

104 @property 

105 def working_directory(self) -> Path: 

106 """The current working directory""" 

107 return self._working_directory 

108 

109 @working_directory.setter 

110 def working_directory(self, working_directory: Union[Path, str]): 

111 """Set current working directory""" 

112 if isinstance(working_directory, str): 

113 working_directory = Path(working_directory) 

114 os.makedirs(working_directory, exist_ok=True) 

115 self._working_directory = working_directory 

116 

117 @property 

118 def cd(self) -> Path: 

119 warnings.warn("cd was renamed to working_directory in all classes. " 

120 "Use working_directory instead instead.", 

121 category=DeprecationWarning) 

122 return self.working_directory 

123 

124 @cd.setter 

125 def cd(self, cd: Union[Path, str]): 

126 warnings.warn("cd was renamed to working_directory in all classes. " 

127 "Use working_directory instead instead.", 

128 category=DeprecationWarning) 

129 self.working_directory = cd 

130 

131 @property 

132 def bounds(self) -> List[Union[Tuple, List]]: 

133 """The boundaries of the optimization problem.""" 

134 return self._bounds 

135 

136 @bounds.setter 

137 def bounds(self, bounds): 

138 """Set the boundaries to the optimization variables""" 

139 self._bounds = bounds 

140 

141 def optimize(self, framework, method=None, n_cpu=1, **kwargs): 

142 """ 

143 Perform the optimization based on the given method and framework. 

144 

145 :param str framework: 

146 The framework (python module) you want to use to perform the optimization. 

147 Currently, "scipy_minimize", "dlib_minimize" and "scipy_differential_evolution" 

148 are supported options. To further inform yourself about these frameworks, please see: 

149 - `dlib <http://dlib.net/python/index.html>`_ 

150 - `scipy minimize <https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html>`_ 

151 - `scipy differential evolution <https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.differential_evolution.html>`_ 

152 - 'pymoo' <https://pymoo.org/index.html> 

153 :param str method: 

154 The method you pass depends on the methods available in the framework 

155 you chose when setting up the class. Some frameworks don't require a 

156 method, as only one exists. This is the case for dlib. For any framework 

157 with different methods, you must provide one. 

158 For the scipy.differential_evolution function, method is equal to the 

159 strategy. 

160 For the pymoo function, method is equal to the 

161 algorithm. 

162 :param int n_cpu: 

163 Number of parallel processes used for the evaluation. 

164 Ignored if the framework-method combination does not 

165 support multi-processing. 

166 

167 Keyword arguments: 

168 Depending on the framework an method you use, you can fine-tune the 

169 optimization tool using extra arguments. We refer to the documentation of 

170 each framework for a listing of what parameters are supported and how 

171 to set them. 

172 E.g. For scipy.optimize.minimize one could 

173 add "tol=1e-3" as a kwarg. 

174 

175 :return: res 

176 Optimization result. 

177 """ 

178 # Choose the framework 

179 minimize_func, requires_method = self._choose_framework(framework) 

180 if method is None and requires_method: 

181 raise ValueError(f"{framework} requires a method, but None is " 

182 f"provided. Please choose one.") 

183 # Perform minimization 

184 res = minimize_func(method=method, n_cpu=n_cpu, **kwargs) 

185 return res 

186 

187 def _choose_framework(self, framework): 

188 """ 

189 Function to select the functions for optimization 

190 and for executing said functions. 

191 

192 :param str framework: 

193 String for selection of the relevant function. Supported options are: 

194 - scipy_minimize 

195 - dlib_minimize 

196 - scipy_differential_evolution 

197 - pymoo 

198 """ 

199 if framework.lower() == "scipy_minimize": 

200 return self._scipy_minimize, True 

201 if framework.lower() == "dlib_minimize": 

202 return self._dlib_minimize, False 

203 if framework.lower() == "scipy_differential_evolution": 

204 return self._scipy_differential_evolution, True 

205 if framework.lower() == "pymoo": 

206 return self._pymoo, True 

207 if framework.lower() == "bayesian_optimization": 

208 return self._bayesian_optimization, False 

209 

210 raise TypeError(f"Given framework {framework} is currently not supported.") 

211 

    def _bayesian_optimization(self, method=None, n_cpu=1, **kwargs):
        """
        Possible kwargs for the bayesian_optimization function with default values:

        random_state = 42
        allow_duplicate_points = True
        init_points = 5
        n_iter = 25
        kind_of_utility_function = "ei"
        xi = 0.1
        kappa = 2.576
        verbose = False

        For an explanation of what the parameters do, we refer to the documentation of
        the bayesian optimization package:
        https://bayesian-optimization.github.io/BayesianOptimization/index.html

        Additionally, a pre-built acquisition function instance can be passed via
        the ``acquisition_function`` kwarg. If given, it takes precedence over
        ``kind_of_utility_function``/``xi``/``kappa``.

        :param method: Unused; kept for the common optimize() interface.
        :param int n_cpu: Unused; evaluation runs single-process here.
        """
        # Merge user kwargs over the framework defaults.
        default_kwargs = self.get_default_config(framework="bayesian_optimization")
        default_kwargs.update(kwargs)

        # Backwards compatibility for the historical misspelling; the
        # correctly spelled kwarg wins if both are given (setdefault does
        # not overwrite an existing key).
        if "allow_dublicate_points" in default_kwargs:
            warnings.warn(
                "'allow_dublicate_points' is a typo and deprecated. "
                "Use 'allow_duplicate_points' instead.",
                FutureWarning,
                stacklevel=2,
            )
            default_kwargs.setdefault(
                "allow_duplicate_points",
                default_kwargs.pop("allow_dublicate_points"),
            )

        try:
            from bayes_opt import BayesianOptimization
        except ImportError as error:
            raise ImportError("Please install bayesian-optimization to use "
                              "the bayesian_optimization function.") from error

        # Figure out which API version we're dealing with. In >=2.0 the old
        # UtilityFunction was removed and replaced by the acquisition-function
        # classes in bayes_opt.acquisition.
        try:
            from bayes_opt import acquisition as _bo_acquisition
            _new_api = True
        except ImportError:
            _bo_acquisition = None
            _new_api = False

        try:
            if self.bounds is None:
                raise ValueError("For the bayesian optimization approach, you need to specify "
                                 "boundaries. Currently, no bounds are specified.")

            # The package expects named parameters; generate x0, x1, ...
            # from the positional bounds list.
            pbounds = {f"x{n}": i for n, i in enumerate(self.bounds)}

            # Resolve the acquisition function. Users can pass a ready-made instance
            # via `acquisition_function` (new-API only); otherwise build it from
            # `kind_of_utility_function` + `xi`/`kappa`.
            acq_function_instance = default_kwargs.get("acquisition_function", None)
            kind = default_kwargs["kind_of_utility_function"].lower()
            xi = default_kwargs["xi"]
            kappa = default_kwargs["kappa"]
            random_state = default_kwargs["random_state"]

            if _new_api:
                if acq_function_instance is None:
                    if kind in ("ei", "expected_improvement"):
                        acq_function_instance = _bo_acquisition.ExpectedImprovement(xi=xi)
                    elif kind in ("poi", "probability_of_improvement"):
                        acq_function_instance = _bo_acquisition.ProbabilityOfImprovement(xi=xi)
                    elif kind in ("ucb", "upper_confidence_bound"):
                        acq_function_instance = _bo_acquisition.UpperConfidenceBound(kappa=kappa)
                    else:
                        raise ValueError(
                            f"Unknown kind_of_utility_function '{kind}'. "
                            "Supported: 'ei', 'poi', 'ucb' (or pass a ready-made "
                            "instance via the 'acquisition_function' kwarg)."
                        )

                optimizer = BayesianOptimization(
                    f=self._bayesian_opt_obj,
                    pbounds=pbounds,
                    acquisition_function=acq_function_instance,
                    random_state=random_state,
                    allow_duplicate_points=default_kwargs["allow_duplicate_points"],
                    verbose=default_kwargs["verbose"],
                )

                # Optional custom Gaussian-process regressor; assigned to the
                # private attribute since no public setter is used here.
                gp = default_kwargs.get("gp", None)
                if gp is not None:
                    optimizer._gp = gp

                optimizer.maximize(
                    init_points=default_kwargs["init_points"],
                    n_iter=default_kwargs["n_iter"],
                )
            else:
                # Legacy path for bayesian-optimization < 2.0
                warnings.warn(
                    "You are using bayesian-optimization < 2.0. Support for this "
                    "version is deprecated and will be removed in the future. "
                    "Please upgrade to bayesian-optimization >= 2.0.",
                    FutureWarning,
                    stacklevel=2,
                )
                from bayes_opt.util import UtilityFunction

                optimizer = BayesianOptimization(
                    f=self._bayesian_opt_obj,
                    pbounds=pbounds,
                    random_state=random_state,
                    allow_duplicate_points=default_kwargs["allow_duplicate_points"],
                    verbose=default_kwargs["verbose"],
                )

                gp = default_kwargs.get("gp", None)
                if gp is not None:
                    optimizer._gp = gp

                acq_function = UtilityFunction(kind=kind, xi=xi, kappa=kappa)
                optimizer.maximize(
                    init_points=default_kwargs["init_points"],
                    n_iter=default_kwargs["n_iter"],
                    acquisition_function=acq_function,
                )

            # The package maximizes -obj (see _bayesian_opt_obj), so negate
            # the target back into the minimization objective and repack as
            # the common (x, fun) result tuple.
            res = optimizer.max
            x_res = np.array(list(res["params"].values()))
            f_res = -res["target"]
            res_tuple = namedtuple("res_tuple", "x fun")
            res = res_tuple(x=x_res, fun=f_res)
            return res
        except (KeyboardInterrupt, Exception) as error:
            # pylint: disable=inconsistent-return-statements
            self._handle_error(error)

351 

352 def _bayesian_opt_obj(self, **kwargs): 

353 """ 

354 This function is needed as the signature for the Bayesian-optimization 

355 is different than the standard signature. The Bayesian-optimization gives keyword arguments for 

356 every parameter and only maximizes, therefore we will maximize the negative objective function value. 

357 """ 

358 xk = np.array(list(kwargs.values())) 

359 return -self.obj(xk) 

360 

361 def _scipy_minimize(self, method, n_cpu=1, **kwargs): 

362 """ 

363 Possible kwargs for the scipy minimize function with default values: 

364 

365 x0: Required 

366 tol = None 

367 options = None 

368 constraints = {} 

369 jac = None 

370 hess = None 

371 hessp = None 

372 """ 

373 default_kwargs = self.get_default_config(framework="scipy_minimize") 

374 default_kwargs.update(kwargs) 

375 try: 

376 import scipy.optimize as opt 

377 except ImportError as error: 

378 raise ImportError("Please install scipy to use " 

379 "the minimize_scipy function.") from error 

380 

381 try: 

382 if "x0" not in kwargs: 

383 raise KeyError("An initial guess (x0) is required " 

384 "for scipy.minimize. You passed None") 

385 res = opt.minimize( 

386 fun=self.obj, 

387 x0=kwargs["x0"], 

388 method=method, 

389 jac=default_kwargs["jac"], 

390 hess=default_kwargs["hess"], 

391 hessp=default_kwargs["hessp"], 

392 bounds=self.bounds, 

393 constraints=default_kwargs["constraints"], 

394 tol=default_kwargs["tol"], 

395 options=default_kwargs["options"] 

396 ) 

397 return res 

398 except (KeyboardInterrupt, Exception) as error: 

399 # pylint: disable=inconsistent-return-statements 

400 self._handle_error(error) 

401 

402 def _dlib_minimize(self, method=None, n_cpu=1, **kwargs): 

403 """ 

404 Possible kwargs for the dlib minimize function with default values: 

405 

406 is_integer_variable = None 

407 solver_epsilon = 0 

408 num_function_calls = int(1e9) 

409 """ 

410 default_kwargs = self.get_default_config(framework="dlib_minimize") 

411 default_kwargs.update(kwargs) 

412 try: 

413 import dlib 

414 except ImportError as error: 

415 raise ImportError("Please install dlib to use the minimize_dlib function.") from error 

416 try: 

417 _bounds_2d = np.array(self.bounds) 

418 _bound_min = list(_bounds_2d[:, 0]) 

419 _bound_max = list(_bounds_2d[:, 1]) 

420 if "is_integer_variable" not in kwargs: 

421 is_integer_variable = list(np.zeros(len(_bound_max))) 

422 else: 

423 is_integer_variable = kwargs["is_integer_variable"] 

424 

425 # This check is only necessary as the error-messages from dlib are quite indirect. 

426 # Any new user would not get that these parameters cause the error. 

427 for key in ["solver_epsilon", "num_function_calls"]: 

428 value = kwargs.get(key) 

429 if value is not None: 

430 if not isinstance(value, (float, int)): 

431 raise TypeError( 

432 f"Given {key} is of type {type(value).__name__} but " 

433 f"should be type float or int" 

434 ) 

435 

436 x_res, f_res = dlib.find_min_global( 

437 f=self._dlib_obj, 

438 bound1=_bound_min, 

439 bound2=_bound_max, 

440 is_integer_variable=is_integer_variable, 

441 num_function_calls=int(default_kwargs["num_function_calls"]), 

442 solver_epsilon=float(default_kwargs["solver_epsilon"]) 

443 ) 

444 res_tuple = namedtuple("res_tuple", "x fun") 

445 res = res_tuple(x=x_res, fun=f_res) 

446 return res 

447 except (KeyboardInterrupt, Exception) as error: 

448 # pylint: disable=inconsistent-return-statements 

449 self._handle_error(error) 

450 

451 def _scipy_differential_evolution(self, method="best1bin", n_cpu=1, **kwargs): 

452 """ 

453 Possible kwargs for the dlib minimize function with default values: 

454 

455 maxiter = 1000 

456 popsize = 15 

457 tol = None 

458 mutation = (0.5, 1) 

459 recombination = 0.7 

460 seed = None 

461 polish = True 

462 init = 'latinhypercube' 

463 atol = 0 

464 """ 

465 default_kwargs = self.get_default_config(framework="scipy_differential_evolution") 

466 default_kwargs.update(kwargs) 

467 try: 

468 import scipy.optimize as opt 

469 except ImportError as error: 

470 raise ImportError("Please install scipy to use the minimize_scipy function.") from error 

471 

472 try: 

473 if self.bounds is None: 

474 raise ValueError("For the differential evolution approach, you need to specify " 

475 "boundaries. Currently, no bounds are specified.") 

476 

477 res = opt.differential_evolution( 

478 func=self.obj, 

479 bounds=self.bounds, 

480 strategy=method, 

481 maxiter=default_kwargs["maxiter"], 

482 popsize=default_kwargs["popsize"], 

483 tol=default_kwargs["tol"], 

484 mutation=default_kwargs["mutation"], 

485 recombination=default_kwargs["recombination"], 

486 seed=default_kwargs["seed"], 

487 disp=False, # We have our own logging 

488 polish=default_kwargs["polish"], 

489 init=default_kwargs["init"], 

490 atol=default_kwargs["atol"] 

491 ) 

492 return res 

493 except (KeyboardInterrupt, Exception) as error: 

494 # pylint: disable=inconsistent-return-statements 

495 self._handle_error(error) 

496 

    def _pymoo(self, method="GA", n_cpu=1, **kwargs):
        """
        Minimize :meth:`obj` using pymoo.

        Possible kwargs for the pymoo minimize function with default values:

        n_gen = 1000
        termination = None
        seed = 1
        verbose = False
        display = None
        callback = None
        save_history = False
        copy_algorithm = False
        copy_termination = False

        Any remaining kwargs (e.g. selection, crossover, sampling, mutation)
        are forwarded to the chosen algorithm's constructor.

        :param str method: Name of the pymoo algorithm, e.g. "GA" or "nsga2".
        :param int n_cpu: If > 1, evaluation is delegated to :meth:`mp_obj`.
        """
        # Merge user kwargs over the framework defaults.
        default_kwargs = self.get_default_config(framework="pymoo")
        default_kwargs.update(kwargs)

        try:
            from pymoo.optimize import minimize
            from pymoo.problems.single import Problem
            from pymoo.algorithms.moo.ctaea import CTAEA
            from pymoo.algorithms.moo.moead import MOEAD
            from pymoo.algorithms.moo.nsga2 import NSGA2
            from pymoo.algorithms.moo.nsga3 import NSGA3
            from pymoo.algorithms.moo.rnsga2 import RNSGA2
            from pymoo.algorithms.moo.rnsga3 import RNSGA3
            from pymoo.algorithms.soo.nonconvex.de import DE
            from pymoo.algorithms.soo.nonconvex.ga import GA
            from pymoo.algorithms.moo.unsga3 import UNSGA3
            from pymoo.algorithms.soo.nonconvex.brkga import BRKGA
            from pymoo.algorithms.soo.nonconvex.pso import PSO

        except ImportError as error:
            raise ImportError("Please install pymoo to use this function.") from error
        # pymoo moved/renamed several modules in 0.6; probing the old layout
        # tells us which version is installed.
        pymoo_version_greater_050 = True
        try:
            from pymoo.factory import get_sampling, get_mutation, get_crossover, get_selection
            from pymoo.algorithms.soo.nonconvex.nelder_mead import NelderMead
            from pymoo.algorithms.soo.nonconvex.pattern_search import PatternSearch
            pymoo_version_greater_050 = False
        except ImportError as error:
            from pymoo.algorithms.soo.nonconvex.nelder import NelderMead
            from pymoo.algorithms.soo.nonconvex.pattern import PatternSearch

        # Lower-case method name -> algorithm class.
        pymoo_algorithms = {
            "ga": GA,
            "brkga": BRKGA,
            "de": DE,
            "nelder-mead": NelderMead,
            "pattern-search": PatternSearch,
            "pso": PSO,
            "nsga2": NSGA2,
            "rnsga2": RNSGA2,
            "nsga3": NSGA3,
            "unsga3": UNSGA3,
            "rnsga3": RNSGA3,
            "moead": MOEAD,
            "ctaea": CTAEA,
        }

        if method.lower() not in pymoo_algorithms:
            raise ValueError(f"Given method {method} is currently not supported. Please choose one of the "
                             "following: " + ", ".join(pymoo_algorithms.keys()))

        class EBCPYProblem(Problem):
            """Construct wrapper problem class."""

            def __init__(self,
                         ebcpy_class: Optimizer
                         ):
                self.ebcpy_class = ebcpy_class
                super().__init__(n_var=len(ebcpy_class.bounds),
                                 n_obj=1,
                                 n_constr=0,
                                 xl=np.array([bound[0] for bound in ebcpy_class.bounds]),
                                 xu=np.array([bound[1] for bound in ebcpy_class.bounds])
                                 )

            def _evaluate(self, x, out, *args, **kwargs):
                # x holds one row per candidate; the closure over n_cpu
                # decides between multiprocessing and sequential evaluation.
                if n_cpu > 1:
                    out["F"] = self.ebcpy_class.mp_obj(x, n_cpu, *args)
                else:
                    out["F"] = np.array([self.ebcpy_class.obj(xk=_x, *args) for _x in x])

        try:
            if self.bounds is None:
                raise ValueError("For pymoo, you need to specify "
                                 "boundaries. Currently, no bounds are specified.")

            # Split the minimize(...) arguments off; whatever remains in
            # default_kwargs goes to the algorithm constructor below.
            termination = default_kwargs.pop("termination")
            if termination is None:
                termination = ("n_gen", default_kwargs.pop("n_gen"))
            seed = default_kwargs.pop("seed")
            verbose = default_kwargs.pop("verbose")
            save_history = default_kwargs.pop("save_history")
            copy_algorithm = default_kwargs.pop("copy_algorithm")
            copy_termination = default_kwargs.pop("copy_termination")
            callback = default_kwargs.pop("callback")
            display = default_kwargs.pop("display")

            if not pymoo_version_greater_050:

                # pymoo < 0.6 accepted string names for these operators;
                # convert them via the factory helpers for compatibility.
                keys_to_check = ["selection", "crossover", "sampling", "mutation"]
                if any(isinstance(default_kwargs.get(k), str) for k in keys_to_check):
                    warnings.warn(
                        "Support for pymoo<0.6 string arguments is deprecated and will be removed in the future. "
                        "Please import the classes yourself and pass the objects directly to the kwargs.",
                        FutureWarning,
                        stacklevel=2,
                    )

                if "selection" in default_kwargs.keys():
                    default_kwargs["selection"] = get_selection(name=default_kwargs["selection"])
                if "crossover" in default_kwargs.keys():
                    default_kwargs["crossover"] = get_crossover(name=default_kwargs["crossover"])
                if "sampling" in default_kwargs.keys():
                    default_kwargs["sampling"] = get_sampling(name=default_kwargs["sampling"])
                if "mutation" in default_kwargs.keys():
                    default_kwargs["mutation"] = get_mutation(name=default_kwargs["mutation"])
            algorithm = pymoo_algorithms[method.lower()](**default_kwargs)

            minimize_kwargs = {
                "problem": EBCPYProblem(ebcpy_class=self),
                "algorithm": algorithm,
                "termination": termination,
                "seed": seed,
                "verbose": verbose,
                "save_history": save_history,
                "copy_algorithm": copy_algorithm,
                "copy_termination": copy_termination,
            }

            # Only forward callback/display when the user actually set them.
            if callback is not None:
                minimize_kwargs["callback"] = callback
            if display is not None:
                minimize_kwargs["display"] = display

            res = minimize(**minimize_kwargs)
            # Repack as the common (x, fun) result tuple.
            res_tuple = namedtuple("res_tuple", "x fun")
            res = res_tuple(x=res.X, fun=res.F[0])
            return res
        except (KeyboardInterrupt, Exception) as error:
            # pylint: disable=inconsistent-return-statements
            self._handle_error(error)

641 

642 def _dlib_obj(self, *args): 

643 """ 

644 This function is needed as the signature for the dlib-obj 

645 is different than the standard signature. dlib will parse a number of 

646 parameters 

647 """ 

648 return self.obj(np.array(args)) 

649 

650 def _handle_error(self, error): 

651 """ 

652 Function to handle the case when an optimization step fails (e.g. simulation-fail). 

653 The parameter set which caused the failure and the best iterate until this point 

654 are of interest for the user in such case. 

655 :param error: 

656 Any Exception that may occur 

657 """ 

658 self.logger.error(f"Parameter set which caused the failure: {self._current_iterate}") 

659 self.logger.error("Current best objective and parameter set:") 

660 self.logger.error("\n".join([f"{key}: {value}" 

661 for key, value in self._current_best_iterate.items()])) 

662 raise error 

663 

664 @staticmethod 

665 def get_default_config(framework: str) -> dict: 

666 """ 

667 Return the default config or kwargs for the 

668 given framework. 

669 

670 The default values are extracted of the corresponding 

671 framework directly. 

672 """ 

673 if framework.lower() == "scipy_minimize": 

674 return {"tol": None, 

675 "options": None, 

676 "constraints": None, 

677 "jac": None, 

678 "hess": None, 

679 "hessp": None} 

680 if framework.lower() == "dlib_minimize": 

681 return {"num_function_calls": int(1e9), 

682 "solver_epsilon": 0} 

683 if framework.lower() == "scipy_differential_evolution": 

684 return {"maxiter": 1000, 

685 "popsize": 15, 

686 "tol": 0.01, 

687 "mutation": (0.5, 1), 

688 "recombination": 0.7, 

689 "seed": None, 

690 "polish": True, 

691 "init": 'latinhypercube', 

692 "atol": 0 

693 } 

694 if framework.lower() == "pymoo": 

695 return {"n_gen": 1000, 

696 "termination": None, 

697 "seed": 1, 

698 "verbose": False, 

699 "display": None, 

700 "callback": None, 

701 "save_history": False, 

702 "copy_algorithm": False, 

703 "copy_termination": False 

704 } 

705 if framework.lower() == "bayesian_optimization": 

706 return {"random_state": 42, 

707 "allow_duplicate_points": True, 

708 "init_points": 5, 

709 "n_iter": 25, 

710 "kind_of_utility_function": "ei", 

711 "xi": 0.1, 

712 "kappa": 2.576, 

713 "verbose": False, 

714 } 

715 return {}