Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/scipy/optimize/_shgo.py: 10%

617 statements  

coverage.py v7.3.2, created at 2023-12-12 06:31 +0000

1""" 

2shgo: The simplicial homology global optimisation algorithm 

3""" 

4 

5import numpy as np 

6import time 

7import logging 

8import warnings 

9from scipy import spatial 

10from scipy.optimize import OptimizeResult, minimize, Bounds 

11from scipy.optimize._constraints import new_bounds_to_old 

12from ._optimize import _wrap_scalar_function 

13from scipy.optimize._shgo_lib.triangulation import Complex 

14 

15 

16__all__ = ['shgo'] 

17 

18 

def shgo(func, bounds, args=(), constraints=None, n=None, iters=1,
         callback=None,
         minimizer_kwargs=None, options=None, sampling_method='simplicial'):
    """
    Finds the global minimum of a function using SHG optimization.

    SHGO stands for "simplicial homology global optimization".

    Parameters
    ----------
    func : callable
        The objective function to be minimized. Must be in the form
        ``f(x, *args)``, where ``x`` is the argument in the form of a 1-D array
        and ``args`` is a tuple of any additional fixed parameters needed to
        completely specify the function.
    bounds : sequence or `Bounds`
        Bounds for variables. There are two ways to specify the bounds:

        1. Instance of `Bounds` class.
        2. Sequence of ``(min, max)`` pairs for each element in `x`.

    args : tuple, optional
        Any additional fixed parameters needed to completely specify the
        objective function.
    constraints : dict or sequence of dict, optional
        Constraints definition.
        Function(s) of ``R**n`` in the form::

            g(x) >= 0 applied as g : R^n -> R^m
            h(x) == 0 applied as h : R^n -> R^p

        Each constraint is defined in a dictionary with fields:

        type : str
            Constraint type: 'eq' for equality, 'ineq' for inequality.
        fun : callable
            The function defining the constraint.
        jac : callable, optional
            The Jacobian of `fun` (only for SLSQP).
        args : sequence, optional
            Extra arguments to be passed to the function and Jacobian.

        Equality constraint means that the constraint function result is to
        be zero whereas inequality means that it is to be non-negative.
        Note that COBYLA only supports inequality constraints.

        .. note::

           Only the COBYLA and SLSQP local minimize methods currently
           support constraint arguments. If the ``constraints`` sequence
           used in the local optimization problem is not defined in
           ``minimizer_kwargs`` and a constrained method is used then the
           global ``constraints`` will be used.
           (Defining a ``constraints`` sequence in ``minimizer_kwargs``
           means that ``constraints`` will not be added, so if equality
           constraints and so forth need to be added then the inequality
           functions in ``constraints`` need to be added to
           ``minimizer_kwargs`` too).

    n : int, optional
        Number of sampling points used in the construction of the simplicial
        complex. Note that this argument is only used for ``sobol`` and other
        arbitrary `sampling_methods`. In case of ``sobol``, it must be a
        power of 2: ``n=2**m``, and the argument will automatically be
        converted to the next higher power of 2. Default is 100 for
        ``sampling_method='simplicial'`` and 128 for
        ``sampling_method='sobol'``.
    iters : int, optional
        Number of iterations used in the construction of the simplicial
        complex. Default is 1.
    callback : callable, optional
        Called after each iteration, as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the minimizer
        ``scipy.optimize.minimize``. Some important options could be:

        * method : str
            The minimization method, the default is ``SLSQP``.
        * args : tuple
            Extra arguments passed to the objective function (``func``) and
            its derivatives (Jacobian, Hessian).
        * options : dict, optional
            Note that by default the tolerance is specified as
            ``{ftol: 1e-12}``

    options : dict, optional
        A dictionary of solver options. Many of the options specified for the
        global routine are also passed to the `scipy.optimize.minimize`
        routine. The options that are also passed to the local routine are
        marked with "(L)".

        Stopping criteria: the algorithm will terminate if any of the
        specified criteria are met. However, the default algorithm does not
        require any to be specified:

        * maxfev : int (L)
            Maximum number of function evaluations in the feasible domain.
            (Note that only methods that support this option will terminate
            the routine at precisely the specified value. Otherwise the
            criterion will only terminate during a global iteration.)
        * f_min
            Specify the minimum objective function value, if it is known.
        * f_tol : float
            Precision goal for the value of f in the stopping
            criterion. Note that the global routine will also
            terminate if a sampling point in the global routine is
            within this tolerance.
        * maxiter : int
            Maximum number of iterations to perform.
        * maxev : int
            Maximum number of sampling evaluations to perform (includes
            searching in infeasible points).
        * maxtime : float
            Maximum processing runtime allowed.
        * minhgrd : int
            Minimum homology group rank differential. The homology group of
            the objective function is calculated (approximately) during every
            iteration. The rank of this group has a one-to-one correspondence
            with the number of locally convex subdomains in the objective
            function (after adequate sampling points each of these subdomains
            contain a unique global minimum). If the difference in the
            homology group rank is 0 between iterations for ``minhgrd``
            specified iterations, the algorithm will terminate.

        Objective function knowledge:

        * symmetry : bool
            Specify True if the objective function contains symmetric
            variables. The search space (and therefore the run time) is
            decreased by a factor of up to O(n!).

        * jac : bool or callable, optional
            Jacobian (gradient) of objective function. Only for CG, BFGS,
            Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg. If ``jac`` is
            a boolean and is True, ``fun`` is assumed to return the gradient
            along with the objective function. If False, the gradient will be
            estimated numerically. ``jac`` can also be a callable returning
            the gradient of the objective. In this case, it must accept the
            same arguments as ``fun``. (Passed to `scipy.optimize.minimize`
            automatically)

        * hess, hessp : callable, optional
            Hessian (matrix of second-order derivatives) of objective function
            or Hessian of objective function times an arbitrary vector p.
            Only for Newton-CG, dogleg, trust-ncg. Only one of ``hessp`` or
            ``hess`` needs to be given. If ``hess`` is provided, then
            ``hessp`` will be ignored. If neither ``hess`` nor ``hessp`` is
            provided, then the Hessian product will be approximated using
            finite differences on ``jac``. ``hessp`` must compute the Hessian
            times an arbitrary vector. (Passed to `scipy.optimize.minimize`
            automatically)

        Algorithm settings:

        * minimize_every_iter : bool
            If True then promising global sampling points will be passed to a
            local minimization routine every iteration. If False then only the
            final minimizer pool will be run. Defaults to False.
        * local_iter : int
            Only evaluate a few of the best minimizer pool candidates every
            iteration. If False all potential points are passed to the local
            minimization routine.
        * infty_constraints : bool
            If True then any sampling points generated which are outside
            the feasible domain will be saved and given an objective function
            value of ``inf``. If False then these points will be discarded.
            Using this functionality could lead to higher performance with
            respect to function evaluations before the global minimum is
            found; specifying False will use less memory at the cost of a
            slight decrease in performance. Defaults to True.

        Feedback:

        * disp : bool (L)
            Set to True to print convergence messages.
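
        For example, a dictionary enabling a known-minimum stopping criterion
        might look like ``options={'f_min': 0.0, 'f_tol': 1e-6}`` (an
        illustrative sketch; any subset of the keys above may be combined).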


    sampling_method : str or function, optional
        Current built in sampling method options are ``halton``, ``sobol`` and
        ``simplicial``. The default ``simplicial`` provides the theoretical
        guarantee of convergence to the global minimum in finite time. The
        ``halton`` and ``sobol`` methods are faster in terms of sampling point
        generation at the cost of the loss of guaranteed convergence. They are
        more appropriate for most "easier" problems where the convergence is
        relatively fast. User defined sampling functions must accept two
        arguments, ``n`` (the number of sampling points) and ``dim`` (their
        dimension), per call and output an array of sampling points with
        shape ``n x dim``.
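
        For instance, a uniform random sampler with this signature could be
        passed directly (a minimal sketch; the generated points should lie in
        the unit hypercube, as they are scaled to ``bounds`` internally):

        >>> import numpy as np
        >>> rng = np.random.default_rng()
        >>> def custom_sampler(n, dim):
        ...     return rng.uniform(size=(n, dim))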


    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an `OptimizeResult` object.
        Important attributes are:
        ``x`` the solution array corresponding to the global minimum,
        ``fun`` the function output at the global solution,
        ``xl`` an ordered list of local minima solutions,
        ``funl`` the function output at the corresponding local solutions,
        ``success`` a Boolean flag indicating if the optimizer exited
        successfully,
        ``message`` which describes the cause of the termination,
        ``nfev`` the total number of objective function evaluations including
        the sampling calls,
        ``nlfev`` the total number of objective function evaluations
        culminating from all local search optimizations,
        ``nit`` number of iterations performed by the global routine.

    Notes
    -----
    Global optimization using simplicial homology global optimization [1]_.
    Appropriate for solving general purpose NLP and blackbox optimization
    problems to global optimality (low-dimensional problems).

    In general, the optimization problems are of the form::

        minimize f(x) subject to

        g_i(x) >= 0,  i = 1,...,m
        h_j(x)  = 0,  j = 1,...,p

    where x is a vector of one or more variables. ``f(x)`` is the objective
    function ``R^n -> R``, ``g_i(x)`` are the inequality constraints, and
    ``h_j(x)`` are the equality constraints.

    Optionally, the lower and upper bounds for each element in x can also be
    specified using the `bounds` argument.

    While most of the theoretical advantages of SHGO are only proven for when
    ``f(x)`` is a Lipschitz smooth function, the algorithm is also proven to
    converge to the global optimum for the more general case where ``f(x)`` is
    non-continuous, non-convex and non-smooth, if the default sampling method
    is used [1]_.

    The local search method may be specified using the ``minimizer_kwargs``
    parameter which is passed on to ``scipy.optimize.minimize``. By default,
    the ``SLSQP`` method is used. In general, it is recommended to use the
    ``SLSQP`` or ``COBYLA`` local minimization if inequality constraints
    are defined for the problem since the other methods do not use
    constraints.

    The ``halton`` and ``sobol`` method points are generated using
    `scipy.stats.qmc`. Any other QMC method could be used.
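
    For example, the built-in ``sobol`` sampling is roughly equivalent to
    wrapping a `scipy.stats.qmc.Sobol` engine in a custom sampling function
    (a sketch mirroring the internal set-up, not the exact implementation):

    >>> from scipy.stats import qmc
    >>> sobol_engine = qmc.Sobol(d=2, scramble=False)
    >>> sampler = lambda n, d: sobol_engine.random(n)  # pass as sampling_method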


    References
    ----------
    .. [1] Endres, SC, Sandrock, C, Focke, WW (2018) "A simplicial homology
           algorithm for Lipschitz optimisation", Journal of Global
           Optimization.
    .. [2] Joe, SW and Kuo, FY (2008) "Constructing Sobol' sequences with
           better two-dimensional projections", SIAM J. Sci. Comput. 30,
           2635-2654.
    .. [3] Hock, W and Schittkowski, K (1981) "Test examples for nonlinear
           programming codes", Lecture Notes in Economics and Mathematical
           Systems, 187. Springer-Verlag, New York.
           http://www.ai7.uni-bayreuth.de/test_problem_coll.pdf
    .. [4] Wales, DJ (2015) "Perspective: Insight into reaction coordinates
           and dynamics from the potential energy landscape",
           Journal of Chemical Physics, 142(13), 2015.

    Examples
    --------
    First consider the problem of minimizing the Rosenbrock function, `rosen`:

    >>> import numpy as np
    >>> from scipy.optimize import rosen, shgo
    >>> bounds = [(0, 2), (0, 2), (0, 2), (0, 2), (0, 2)]
    >>> result = shgo(rosen, bounds)
    >>> result.x, result.fun
    (array([1., 1., 1., 1., 1.]), 2.920392374190081e-18)

    Note that bounds determine the dimensionality of the objective function
    and are therefore a required input, however you can specify empty bounds
    using ``None`` or objects like ``np.inf`` which will be converted to large
    float numbers.

    >>> bounds = [(None, None), ]*4
    >>> result = shgo(rosen, bounds)
    >>> result.x
    array([0.99999851, 0.99999704, 0.99999411, 0.9999882 ])

    Next, we consider the Eggholder function, a problem with several local
    minima and one global minimum. We will demonstrate the use of arguments
    and the capabilities of `shgo`.
    (https://en.wikipedia.org/wiki/Test_functions_for_optimization)

    >>> def eggholder(x):
    ...     return (-(x[1] + 47.0)
    ...             * np.sin(np.sqrt(abs(x[0]/2.0 + (x[1] + 47.0))))
    ...             - x[0] * np.sin(np.sqrt(abs(x[0] - (x[1] + 47.0))))
    ...             )
    ...
    >>> bounds = [(-512, 512), (-512, 512)]

    `shgo` has built-in low discrepancy sampling sequences. First, we will
    input 64 initial sampling points of the *Sobol'* sequence:

    >>> result = shgo(eggholder, bounds, n=64, sampling_method='sobol')
    >>> result.x, result.fun
    (array([512.        , 404.23180824]), -959.6406627208397)

    `shgo` also returns any other local minima that were found; these can be
    accessed using:

    >>> result.xl
    array([[ 512.        ,  404.23180824],
           [ 283.0759062 , -487.12565635],
           [-294.66820039, -462.01964031],
           [-105.87688911,  423.15323845],
           [-242.97926   ,  274.38030925],
           [-506.25823477,    6.3131022 ],
           [-408.71980731, -156.10116949],
           [ 150.23207937,  301.31376595],
           [  91.00920901, -391.283763  ],
           [ 202.89662724, -269.38043241],
           [ 361.66623976, -106.96493868],
           [-219.40612786, -244.06020508]])

    >>> result.funl
    array([-959.64066272, -718.16745962, -704.80659592, -565.99778097,
           -559.78685655, -557.36868733, -507.87385942, -493.9605115 ,
           -426.48799655, -421.15571437, -419.31194957, -410.98477763])

    These results are useful in applications where there are many global
    minima and the values of other global minima are desired or where the
    local minima can provide insight into the system (for example morphologies
    in physical chemistry [4]_).

    If we want to find a larger number of local minima, we can increase the
    number of sampling points or the number of iterations. We'll increase the
    number of sampling points to 64 and the number of iterations from the
    default of 1 to 3. Using ``simplicial`` this would have given us
    64 x 3 = 192 initial sampling points.

    >>> result_2 = shgo(eggholder, bounds, n=64, iters=3,
    ...                 sampling_method='sobol')
    >>> len(result.xl), len(result_2.xl)
    (12, 20)

    Note the difference between, e.g., ``n=192, iters=1`` and ``n=64,
    iters=3``. In the first case the promising points contained in the
    minimiser pool are processed only once. In the latter case they are
    processed every 64 sampling points for a total of 3 times.

    To demonstrate solving problems with non-linear constraints consider the
    following example from Hock and Schittkowski problem 73 (cattle-feed)
    [3]_::

        minimize: f = 24.55 * x_1 + 26.75 * x_2 + 39 * x_3 + 40.50 * x_4

        subject to: 2.3 * x_1 + 5.6 * x_2 + 11.1 * x_3 + 1.3 * x_4 - 5 >= 0,

                    12 * x_1 + 11.9 * x_2 + 41.8 * x_3 + 52.1 * x_4 - 21
                        - 1.645 * sqrt(0.28 * x_1**2 + 0.19 * x_2**2 +
                                       20.5 * x_3**2 + 0.62 * x_4**2) >= 0,

                    x_1 + x_2 + x_3 + x_4 - 1 == 0,

                    1 >= x_i >= 0 for all i

    The approximate answer given in [3]_ is::

        f([0.6355216, -0.12e-11, 0.3127019, 0.05177655]) = 29.894378

    >>> def f(x):  # (cattle-feed)
    ...     return 24.55*x[0] + 26.75*x[1] + 39*x[2] + 40.50*x[3]
    ...
    >>> def g1(x):
    ...     return 2.3*x[0] + 5.6*x[1] + 11.1*x[2] + 1.3*x[3] - 5  # >=0
    ...
    >>> def g2(x):
    ...     return (12*x[0] + 11.9*x[1] + 41.8*x[2] + 52.1*x[3] - 21
    ...             - 1.645 * np.sqrt(0.28*x[0]**2 + 0.19*x[1]**2
    ...                               + 20.5*x[2]**2 + 0.62*x[3]**2)
    ...             )  # >=0
    ...
    >>> def h1(x):
    ...     return x[0] + x[1] + x[2] + x[3] - 1  # == 0
    ...
    >>> cons = ({'type': 'ineq', 'fun': g1},
    ...         {'type': 'ineq', 'fun': g2},
    ...         {'type': 'eq', 'fun': h1})
    >>> bounds = [(0, 1.0),]*4
    >>> res = shgo(f, bounds, iters=3, constraints=cons)
    >>> res
     message: Optimization terminated successfully.
     success: True
         fun: 29.894378159142136
        funl: [ 2.989e+01]
           x: [ 6.355e-01  1.137e-13  3.127e-01  5.178e-02]
          xl: [[ 6.355e-01  1.137e-13  3.127e-01  5.178e-02]]
         nit: 3
        nfev: 114
       nlfev: 35
       nljev: 5
       nlhev: 0

    >>> g1(res.x), g2(res.x), h1(res.x)
    (-5.062616992290714e-14, -2.9594104944408173e-12, 0.0)

    """


    # if necessary, convert bounds class to old bounds
    if isinstance(bounds, Bounds):
        bounds = new_bounds_to_old(bounds.lb, bounds.ub, len(bounds.lb))

    # Initiate SHGO class
    shc = SHGO(func, bounds, args=args, constraints=constraints, n=n,
               iters=iters, callback=callback,
               minimizer_kwargs=minimizer_kwargs,
               options=options, sampling_method=sampling_method)

    # Run the algorithm, process results and test success
    shc.construct_complex()

    if not shc.break_routine:
        if shc.disp:
            print("Successfully completed construction of complex.")

    # Test post iterations success
    if len(shc.LMC.xl_maps) == 0:
        # If sampling failed to find pool, return lowest sampled point
        # with a warning
        shc.find_lowest_vertex()
        shc.break_routine = True
        shc.fail_routine(mes="Failed to find a feasible minimizer point. "
                             "Lowest sampling point = {}".format(shc.f_lowest))
        shc.res.fun = shc.f_lowest
        shc.res.x = shc.x_lowest
        shc.res.nfev = shc.fn

    # Confirm the routine ran successfully
    if not shc.break_routine:
        shc.res.message = 'Optimization terminated successfully.'
        shc.res.success = True

    # Return the final results
    return shc.res



class SHGO:
    def __init__(self, func, bounds, args=(), constraints=None, n=None,
                 iters=None, callback=None, minimizer_kwargs=None,
                 options=None, sampling_method='sobol'):

        from scipy.stats import qmc

        # Input checks
        methods = ['halton', 'sobol', 'simplicial']
        if isinstance(sampling_method, str) and sampling_method not in methods:
            raise ValueError(("Unknown sampling_method specified."
                              " Valid methods: {}").format(', '.join(methods)))

        # Initiate class
        self._raw_func = func  # some methods pass args in (e.g. Complex)
        _, self.func = _wrap_scalar_function(func, args)
        self.bounds = bounds
        self.args = args
        self.callback = callback

        # Bounds
        abound = np.array(bounds, float)
        self.dim = np.shape(abound)[0]  # Dimensionality of problem

        # Set non-finite values to large floats
        infind = ~np.isfinite(abound)
        abound[infind[:, 0], 0] = -1e50
        abound[infind[:, 1], 1] = 1e50

        # Check if bounds are correctly specified
        bnderr = abound[:, 0] > abound[:, 1]
        if bnderr.any():
            raise ValueError('Error: lb > ub in bounds {}.'
                             .format(', '.join(str(b) for b in bnderr)))

        self.bounds = abound

        # Constraints
        # Process constraint dict sequence:
        if constraints is not None:
            self.min_cons = constraints
            self.g_cons = []
            self.g_args = []
            if (type(constraints) is not tuple) and (type(constraints)
                                                     is not list):
                constraints = (constraints,)

            for cons in constraints:
                if cons['type'] == 'ineq':
                    self.g_cons.append(cons['fun'])
                    try:
                        self.g_args.append(cons['args'])
                    except KeyError:
                        self.g_args.append(())
            self.g_cons = tuple(self.g_cons)
            self.g_args = tuple(self.g_args)
        else:
            self.g_cons = None
            self.g_args = None

        # Define local minimization keyword arguments
        # Start with defaults
        self.minimizer_kwargs = {'method': 'SLSQP',
                                 'bounds': self.bounds,
                                 'options': {},
                                 'callback': self.callback
                                 }
        if minimizer_kwargs is not None:
            # Overwrite with supplied values
            self.minimizer_kwargs.update(minimizer_kwargs)

        else:
            self.minimizer_kwargs['options'] = {'ftol': 1e-12}

        # Pass the global constraints to the local minimizer when a
        # constrained method is used and no constraints were supplied in
        # `minimizer_kwargs`:
        if (self.minimizer_kwargs['method'] in ('SLSQP', 'COBYLA') and
                (minimizer_kwargs is not None and
                 'constraints' not in minimizer_kwargs and
                 constraints is not None) or
                (self.g_cons is not None)):
            self.minimizer_kwargs['constraints'] = self.min_cons

        # Process options dict
        if options is not None:
            self.init_options(options)
        else:  # Default settings:
            self.f_min_true = None
            self.minimize_every_iter = False

            # Algorithm limits
            self.maxiter = None
            self.maxfev = None
            self.maxev = None
            self.maxtime = None
            self.f_min_true = None
            self.minhgrd = None

            # Objective function knowledge
            self.symmetry = False

            # Algorithm functionality
            self.local_iter = False
            self.infty_cons_sampl = True

            # Feedback
            self.disp = False

        # Remove unknown arguments in self.minimizer_kwargs
        # Start with arguments all the solvers have in common
        self.min_solver_args = ['fun', 'x0', 'args',
                                'callback', 'options', 'method']
        # then add the ones unique to specific solvers
        solver_args = {
            '_custom': ['jac', 'hess', 'hessp', 'bounds', 'constraints'],
            'nelder-mead': [],
            'powell': [],
            'cg': ['jac'],
            'bfgs': ['jac'],
            'newton-cg': ['jac', 'hess', 'hessp'],
            'l-bfgs-b': ['jac', 'bounds'],
            'tnc': ['jac', 'bounds'],
            'cobyla': ['constraints'],
            'slsqp': ['jac', 'bounds', 'constraints'],
            'dogleg': ['jac', 'hess'],
            'trust-ncg': ['jac', 'hess', 'hessp'],
            'trust-krylov': ['jac', 'hess', 'hessp'],
            'trust-exact': ['jac', 'hess'],
            'trust-constr': ['jac', 'hess', 'hessp'],
        }
        method = self.minimizer_kwargs['method']
        self.min_solver_args += solver_args[method.lower()]

        # Only retain the known arguments
        def _restrict_to_keys(dictionary, goodkeys):
            """Remove keys from dictionary if not in goodkeys - inplace"""
            existingkeys = set(dictionary)
            for key in existingkeys - set(goodkeys):
                dictionary.pop(key, None)

        _restrict_to_keys(self.minimizer_kwargs, self.min_solver_args)
        _restrict_to_keys(self.minimizer_kwargs['options'],
                          self.min_solver_args + ['ftol'])

        # Algorithm controls
        # Global controls
        self.stop_global = False  # Used in the stopping_criteria method
        self.break_routine = False  # Break the algorithm globally
        self.iters = iters  # Iterations to be run
        self.iters_done = 0  # Iterations completed
        self.n = n  # Sampling points per iteration
        self.nc = n  # Sampling points to sample in current iteration
        self.n_prc = 0  # Processed points (used to track Delaunay iters)
        self.n_sampled = 0  # Number of sampling points already generated
        self.fn = 0  # Number of feasible sampling point evaluations performed
        self.hgr = 0  # Homology group rank

        # Default settings if no sampling criteria.
        if self.iters is None:
            self.iters = 1
        if self.n is None:
            self.n = 100
            if sampling_method == 'sobol':
                self.n = 128
            self.nc = self.n

        if not ((self.maxiter is None) and (self.maxfev is None) and (
                self.maxev is None)
                and (self.minhgrd is None) and (self.f_min_true is None)):
            self.iters = None

        # Set complex construction mode based on a provided stopping criteria:
        # Choose complex constructor
        if sampling_method == 'simplicial':
            self.iterate_complex = self.iterate_hypercube
            self.minimizers = self.simplex_minimizers
            self.sampling_method = sampling_method

        elif sampling_method in ['halton', 'sobol'] or \
                not isinstance(sampling_method, str):
            self.iterate_complex = self.iterate_delaunay
            self.minimizers = self.delaunay_complex_minimisers
            # Sampling method used
            if sampling_method in ['halton', 'sobol']:
                if sampling_method == 'sobol':
                    self.n = int(2 ** np.ceil(np.log2(self.n)))
                    self.nc = self.n
                    self.sampling_method = 'sobol'
                    self.qmc_engine = qmc.Sobol(d=self.dim, scramble=False,
                                                seed=np.random.RandomState())
                else:
                    self.sampling_method = 'halton'
                    self.qmc_engine = qmc.Halton(d=self.dim, scramble=True,
                                                 seed=np.random.RandomState())
                sampling_method = lambda n, d: self.qmc_engine.random(n)
            else:
                # A user defined sampling method:
                self.sampling_method = 'custom'

            self.sampling = self.sampling_custom
            self.sampling_function = sampling_method  # F(n, d)

        # Local controls
        self.stop_l_iter = False  # Local minimisation iterations
        self.stop_complex_iter = False  # Sampling iterations

        # Initiate storage objects used in algorithm classes
        self.minimizer_pool = []

        # Cache of local minimizers mapped
        self.LMC = LMapCache()

        # Initialize the return object
        self.res = OptimizeResult()  # scipy.optimize.OptimizeResult object
        self.res.nfev = 0  # Includes each sampling point as func evaluation
        self.res.nlfev = 0  # Local function evals for all minimisers
        self.res.nljev = 0  # Local Jacobian evals for all minimisers
        self.res.nlhev = 0  # Local Hessian evals for all minimisers


    # Initiation aids
    def init_options(self, options):
        """
        Initiates the options.

        Can also be useful to change parameters after class initiation.

        Parameters
        ----------
        options : dict

        Returns
        -------
        None

        """
        # Update the 'options' dict passed to optimize.minimize
        # Do this first so we don't mutate `options` below.
        self.minimizer_kwargs['options'].update(options)

        # Ensure that 'jac', 'hess', and 'hessp' are passed directly to
        # `minimize` as keywords, not as part of its 'options' dictionary.
        for opt in ['jac', 'hess', 'hessp']:
            if opt in self.minimizer_kwargs['options']:
                self.minimizer_kwargs[opt] = (
                    self.minimizer_kwargs['options'].pop(opt))

        # Default settings:
        self.minimize_every_iter = options.get('minimize_every_iter', False)

        # Algorithm limits
        # Maximum number of iterations to perform.
        self.maxiter = options.get('maxiter', None)
        # Maximum number of function evaluations in the feasible domain
        self.maxfev = options.get('maxfev', None)
        # Maximum number of sampling evaluations (includes searching in
        # infeasible points)
        self.maxev = options.get('maxev', None)
        # Maximum processing runtime allowed
        self.init = time.time()
        self.maxtime = options.get('maxtime', None)
        if 'f_min' in options:
            # Specify the minimum objective function value, if it is known.
            self.f_min_true = options['f_min']
            self.f_tol = options.get('f_tol', 1e-4)
        else:
            self.f_min_true = None

        self.minhgrd = options.get('minhgrd', None)

        # Objective function knowledge
        self.symmetry = 'symmetry' in options

        # Algorithm functionality
        # Only evaluate a few of the best candidates
        self.local_iter = options.get('local_iter', False)

        self.infty_cons_sampl = options.get('infty_constraints', True)

        # Feedback
        self.disp = options.get('disp', False)


    # Iteration properties
    # Main construction loop:
    def construct_complex(self):
        """
        Construct for `iters` iterations.

        If uniform sampling is used, every iteration adds 'n' sampling points.

        Iteration also stops if a stopping criterion (e.g., sampling points
        or processing time) has been met.

        """
        if self.disp:
            print('Splitting first generation')

        while not self.stop_global:
            if self.break_routine:
                break
            # Iterate complex, process minimisers
            self.iterate()
            self.stopping_criteria()

        # Build minimiser pool
        # Final iteration only needed if pools weren't minimised every
        # iteration
        if not self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()

        self.res.nit = self.iters_done + 1


    def find_minima(self):
        """
        Construct the minimizer pool, map the minimizers to local minima
        and sort the results into a global return object.
        """
        self.minimizers()
        if len(self.X_min) != 0:
            # Minimize the pool of minimizers with local minimization methods
            # Note that if Options['local_iter'] is an `int` instead of the
            # default value False then only that number of candidates will be
            # minimized
            self.minimise_pool(self.local_iter)
            # Sort results and build the global return object
            self.sort_result()

            # Lowest values used to report in case of failures
            self.f_lowest = self.res.fun
            self.x_lowest = self.res.x
        else:
            self.find_lowest_vertex()

    def find_lowest_vertex(self):
        # Find the lowest objective function value on one of
        # the vertices of the simplicial complex
        if self.sampling_method == 'simplicial':
            self.f_lowest = np.inf
            for x in self.HC.V.cache:
                if self.HC.V[x].f < self.f_lowest:
                    self.f_lowest = self.HC.V[x].f
                    self.x_lowest = self.HC.V[x].x_a
            if self.f_lowest == np.inf:  # no feasible point
                self.f_lowest = None
                self.x_lowest = None
        else:
            if self.fn == 0:
                self.f_lowest = None
                self.x_lowest = None
            else:
                self.f_I = np.argsort(self.F, axis=-1)
                self.f_lowest = self.F[self.f_I[0]]
                self.x_lowest = self.C[self.f_I[0]]


    # Stopping criteria functions:
    def finite_iterations(self):
        if self.iters is not None:
            if self.iters_done >= (self.iters - 1):
                self.stop_global = True

        if self.maxiter is not None:  # Stop for infeasible sampling
            if self.iters_done >= (self.maxiter - 1):
                self.stop_global = True
        return self.stop_global

    def finite_fev(self):
        # Finite function evals in the feasible domain
        if self.fn >= self.maxfev:
            self.stop_global = True
        return self.stop_global

    def finite_ev(self):
        # Finite evaluations including infeasible sampling points
        if self.n_sampled >= self.maxev:
            self.stop_global = True

    def finite_time(self):
        if (time.time() - self.init) >= self.maxtime:
            self.stop_global = True

    def finite_precision(self):
        """
        Stop the algorithm if the final function value is known

        Specify in options (with ``self.f_min_true = options['f_min']``)
        and the tolerance with ``f_tol = options['f_tol']``
        """
        # If no minimizer has been found use the lowest sampling value
        if len(self.LMC.xl_maps) == 0:
            self.find_lowest_vertex()

        # Function to stop algorithm at specified percentage error:
        if self.f_lowest == 0.0:
            if self.f_min_true == 0.0:
                if self.f_lowest <= self.f_tol:
                    self.stop_global = True
        else:
            pe = (self.f_lowest - self.f_min_true) / abs(self.f_min_true)
            if self.f_lowest <= self.f_min_true:
                self.stop_global = True
                # if (pe - self.f_tol) <= abs(1.0 / abs(self.f_min_true)):
                if abs(pe) >= 2 * self.f_tol:
                    warnings.warn("A much lower value than the expected "
                                  "f* = {} was found: "
                                  "f_lowest = {}".format(self.f_min_true,
                                                         self.f_lowest))
            if pe <= self.f_tol:
                self.stop_global = True

        return self.stop_global


    def finite_homology_growth(self):
        if self.LMC.size == 0:
            return  # pass on no reason to stop yet.
        self.hgrd = self.LMC.size - self.hgr

        self.hgr = self.LMC.size
        if self.hgrd <= self.minhgrd:
            self.stop_global = True
        return self.stop_global

    def stopping_criteria(self):
        """
        Various stopping criteria run every iteration

        Returns
        -------
        stop : bool
        """
        if self.maxiter is not None:
            self.finite_iterations()
        if self.iters is not None:
            self.finite_iterations()
        if self.maxfev is not None:
            self.finite_fev()
        if self.maxev is not None:
            self.finite_ev()
        if self.maxtime is not None:
            self.finite_time()
        if self.f_min_true is not None:
            self.finite_precision()
        if self.minhgrd is not None:
            self.finite_homology_growth()


    def iterate(self):
        self.iterate_complex()

        # Build minimizer pool
        if self.minimize_every_iter:
            if not self.break_routine:
                self.find_minima()  # Process minimizer pool

        # Algorithm updates
        self.iters_done += 1

    def iterate_hypercube(self):
        """
        Iterate a subdivision of the complex

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        # Iterate the complex
        if self.n_sampled == 0:
            # Initial triangulation of the hyper-rectangle. Note that
            # we use `self._raw_func` as `self.func` is a *wrapped* function
            # that already takes the original function arguments into
            # account.
            self.HC = Complex(self.dim, self._raw_func, self.args,
                              self.symmetry, self.bounds, self.g_cons,
                              self.g_args)
        else:
            self.HC.split_generation()

        # feasible sampling points counted by the triangulation.py routines
        self.fn = self.HC.V.nfev
        self.n_sampled = self.HC.V.size  # nevs counted in triangulation.py
        return

    def iterate_delaunay(self):
        """
        Build a complex of Delaunay triangulated points

        Note: called with ``self.iterate_complex()`` after class initiation
        """
        self.sampled_surface(infty_cons_sampl=self.infty_cons_sampl)
        self.nc += self.n
        self.n_sampled = self.nc


    # Hypercube minimizers
    def simplex_minimizers(self):
        """
        Returns the indexes of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for x in self.HC.V.cache:
            if self.HC.V[x].minimiser():
                if self.disp:
                    logging.info('=' * 60)
                    logging.info(
                        'v.x = {} is minimizer'.format(self.HC.V[x].x_a))
                    logging.info(
                        'v.f = {} is minimizer'.format(self.HC.V[x].f))
                    logging.info('=' * 30)

                if self.HC.V[x] not in self.minimizer_pool:
                    self.minimizer_pool.append(self.HC.V[x])

                if self.disp:
                    logging.info('Neighbors:')
                    logging.info('=' * 30)
                    for vn in self.HC.V[x].nn:
                        logging.info('x = {} || f = {}'.format(vn.x, vn.f))

                    logging.info('=' * 60)

        self.minimizer_pool_F = []
        self.X_min = []
        # normalized tuple in the Vertex cache
        self.X_min_cache = {}  # Cache used in hypercube sampling

        for v in self.minimizer_pool:
            self.X_min.append(v.x_a)
            self.minimizer_pool_F.append(v.f)
            self.X_min_cache[tuple(v.x_a)] = v.x

        self.minimizer_pool_F = np.array(self.minimizer_pool_F)
        self.X_min = np.array(self.X_min)

        # TODO: Only do this if global mode
        self.sort_min_pool()

        return self.X_min


    # Local minimization
    # Minimizer pool processing
    def minimise_pool(self, force_iter=False):
        """
        This processing method can optionally minimise only the best candidate
        solutions in the minimizer pool

        Parameters
        ----------
        force_iter : int
            Number of starting minimizers to process (can be specified
            globally or locally)

        """
        # Find first local minimum
        # NOTE: Since we always minimize this value regardless it is a waste
        # to build the topograph first before minimizing
        lres_f_min = self.minimize(self.X_min[0], ind=self.minimizer_pool[0])

        # Trim minimized point from current minimizer set
        self.trim_min_pool(0)

        # Force processing to only the specified number of minimizers
        if force_iter:
            self.local_iter = force_iter

        while not self.stop_l_iter:
            # Global stopping criteria:
            if self.f_min_true is not None:
                if (lres_f_min.fun - self.f_min_true) / abs(
                        self.f_min_true) <= self.f_tol:
                    self.stop_l_iter = True
                    break
            # Note first iteration is outside loop:
            if self.local_iter is not None:
                if self.disp:
                    logging.info(
                        'SHGO.iters in function minimise_pool = {}'.format(
                            self.local_iter))
                self.local_iter -= 1
                if self.local_iter == 0:
                    self.stop_l_iter = True
                    break

            if np.shape(self.X_min)[0] == 0:
                self.stop_l_iter = True
                break

            # Construct topograph from current minimizer set
            # (NOTE: This is a very small topograph using only the minimizer
            # pool; it might be worth using some graph theory tools instead.)
            self.g_topograph(lres_f_min.x, self.X_min)

            # Find local minimum at the minimiser with the greatest Euclidean
            # distance from the current solution
            ind_xmin_l = self.Z[:, -1]
            lres_f_min = self.minimize(self.Ss[-1, :],
                                       self.minimizer_pool[-1])

            # Trim minimised point from current minimizer set
            self.trim_min_pool(ind_xmin_l)

        # Reset controls
        self.stop_l_iter = False
        return


    def sort_min_pool(self):
        # Sort to find minimum func value in min_pool
        self.ind_f_min = np.argsort(self.minimizer_pool_F)
        self.minimizer_pool = np.array(self.minimizer_pool)[self.ind_f_min]
        self.minimizer_pool_F = np.array(self.minimizer_pool_F)[
            self.ind_f_min]
        return

    def trim_min_pool(self, trim_ind):
        self.X_min = np.delete(self.X_min, trim_ind, axis=0)
        self.minimizer_pool_F = np.delete(self.minimizer_pool_F, trim_ind)
        self.minimizer_pool = np.delete(self.minimizer_pool, trim_ind)
        return

    def g_topograph(self, x_min, X_min):
        """
        Returns the topographical vector stemming from the specified value
        ``x_min`` for the current feasible set ``X_min``: the feasible points
        sorted by increasing Euclidean distance from ``x_min``, with the
        minimizer pool reordered to match.

        """
        x_min = np.array([x_min])
        self.Y = spatial.distance.cdist(x_min, X_min, 'euclidean')
        # Find sorted indexes of spatial distances:
        self.Z = np.argsort(self.Y, axis=-1)

        self.Ss = X_min[self.Z][0]
        self.minimizer_pool = self.minimizer_pool[self.Z]
        self.minimizer_pool = self.minimizer_pool[0]
        return self.Ss


    # Local bound functions
    def construct_lcb_simplicial(self, v_min):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimizer vertex

        Returns
        -------
        cbounds : list of lists
            List of length ``dim`` with a length-2 list of bounds for each
            dimension

        """
        cbounds = [[x_b_i[0], x_b_i[1]] for x_b_i in self.bounds]
        # Loop over all bounds
        for vn in v_min.nn:
            for i, x_i in enumerate(vn.x_a):
                # Lower bound
                if (x_i < v_min.x_a[i]) and (x_i > cbounds[i][0]):
                    cbounds[i][0] = x_i

                # Upper bound
                if (x_i > v_min.x_a[i]) and (x_i < cbounds[i][1]):
                    cbounds[i][1] = x_i

        if self.disp:
            logging.info('cbounds found for v_min.x_a = {}'.format(v_min.x_a))
            logging.info('cbounds = {}'.format(cbounds))

        return cbounds

    def construct_lcb_delaunay(self, v_min, ind=None):
        """
        Construct locally (approximately) convex bounds

        Parameters
        ----------
        v_min : Vertex object
            The minimizer vertex

        Returns
        -------
        cbounds : list of lists
            List of length ``dim`` with a length-2 list of bounds for each
            dimension
        """
        cbounds = [[x_b_i[0], x_b_i[1]] for x_b_i in self.bounds]

        return cbounds


    # Minimize a starting point locally
    def minimize(self, x_min, ind=None):
        """
        This function is used to calculate the local minima using the
        specified sampling point as a starting value.

        Parameters
        ----------
        x_min : vector of floats
            Current starting point to minimize.

        Returns
        -------
        lres : OptimizeResult
            The local optimization result represented as an `OptimizeResult`
            object.
        """
        # Use minima maps if vertex was already run
        if self.disp:
            logging.info('Vertex minimiser maps = {}'.format(self.LMC.v_maps))

        if self.LMC[x_min].lres is not None:
            return self.LMC[x_min].lres

        # TODO: Check discarded bound rules

        if self.callback is not None:
            print('Callback for '
                  'minimizer starting at {}:'.format(x_min))

        if self.disp:
            print('Starting '
                  'minimization at {}...'.format(x_min))

        if self.sampling_method == 'simplicial':
            x_min_t = tuple(x_min)
            # Find the normalized tuple in the Vertex cache:
            x_min_t_norm = self.X_min_cache[tuple(x_min_t)]

            x_min_t_norm = tuple(x_min_t_norm)

            g_bounds = self.construct_lcb_simplicial(self.HC.V[x_min_t_norm])
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

        else:
            g_bounds = self.construct_lcb_delaunay(x_min, ind=ind)
            if 'bounds' in self.min_solver_args:
                self.minimizer_kwargs['bounds'] = g_bounds

        if self.disp and 'bounds' in self.minimizer_kwargs:
            print('bounds in kwarg:')
            print(self.minimizer_kwargs['bounds'])

        # Local minimization using scipy.optimize.minimize:
        lres = minimize(self.func, x_min, **self.minimizer_kwargs)

        if self.disp:
            print('lres = {}'.format(lres))

        # Local function evals for all minimizers
        self.res.nlfev += lres.nfev
        if 'njev' in lres:
            self.res.nljev += lres.njev
        if 'nhev' in lres:
            self.res.nlhev += lres.nhev

        try:  # Needed because of the brain dead 1x1 NumPy arrays
            lres.fun = lres.fun[0]
        except (IndexError, TypeError):
            pass

        # Append minima maps; indexing the cache creates the entry if it
        # does not exist yet
        self.LMC[x_min]
        self.LMC.add_res(x_min, lres, bounds=g_bounds)

        return lres


    # Post local minimization processing
    def sort_result(self):
        """
        Sort results and build the global return object
        """
        # Sort results in local minima cache
        results = self.LMC.sort_cache_result()
        self.res.xl = results['xl']
        self.res.funl = results['funl']
        self.res.x = results['x']
        self.res.fun = results['fun']

        # Add local func evals to sampling func evals
        # Count the number of feasible vertices and add to local func evals:
        self.res.nfev = self.fn + self.res.nlfev
        return self.res

    # Algorithm controls
    def fail_routine(self, mes=("Failed to converge")):
        self.break_routine = True
        self.res.success = False
        self.X_min = [None]
        self.res.message = mes


    def sampled_surface(self, infty_cons_sampl=False):
        """
        Sample the function surface.

        There are 2 modes: if ``infty_cons_sampl`` is True then the sampled
        points that are generated outside the feasible domain will be
        assigned an ``inf`` value in accordance with SHGO rules.
        This guarantees convergence and usually requires fewer objective
        function evaluations at the computational cost of more Delaunay
        triangulation points.

        If ``infty_cons_sampl`` is False, then the infeasible points are
        discarded and only a subspace of the sampled points is used. This
        comes at the cost of the loss of guaranteed convergence and usually
        requires more objective function evaluations.
        """
        # Generate sampling points
        if self.disp:
            print('Generating sampling points')
        self.sampling(self.nc, self.dim)
        self.n = self.nc

        if not infty_cons_sampl:
            # Find subspace of feasible points
            if self.g_cons is not None:
                self.sampling_subspace()

        # Sort remaining samples
        self.sorted_samples()

        # Find objective function references
        self.fun_ref()

        self.n_sampled = self.nc


    def delaunay_complex_minimisers(self):
        # Construct complex minimizers on the current sampling set.
        # if self.fn >= (self.dim + 1):
        if self.fn >= (self.dim + 2):
            # TODO: Check on strange Qhull error where the number of vertices
            # required for an initial simplex is higher than n + 1?
            if self.dim < 2:  # Scalar objective functions
                if self.disp:
                    print('Constructing 1-D minimizer pool')

                self.ax_subspace()
                self.surface_topo_ref()
                self.minimizers_1D()

            else:  # Multivariate functions.
                if self.disp:
                    print('Constructing Gabriel graph and minimizer pool')

                if self.iters == 1:
                    self.delaunay_triangulation(grow=False)
                else:
                    self.delaunay_triangulation(grow=True, n_prc=self.n_prc)
                self.n_prc = self.C.shape[0]

                if self.disp:
                    print('Triangulation completed, building minimizer pool')

                self.delaunay_minimizers()

            if self.disp:
                logging.info(
                    "Minimizer pool = SHGO.X_min = {}".format(self.X_min))
        else:
            if self.disp:
                print(
                    'Not enough sampling points found in the feasible domain.')
            self.minimizer_pool = [None]
            try:
                self.X_min
            except AttributeError:
                self.X_min = []


    def sampling_custom(self, n, dim):
        """
        Generates uniform sampling points in a hypercube and scales the points
        to the bound limits.
        """
        # Generate sampling points.
        # Generate uniform sample points in [0, 1]^m \subset R^m
        self.C = self.sampling_function(n, dim)
        # Distribute over bounds
        for i in range(len(self.bounds)):
            self.C[:, i] = (self.C[:, i] *
                            (self.bounds[i][1] - self.bounds[i][0])
                            + self.bounds[i][0])
        return self.C

    def sampling_subspace(self):
        """Find subspace of feasible points from g_func definition"""
        # Subspace of feasible points.
        for ind, g in enumerate(self.g_cons):
            self.C = self.C[g(self.C.T, *self.g_args[ind]) >= 0.0]
            if self.C.size == 0:
                self.res.message = ('No sampling point found within the '
                                    + 'feasible set. Increasing sampling '
                                    + 'size.')
                # sampling correctly for both 1-D and >1-D cases
                if self.disp:
                    print(self.res.message)

    def sorted_samples(self):  # Validated
        """Find indexes of the sorted sampling points"""
        self.Ind_sorted = np.argsort(self.C, axis=0)
        self.Xs = self.C[self.Ind_sorted]
        return self.Ind_sorted, self.Xs


    def ax_subspace(self):  # Validated
        """
        Finds the subspace vectors along each component axis.
        """
        self.Ci = []
        self.Xs_i = []
        self.Ii = []
        for i in range(self.dim):
            self.Ci.append(self.C[:, i])
            self.Ii.append(self.Ind_sorted[:, i])
            self.Xs_i.append(self.Xs[:, i])

    def fun_ref(self):
        """
        Find the objective function output reference table
        """
        # TODO: Replace with cached wrapper

        # Note: This process can be pooled easily
        # Objective function returns to be used as a reference table:
        f_cache_bool = False
        if self.fn > 0:  # Store old function evaluations
            Ftemp = self.F
            fn_old = self.fn
            f_cache_bool = True

        self.F = np.zeros(np.shape(self.C)[0])
        # NOTE: It might be easier to replace this with a cached
        # objective function
        for i in range(self.fn, np.shape(self.C)[0]):
            eval_f = True
            if self.g_cons is not None:
                for g in self.g_cons:
                    if g(self.C[i, :], *self.args) < 0.0:
                        eval_f = False
                        break  # Breaks the g loop

            if eval_f:
                self.F[i] = self.func(self.C[i, :])
                self.fn += 1
            elif self.infty_cons_sampl:
                self.F[i] = np.inf
                self.fn += 1
        if f_cache_bool:
            if fn_old > 0:  # Restore saved function evaluations
                self.F[0:fn_old] = Ftemp

        return self.F


    def surface_topo_ref(self):  # Validated
        """
        Find the BD and FD finite differences along each component vector.
        """
        # Replace numpy inf, -inf and nan objects with floating point numbers
        # nan --> float
        self.F[np.isnan(self.F)] = np.inf
        # inf, -inf --> floats
        self.F = np.nan_to_num(self.F)

        self.Ft = self.F[self.Ind_sorted]
        self.Ftp = np.diff(self.Ft, axis=0)  # FD
        self.Ftm = np.diff(self.Ft[::-1], axis=0)[::-1]  # BD

    def sample_topo(self, ind):
        # Find the position of the sample in the component axial directions
        self.Xi_ind_pos = []
        self.Xi_ind_topo_i = []

        for i in range(self.dim):
            for x, I_ind in zip(self.Ii[i], range(len(self.Ii[i]))):
                if x == ind:
                    self.Xi_ind_pos.append(I_ind)

            # Use the topo reference tables to find if point is a minimizer on
            # the current axis

            # First check if index is on the boundary of the sampling points:
            if self.Xi_ind_pos[i] == 0:
                # if boundary is in basin
                self.Xi_ind_topo_i.append(self.Ftp[:, i][0] > 0)

            elif self.Xi_ind_pos[i] == self.fn - 1:
                # Largest value at sample size
                self.Xi_ind_topo_i.append(self.Ftp[:, i][self.fn - 2] < 0)

            # Find axial reference for other points
            else:
                Xi_ind_top_p = self.Ftp[:, i][self.Xi_ind_pos[i]] > 0
                Xi_ind_top_m = self.Ftm[:, i][self.Xi_ind_pos[i] - 1] > 0
                self.Xi_ind_topo_i.append(Xi_ind_top_p and Xi_ind_top_m)

        # The point is a minimizer only if it is a minimizer along every axis
        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo


    def minimizers_1D(self):
        """
        Returns the indices of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can implement parallelization here
        for ind in range(self.fn):
            min_bool = self.sample_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()
        if not len(self.minimizer_pool) == 0:
            self.X_min = self.C[self.minimizer_pool]
        # If function is called again and pool is found unbreak:
        else:
            self.X_min = []

        return self.X_min


    def delaunay_triangulation(self, grow=False, n_prc=0):
        if not grow:
            self.Tri = spatial.Delaunay(self.C)
        else:
            if hasattr(self, 'Tri'):
                self.Tri.add_points(self.C[n_prc:, :])
            else:
                self.Tri = spatial.Delaunay(self.C, incremental=True)

        return self.Tri

    @staticmethod
    def find_neighbors_delaunay(pindex, triang):
        """
        Returns the indices of points connected to ``pindex`` on the Gabriel
        chain subgraph of the Delaunay triangulation.
        """
        # `vertex_neighbor_vertices` is a CSR-style (indptr, indices) pair;
        # the neighbours of `pindex` are the indices stored between
        # indptr[pindex] and indptr[pindex + 1].
        return triang.vertex_neighbor_vertices[1][
               triang.vertex_neighbor_vertices[0][pindex]:
               triang.vertex_neighbor_vertices[0][pindex + 1]]

    def sample_delaunay_topo(self, ind):
        self.Xi_ind_topo_i = []

        # Find the position of the sample in the component Gabriel chain
        G_ind = self.find_neighbors_delaunay(ind, self.Tri)

        # Find the finite difference between each point
        for g_i in G_ind:
            rel_topo_bool = self.F[ind] < self.F[g_i]
            self.Xi_ind_topo_i.append(rel_topo_bool)

        # Check if minimizer
        self.Xi_ind_topo = np.array(self.Xi_ind_topo_i).all()

        return self.Xi_ind_topo


    def delaunay_minimizers(self):
        """
        Returns the indices of all minimizers
        """
        self.minimizer_pool = []
        # Note: Can easily be parallelized
        if self.disp:
            logging.info('self.fn = {}'.format(self.fn))
            logging.info('self.nc = {}'.format(self.nc))
            logging.info('np.shape(self.C)'
                         ' = {}'.format(np.shape(self.C)))
        for ind in range(self.fn):
            min_bool = self.sample_delaunay_topo(ind)
            if min_bool:
                self.minimizer_pool.append(ind)

        self.minimizer_pool_F = self.F[self.minimizer_pool]

        # Sort to find minimum func value in min_pool
        self.sort_min_pool()
        if self.disp:
            logging.info('self.minimizer_pool = {}'.format(
                self.minimizer_pool))
        if not len(self.minimizer_pool) == 0:
            self.X_min = self.C[self.minimizer_pool]
        else:
            self.X_min = []  # Empty pool breaks main routine
        return self.X_min


class LMap:
    def __init__(self, v):
        self.v = v
        self.x_l = None
        self.lres = None
        self.f_min = None
        self.lbounds = []


class LMapCache:
    def __init__(self):
        self.cache = {}

        # Lists for search queries
        self.v_maps = []
        self.xl_maps = []
        self.f_maps = []
        self.lbound_maps = []
        self.size = 0

    def __getitem__(self, v):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        try:
            return self.cache[v]
        except KeyError:
            xval = LMap(v)
            self.cache[v] = xval

            return self.cache[v]

    def add_res(self, v, lres, bounds=None):
        v = np.ndarray.tolist(v)
        v = tuple(v)
        self.cache[v].x_l = lres.x
        self.cache[v].lres = lres
        self.cache[v].f_min = lres.fun
        self.cache[v].lbounds = bounds

        # Update cache size
        self.size += 1

        # Cache lists for search queries
        self.v_maps.append(v)
        self.xl_maps.append(lres.x)
        self.f_maps.append(lres.fun)
        self.lbound_maps.append(bounds)

    def sort_cache_result(self):
        """
        Sort results and build the global return object
        """
        results = {}
        # Sort results and save
        self.xl_maps = np.array(self.xl_maps)
        self.f_maps = np.array(self.f_maps)

        # Sorted indexes in Func_min
        ind_sorted = np.argsort(self.f_maps)

        # Save ordered list of minima
        results['xl'] = self.xl_maps[ind_sorted]  # Ordered x vals
        results['funl'] = self.f_maps[ind_sorted]
        results['funl'] = results['funl'].T

        # Find the global minimum among all minimizers
        results['x'] = self.xl_maps[ind_sorted[0]]  # Save global minima
        results['fun'] = self.f_maps[ind_sorted[0]]  # Save global fun value

        self.xl_maps = np.ndarray.tolist(self.xl_maps)
        self.f_maps = np.ndarray.tolist(self.f_maps)
        return results