1"""
2basinhopping: The basinhopping global optimization algorithm
3"""
4import numpy as np
5import math
6import scipy.optimize
7from scipy._lib._util import check_random_state
9__all__ = ['basinhopping']


class Storage:
    """
    Class used to store the lowest energy structure
    """
    def __init__(self, minres):
        self._add(minres)

    def _add(self, minres):
        self.minres = minres
        self.minres.x = np.copy(minres.x)

    def update(self, minres):
        if minres.fun < self.minres.fun:
            self._add(minres)
            return True
        else:
            return False

    def get_lowest(self):
        return self.minres


class BasinHoppingRunner:
    """This class implements the core of the basinhopping algorithm.

    Parameters
    ----------
    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly. Signature should
        be ``x_new = step_taking(x)``. Note that `x` may be modified in-place.
    accept_tests : list of callables
        Each test is passed the kwargs `f_new`, `x_new`, `f_old` and
        `x_old`. These tests will be used to judge whether or not to accept
        the step. The acceptable return values are True, False, or ``"force
        accept"``. If any of the tests return False then the step is rejected.
        If ``"force accept"``, then this will override any other tests in
        order to accept the step. This can be used, for example, to forcefully
        escape from a local minimum that ``basinhopping`` is trapped in.
    disp : bool, optional
        Display status messages.
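
    Examples
    --------
    A minimal sketch of an entry for `accept_tests` (``box_test`` is a
    hypothetical helper, not part of this module; assumes ``numpy`` is
    imported as ``np``): a test that rejects any step leaving a bounding box.

    >>> def box_test(f_new=None, x_new=None, f_old=None, x_old=None):
    ...     return bool(np.all(np.abs(x_new) <= 5.0))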
56 """
57 def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
58 self.x = np.copy(x0)
59 self.minimizer = minimizer
60 self.step_taking = step_taking
61 self.accept_tests = accept_tests
62 self.disp = disp
64 self.nstep = 0
66 # initialize return object
67 self.res = scipy.optimize.OptimizeResult()
68 self.res.minimization_failures = 0
70 # do initial minimization
71 minres = minimizer(self.x)
72 if not minres.success:
73 self.res.minimization_failures += 1
74 if self.disp:
75 print("warning: basinhopping: local minimization failure")
76 self.x = np.copy(minres.x)
77 self.energy = minres.fun
78 if self.disp:
79 print("basinhopping step %d: f %g" % (self.nstep, self.energy))
81 # initialize storage class
82 self.storage = Storage(minres)
84 if hasattr(minres, "nfev"):
85 self.res.nfev = minres.nfev
86 if hasattr(minres, "njev"):
87 self.res.njev = minres.njev
88 if hasattr(minres, "nhev"):
89 self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one Monte Carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.
        """
        # Take a random step. Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")

        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # accept the move based on self.accept_tests. If any test is False,
        # then reject the step. If any test returns the special string
        # 'force accept', then accept the step regardless. This can be used
        # to forcefully escape from a local minimum if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if testres == 'force accept':
                accept = True
                break
            elif testres is None:
                raise ValueError("accept_tests must return True, False, or "
                                 "'force accept'")
            elif not testres:
                accept = False

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return accept, minres

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm
        """
        self.nstep += 1
        new_global_min = False

        accept, minres = self._monte_carlo_step()

        if accept:
            self.energy = minres.fun
            self.x = np.copy(minres.x)
            new_global_min = self.storage.update(minres)

        # print some information
        if self.disp:
            self.print_report(minres.fun, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        self.xtrial = minres.x
        self.energy_trial = minres.fun
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        minres = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              " lowest_f %g" % (self.nstep, self.energy, energy_trial,
                                accept, minres.fun))


class AdaptiveStepsize:
    """
    Class to implement adaptive stepsize.

    This class wraps the step taking class and modifies the stepsize to
    ensure the true acceptance rate is as close as possible to the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine. Must contain modifiable attribute
        takestep.stepsize
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update
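
    Examples
    --------
    A minimal sketch of wrapping a step-taking object (uses the
    `RandomDisplacement` class defined later in this module; assumes
    ``numpy`` is imported as ``np``):

    >>> displace = RandomDisplacement(stepsize=0.5)
    >>> adaptive = AdaptiveStepsize(displace, accept_rate=0.5, interval=10,
    ...                             verbose=False)
    >>> x = adaptive(np.zeros(3))  # displaces x; stepsize adapts over calls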
202 """
203 def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
204 verbose=True):
205 self.takestep = takestep
206 self.target_accept_rate = accept_rate
207 self.interval = interval
208 self.factor = factor
209 self.verbose = verbose
211 self.nstep = 0
212 self.nstep_tot = 0
213 self.naccept = 0
215 def __call__(self, x):
216 return self.take_step(x)
218 def _adjust_step_size(self):
219 old_stepsize = self.takestep.stepsize
220 accept_rate = float(self.naccept) / self.nstep
221 if accept_rate > self.target_accept_rate:
222 # We're accepting too many steps. This generally means we're
223 # trapped in a basin. Take bigger steps.
224 self.takestep.stepsize /= self.factor
225 else:
226 # We're not accepting enough steps. Take smaller steps.
227 self.takestep.stepsize *= self.factor
228 if self.verbose:
229 print("adaptive stepsize: acceptance rate %f target %f new "
230 "stepsize %g old stepsize %g" % (accept_rate,
231 self.target_accept_rate, self.takestep.stepsize,
232 old_stepsize))
234 def take_step(self, x):
235 self.nstep += 1
236 self.nstep_tot += 1
237 if self.nstep % self.interval == 0:
238 self._adjust_step_size()
239 return self.takestep(x)
241 def report(self, accept, **kwargs):
242 "called by basinhopping to report the result of the step"
243 if accept:
244 self.naccept += 1


class RandomDisplacement:
    """Add a random displacement of maximum size `stepsize` to each coordinate.

    Calling this updates `x` in-place.

    Parameters
    ----------
    stepsize : float, optional
        Maximum stepsize in any dimension
    random_gen : {None, int, `numpy.random.Generator`,
                  `numpy.random.RandomState`}, optional
        If `random_gen` is None (or `np.random`), the
        `numpy.random.RandomState` singleton is used.
        If `random_gen` is an int, a new ``RandomState`` instance is used,
        seeded with `random_gen`.
        If `random_gen` is already a ``Generator`` or ``RandomState`` instance
        then that instance is used.
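
    Examples
    --------
    A small sketch (assuming ``numpy`` is imported as ``np``): every
    component of the displaced point stays within `stepsize` of the start.

    >>> displace = RandomDisplacement(stepsize=0.5, random_gen=1234)
    >>> x = displace(np.zeros(2))
    >>> bool(np.all(np.abs(x) <= 0.5))
    True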
266 """
268 def __init__(self, stepsize=0.5, random_gen=None):
269 self.stepsize = stepsize
270 self.random_gen = check_random_state(random_gen)
272 def __call__(self, x):
273 x += self.random_gen.uniform(-self.stepsize, self.stepsize,
274 np.shape(x))
275 return x
278class MinimizerWrapper:
279 """
280 wrap a minimizer function as a minimizer class
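
    A minimal usage sketch (illustrative, not from the original docstring),
    wrapping `scipy.optimize.minimize` around a simple quadratic:

    >>> import numpy as np
    >>> import scipy.optimize
    >>> wrapped = MinimizerWrapper(scipy.optimize.minimize,
    ...                            func=lambda x: np.sum((x - 1.0) ** 2),
    ...                            method='BFGS')
    >>> res = wrapped(np.array([3.0]))
    >>> bool(res.success)
    True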
281 """
282 def __init__(self, minimizer, func=None, **kwargs):
283 self.minimizer = minimizer
284 self.func = func
285 self.kwargs = kwargs
287 def __call__(self, x0):
288 if self.func is None:
289 return self.minimizer(x0, **self.kwargs)
290 else:
291 return self.minimizer(self.func, x0, **self.kwargs)


class Metropolis:
    """Metropolis acceptance criterion.

    Parameters
    ----------
    T : float
        The "temperature" parameter for the accept or reject criterion.
    random_gen : {None, int, `numpy.random.Generator`,
                  `numpy.random.RandomState`}, optional
        Random number generator used for the acceptance test.
        If `random_gen` is None (or `np.random`), the
        `numpy.random.RandomState` singleton is used.
        If `random_gen` is an int, a new ``RandomState`` instance is used,
        seeded with `random_gen`.
        If `random_gen` is already a ``Generator`` or ``RandomState`` instance
        then that instance is used.
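
    Examples
    --------
    An illustrative sketch: a downhill move is always accepted, whatever
    the random state.

    >>> met = Metropolis(T=1.0, random_gen=0)
    >>> met(f_new=-1.0, x_new=None, f_old=0.0, x_old=None)
    True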
312 """
314 def __init__(self, T, random_gen=None):
315 # Avoid ZeroDivisionError since "MBH can be regarded as a special case
316 # of the BH framework with the Metropolis criterion, where temperature
317 # T = 0." (Reject all steps that increase energy.)
318 self.beta = 1.0 / T if T != 0 else float('inf')
319 self.random_gen = check_random_state(random_gen)
321 def accept_reject(self, energy_new, energy_old):
322 """
323 If new energy is lower than old, it will always be accepted.
324 If new is higher than old, there is a chance it will be accepted,
325 less likely for larger differences.
326 """
327 with np.errstate(invalid='ignore'):
328 # The energy values being fed to Metropolis are 1-length arrays, and if
329 # they are equal, their difference is 0, which gets multiplied by beta,
330 # which is inf, and array([0]) * float('inf') causes
331 #
332 # RuntimeWarning: invalid value encountered in multiply
333 #
334 # Ignore this warning so when the algorithm is on a flat plane, it always
335 # accepts the step, to try to move off the plane.
336 prod = -(energy_new - energy_old) * self.beta
337 w = math.exp(min(0, prod))
339 rand = self.random_gen.uniform()
340 return w >= rand
342 def __call__(self, **kwargs):
343 """
344 f_new and f_old are mandatory in kwargs
345 """
346 return bool(self.accept_reject(kwargs["f_new"],
347 kwargs["f_old"]))


def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None,
                 seed=None, *, target_accept_rate=0.5, stepwise_factor=0.9):
    """Find the global minimum of a function using the basin-hopping algorithm.

    Basin-hopping is a two-phase method that combines a global stepping
    algorithm with local minimization at each step. Designed to mimic
    the natural process of energy minimization of clusters of atoms, it works
    well for similar problems with "funnel-like, but rugged" energy landscapes
    [5]_.

    As the step-taking, step acceptance, and minimization methods are all
    customizable, this function can also be used to implement other two-phase
    methods.

    Parameters
    ----------
    func : callable ``f(x, *args)``
        Function to be optimized. ``args`` can be passed as an optional item
        in the dict `minimizer_kwargs`.
    x0 : array_like
        Initial guess.
    niter : integer, optional
        The number of basin-hopping iterations. There will be a total of
        ``niter + 1`` runs of the local minimizer.
    T : float, optional
        The "temperature" parameter for the acceptance or rejection criterion.
        Higher "temperatures" mean that larger jumps in function value will be
        accepted. For best results `T` should be comparable to the
        separation (in function value) between local minima.
    stepsize : float, optional
        Maximum step size for use in the random displacement.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the local minimizer
        `scipy.optimize.minimize`. Some important options could be:

        method : str
            The minimization method (e.g. ``"L-BFGS-B"``)
        args : tuple
            Extra arguments passed to the objective function (`func`) and
            its derivatives (Jacobian, Hessian).

    take_step : callable ``take_step(x)``, optional
        Replace the default step-taking routine with this routine. The default
        step-taking routine is a random displacement of the coordinates, but
        other step-taking algorithms may be better for some systems.
        `take_step` can optionally have the attribute ``take_step.stepsize``.
        If this attribute exists, then `basinhopping` will adjust
        ``take_step.stepsize`` in order to try to optimize the global minimum
        search.
    accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=f_old, x_old=x_old)``, optional
        Define a test which will be used to judge whether to accept the
        step. This will be used in addition to the Metropolis test based on
        "temperature" `T`. The acceptable return values are True,
        False, or ``"force accept"``. If any of the tests return False
        then the step is rejected. If ``"force accept"`` is returned, then
        the step is accepted regardless of any other tests. This can be used,
        for example, to forcefully escape from a local minimum that
        `basinhopping` is trapped in.
    callback : callable, ``callback(x, f, accept)``, optional
        A callback function which will be called for all minima found. ``x``
        and ``f`` are the coordinates and function value of the trial minimum,
        and ``accept`` is whether that minimum was accepted. This can
        be used, for example, to save the lowest N minima found. Also,
        `callback` can be used to specify a user defined stop criterion by
        optionally returning True to stop the `basinhopping` routine.
    interval : integer, optional
        Interval for how often to update the `stepsize`.
    disp : bool, optional
        Set to True to print status messages.
    niter_success : integer, optional
        Stop the run if the global minimum candidate remains the same for this
        number of iterations.
    seed : {None, int, `numpy.random.Generator`, `numpy.random.RandomState`}, optional
        If `seed` is None (or `np.random`), the `numpy.random.RandomState`
        singleton is used.
        If `seed` is an int, a new ``RandomState`` instance is used,
        seeded with `seed`.
        If `seed` is already a ``Generator`` or ``RandomState`` instance then
        that instance is used.
        Specify `seed` for repeatable minimizations. The random numbers
        generated with this seed only affect the default Metropolis
        `accept_test` and the default `take_step`. If you supply your own
        `take_step` and `accept_test`, and these functions use random
        number generation, then those functions are responsible for the state
        of their random number generator.
    target_accept_rate : float, optional
        The target acceptance rate that is used to adjust the `stepsize`.
        If the current acceptance rate is greater than the target,
        then the `stepsize` is increased. Otherwise, it is decreased.
        Range is (0, 1). Default is 0.5.

        .. versionadded:: 1.8.0

    stepwise_factor : float, optional
        The `stepsize` is multiplied or divided by this stepwise factor upon
        each update. Range is (0, 1). Default is 0.9.

        .. versionadded:: 1.8.0

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an `OptimizeResult` object.
        Important attributes are: ``x`` the solution array, ``fun`` the value
        of the function at the solution, and ``message`` which describes the
        cause of the termination. The ``OptimizeResult`` object returned by the
        selected minimizer at the lowest minimum is also contained within this
        object and can be accessed through the ``lowest_optimization_result``
        attribute. See `OptimizeResult` for a description of other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping step.
        `minimizer_kwargs` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_ [3]_
    [4]_. The algorithm in its current form was described by David Wales and
    Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative with each cycle composed of the following
    features

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard Monte
    Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry. It is
    particularly useful when the function has many minima separated by large
    barriers. See the `Cambridge Cluster Database
    <https://www-wales.ch.cam.ac.uk/CCD.html>`_ for databases of molecular
    systems that have been optimized primarily using basin-hopping. This
    database includes minimization problems exceeding 300 degrees of freedom.

    See the free software program `GMIN <https://www-wales.ch.cam.ac.uk/GMIN>`_
    for a Fortran implementation of basin-hopping. This implementation has many
    variations of the procedure described above, including more
    advanced step taking algorithms and alternate acceptance criteria.

    For stochastic global optimization there is no way to determine if the true
    global minimum has actually been found. Instead, as a consistency check,
    the algorithm can be run from a number of different random starting points
    to ensure the lowest minimum found in each example has converged to the
    global minimum. For this reason, `basinhopping` will by default simply
    run for the number of iterations `niter` and return the lowest minimum
    found. It is left to the user to ensure that this is in fact the global
    minimum.

    Choosing `stepsize`: This is a crucial parameter in `basinhopping` and
    depends on the problem being solved. The step is chosen uniformly in the
    region from x0-stepsize to x0+stepsize, in each dimension. Ideally, it
    should be comparable to the typical separation (in argument values) between
    local minima of the function being optimized. `basinhopping` will, by
    default, adjust `stepsize` to find an optimal value, but this may take
    many iterations. You will get quicker results if you set a sensible
    initial value for ``stepsize``.

    Choosing `T`: The parameter `T` is the "temperature" used in the
    Metropolis criterion. Basinhopping steps are always accepted if
    ``func(xnew) < func(xold)``. Otherwise, they are accepted with
    probability::

        exp( -(func(xnew) - func(xold)) / T )

    So, for best results, `T` should be comparable to the typical
    difference (in function values) between local minima. For example, with
    ``T = 1`` a step that increases the function value by 0.5 is accepted
    with probability ``exp(-0.5) ~= 0.61``. (The height of "walls" between
    local minima is irrelevant.)

    If `T` is 0, the algorithm becomes Monotonic Basin-Hopping, in which all
    steps that increase energy are rejected.

    .. versionadded:: 0.12.0

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University Press,
        Cambridge, UK.
    .. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping and
        the Lowest Energy Structures of Lennard-Jones Clusters Containing up to
        110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to the
        multiple-minima problem in protein folding, Proc. Natl. Acad. Sci. USA,
        1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.
    .. [5] Olson, B., Hashmi, I., Molloy, K., and Shehu, A., Basin Hopping as
        a General and Versatile Optimization Framework for the Characterization
        of Biological Macromolecules, Advances in Artificial Intelligence,
        Volume 2012 (2012), Article ID 674832, :doi:`10.1155/2012/674832`

    Examples
    --------
    The following example is a 1-D minimization problem, with many
    local minima superimposed on a parabola.

    >>> import numpy as np
    >>> from scipy.optimize import basinhopping
    >>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0 = [1.]

    Basinhopping, internally, uses a local minimization algorithm. We will use
    the parameter `minimizer_kwargs` to tell basinhopping which algorithm to
    use and how to set up that minimizer. This parameter will be passed to
    `scipy.optimize.minimize`.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x) = -1.0009

    Next consider a 2-D minimization problem. Also, this time, we
    will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                            0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm. Also, we must tell
    the minimizer that our function returns both energy and gradient (Jacobian).

    >>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109

    Here is an example using a custom step-taking routine. Imagine you want
    the first coordinate to take larger steps than the rest of the coordinates.
    This can be implemented like so:

    >>> class MyTakeStep:
    ...     def __init__(self, stepsize=0.5):
    ...         self.stepsize = stepsize
    ...         self.rng = np.random.default_rng()
    ...     def __call__(self, x):
    ...         s = self.stepsize
    ...         x[0] += self.rng.uniform(-2.*s, 2.*s)
    ...         x[1:] += self.rng.uniform(-s, s, x[1:].shape)
    ...         return x

    Since ``MyTakeStep.stepsize`` exists, basinhopping will adjust the
    magnitude of `stepsize` to optimize the search. We'll use the same 2-D
    function as before.

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109

    Now, let's do an example using a custom callback function which prints the
    value of every minimum found.

    >>> def print_fun(x, f, accepted):
    ...     print("at minimum %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> rng = np.random.default_rng()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun, seed=rng)
    at minimum 0.4159 accepted 1
    at minimum -0.4317 accepted 1
    at minimum -1.0109 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.4317 accepted 0
    at minimum -0.1021 accepted 1
    at minimum -0.7425 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.4317 accepted 0
    at minimum -0.7425 accepted 1
    at minimum -0.9073 accepted 1

    The minimum at -1.0109 is actually the global minimum, found already on the
    8th iteration.
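
    A custom `accept_test` can add extra acceptance logic on top of the
    Metropolis criterion. Here is an illustrative bounds check (``MyBounds``
    is a sketch written for this example, not something defined by `scipy`)
    that rejects any trial minimum outside a box:

    >>> class MyBounds:
    ...     def __init__(self, xmax=[1.1, 1.1], xmin=[-1.1, -1.1]):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)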
649 """
650 if target_accept_rate <= 0. or target_accept_rate >= 1.:
651 raise ValueError('target_accept_rate has to be in range (0, 1)')
652 if stepwise_factor <= 0. or stepwise_factor >= 1.:
653 raise ValueError('stepwise_factor has to be in range (0, 1)')
655 x0 = np.array(x0)
657 # set up the np.random generator
658 rng = check_random_state(seed)
660 # set up minimizer
661 if minimizer_kwargs is None:
662 minimizer_kwargs = dict()
663 wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
664 **minimizer_kwargs)
666 # set up step-taking algorithm
667 if take_step is not None:
668 if not callable(take_step):
669 raise TypeError("take_step must be callable")
670 # if take_step.stepsize exists then use AdaptiveStepsize to control
671 # take_step.stepsize
672 if hasattr(take_step, "stepsize"):
673 take_step_wrapped = AdaptiveStepsize(
674 take_step, interval=interval,
675 accept_rate=target_accept_rate,
676 factor=stepwise_factor,
677 verbose=disp)
678 else:
679 take_step_wrapped = take_step
680 else:
681 # use default
682 displace = RandomDisplacement(stepsize=stepsize, random_gen=rng)
683 take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
684 accept_rate=target_accept_rate,
685 factor=stepwise_factor,
686 verbose=disp)
688 # set up accept tests
689 accept_tests = []
690 if accept_test is not None:
691 if not callable(accept_test):
692 raise TypeError("accept_test must be callable")
693 accept_tests = [accept_test]
695 # use default
696 metropolis = Metropolis(T, random_gen=rng)
697 accept_tests.append(metropolis)
699 if niter_success is None:
700 niter_success = niter + 2
702 bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
703 accept_tests, disp=disp)
705 # The wrapped minimizer is called once during construction of
706 # BasinHoppingRunner, so run the callback
707 if callable(callback):
708 callback(bh.storage.minres.x, bh.storage.minres.fun, True)
710 # start main iteration loop
711 count, i = 0, 0
712 message = ["requested number of basinhopping iterations completed"
713 " successfully"]
714 for i in range(niter):
715 new_global_min = bh.one_cycle()
717 if callable(callback):
718 # should we pass a copy of x?
719 val = callback(bh.xtrial, bh.energy_trial, bh.accept)
720 if val is not None:
721 if val:
722 message = ["callback function requested stop early by"
723 "returning True"]
724 break
726 count += 1
727 if new_global_min:
728 count = 0
729 elif count > niter_success:
730 message = ["success condition satisfied"]
731 break
733 # prepare return object
734 res = bh.res
735 res.lowest_optimization_result = bh.storage.get_lowest()
736 res.x = np.copy(res.lowest_optimization_result.x)
737 res.fun = res.lowest_optimization_result.fun
738 res.message = message
739 res.nit = i + 1
740 res.success = res.lowest_optimization_result.success
741 return res