from __future__ import annotations
from typing import (
    Any, Callable, Iterable, Optional, Tuple, TYPE_CHECKING, Union
)

import numpy as np
from scipy.optimize import OptimizeResult
from ._constraints import old_bound_to_new, Bounds
from ._direct import direct as _direct  # type: ignore

if TYPE_CHECKING:
    import numpy.typing as npt
    from _typeshed import NoneType

__all__ = ['direct']
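
# Termination messages, indexed by the status code that the low-level
# implementation returns (see the status handling at the end of `direct`).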
ERROR_MESSAGES = (
    "Number of function evaluations done is larger than maxfun={}",
    "Number of iterations is larger than maxiter={}",
    "u[i] < l[i] for some i",
    "maxfun is too large",
    "Initialization failed",
    "There was an error in the creation of the sample points",
    "An error occurred while the function was sampled",
    "Maximum number of levels has been reached.",
    "Forced stop",
    "Invalid arguments",
    "Out of memory",
)

SUCCESS_MESSAGES = (
    ("The best function value found is within a relative error={} "
     "of the (known) global optimum f_min"),
    ("The volume of the hyperrectangle containing the lowest function value "
     "found is below vol_tol={}"),
    ("The side length measure of the hyperrectangle containing the lowest "
     "function value found is below len_tol={}"),
)


def direct(
    func: Callable[[npt.ArrayLike, Tuple[Any]], float],
    bounds: Union[Iterable, Bounds],
    *,
    args: tuple = (),
    eps: float = 1e-4,
    maxfun: Union[int, None] = None,
    maxiter: int = 1000,
    locally_biased: bool = True,
    f_min: float = -np.inf,
    f_min_rtol: float = 1e-4,
    vol_tol: float = 1e-16,
    len_tol: float = 1e-6,
    callback: Optional[Callable[[npt.ArrayLike], NoneType]] = None
) -> OptimizeResult:
56 """
57 Finds the global minimum of a function using the
58 DIRECT algorithm.
60 Parameters
61 ----------
62 func : callable
63 The objective function to be minimized.
64 ``func(x, *args) -> float``
65 where ``x`` is an 1-D array with shape (n,) and ``args`` is a tuple of
66 the fixed parameters needed to completely specify the function.
67 bounds : sequence or `Bounds`
68 Bounds for variables. There are two ways to specify the bounds:
70 1. Instance of `Bounds` class.
71 2. ``(min, max)`` pairs for each element in ``x``.
    args : tuple, optional
        Any additional fixed parameters needed to
        completely specify the objective function.
    eps : float, optional
        Minimal required difference of the objective function values
        between the current best hyperrectangle and the next potentially
        optimal hyperrectangle to be divided. In consequence, `eps` serves as a
        tradeoff between local and global search: the smaller, the more local
        the search becomes. Default is 1e-4.
    maxfun : int or None, optional
        Approximate upper bound on objective function evaluations.
        If `None`, will be automatically set to ``1000 * N`` where ``N``
        represents the number of dimensions. Will be capped if necessary to
        limit DIRECT's RAM usage to approximately 1 GiB. This will only occur
        for very high dimensional problems and excessive `maxfun`. Default is
        `None`.
    maxiter : int, optional
        Maximum number of iterations. Default is 1000.
    locally_biased : bool, optional
        If `True` (default), use the locally biased variant of the
        algorithm known as DIRECT_L. If `False`, use the original unbiased
        DIRECT algorithm. For hard problems with many local minima,
        `False` is recommended.
    f_min : float, optional
        Function value of the global optimum. Set this value only if the
        global optimum is known. Default is ``-np.inf``, so that this
        termination criterion is deactivated.
    f_min_rtol : float, optional
        Terminate the optimization once the relative error between the
        current best minimum `f` and the supplied global minimum `f_min`
        is smaller than `f_min_rtol`. This parameter is only used if
        `f_min` is also set. Must lie between 0 and 1. Default is 1e-4.
    vol_tol : float, optional
        Terminate the optimization once the volume of the hyperrectangle
        containing the lowest function value is smaller than `vol_tol`
        of the complete search space. Must lie between 0 and 1.
        Default is 1e-16.
    len_tol : float, optional
        If `locally_biased=True`, terminate the optimization once half of
        the normalized maximal side length of the hyperrectangle containing
        the lowest function value is smaller than `len_tol`.
        If `locally_biased=False`, terminate the optimization once half of
        the normalized diagonal of the hyperrectangle containing the lowest
        function value is smaller than `len_tol`. Must lie between 0 and 1.
        Default is 1e-6.
    callback : callable, optional
        A callback function with signature ``callback(xk)``, where ``xk``
        is the best solution found so far.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``success`` a
        Boolean flag indicating if the optimizer exited successfully and
        ``message`` which describes the cause of the termination. See
        `OptimizeResult` for a description of other attributes.

    Notes
    -----
    DIviding RECTangles (DIRECT) is a deterministic global
    optimization algorithm capable of minimizing a black box function with
    its variables subject to lower and upper bound constraints by sampling
    potential solutions in the search space [1]_. The algorithm starts by
    normalising the search space to an n-dimensional unit hypercube.
    It samples the function at the center of this hypercube and at 2n
    (n is the number of variables) more points, 2 in each coordinate
    direction. Using these function values, DIRECT then divides the
    domain into hyperrectangles, each having exactly one of the sampling
    points as its center. In each iteration, DIRECT chooses, using the `eps`
    parameter which defaults to 1e-4, some of the existing hyperrectangles
    to be further divided. This division process continues until either the
    maximum number of iterations or maximum function evaluations allowed
    are exceeded, or the hyperrectangle containing the minimal value found
    so far becomes small enough. If `f_min` is specified, the optimization
    will stop once this function value is reached within a relative tolerance.
    The locally biased variant of DIRECT (originally called DIRECT_L) [2]_ is
    used by default. It makes the search more locally biased and more
    efficient for cases with only a few local minima.

    A note about termination criteria: `vol_tol` refers to the volume of the
    hyperrectangle containing the lowest function value found so far. This
    volume decreases exponentially with increasing dimensionality of the
    problem. Therefore `vol_tol` should be decreased to avoid premature
    termination of the algorithm for higher dimensions. This does not hold
    for `len_tol`: it refers either to half of the maximal side length
    (for ``locally_biased=True``) or half of the diagonal of the
    hyperrectangle (for ``locally_biased=False``).

    This code is based on the DIRECT 2.0.4 Fortran code by Gablonsky et al. at
    https://ctk.math.ncsu.edu/SOFTWARE/DIRECTv204.tar.gz .
    This original version was initially converted via f2c and then cleaned up
    and reorganized by Steven G. Johnson, August 2007, for the NLopt project.
    The `direct` function wraps the C implementation.

    .. versionadded:: 1.9.0

    References
    ----------
    .. [1] Jones, D.R., Perttunen, C.D. & Stuckman, B.E. Lipschitzian
        optimization without the Lipschitz constant. J Optim Theory Appl
        79, 157-181 (1993).
    .. [2] Gablonsky, J., Kelley, C. A Locally-Biased form of the DIRECT
        Algorithm. Journal of Global Optimization 21, 27-37 (2001).

    Examples
    --------
    The following example is a 2-D problem with four local minima: minimizing
    the Styblinski-Tang function
    (https://en.wikipedia.org/wiki/Test_functions_for_optimization).

    >>> from scipy.optimize import direct, Bounds
    >>> def styblinski_tang(pos):
    ...     x, y = pos
    ...     return 0.5 * (x**4 - 16*x**2 + 5*x + y**4 - 16*y**2 + 5*y)
    >>> bounds = Bounds([-4., -4.], [4., 4.])
    >>> result = direct(styblinski_tang, bounds)
    >>> result.x, result.fun, result.nfev
    array([-2.90321597, -2.90321597]), -78.3323279095383, 2011

    The correct global minimum was found, but with a huge number of function
    evaluations (2011). Loosening the termination tolerances `vol_tol` and
    `len_tol` stops DIRECT earlier.

    >>> result = direct(styblinski_tang, bounds, len_tol=1e-3)
    >>> result.x, result.fun, result.nfev
    array([-2.9044353, -2.9044353]), -78.33230330754142, 207
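
    If the global optimum is approximately known in advance, as it is here
    from the runs above, the `f_min` criterion offers another way to stop
    early; the value ``-78.332`` below is simply the optimum reported above,
    rounded.

    >>> result = direct(styblinski_tang, bounds,
    ...                 f_min=-78.332, f_min_rtol=1e-3)

    The best solution found so far can also be recorded at each iteration
    through the `callback` argument.

    >>> history = []
    >>> result = direct(styblinski_tang, bounds, callback=history.append)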
200 """
    # convert bounds to new Bounds class if necessary
    if not isinstance(bounds, Bounds):
        if isinstance(bounds, list) or isinstance(bounds, tuple):
            lb, ub = old_bound_to_new(bounds)
            bounds = Bounds(lb, ub)
        else:
            message = ("bounds must be a sequence or "
                       "instance of Bounds class")
            raise ValueError(message)
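
    # The bounds are handed to the compiled routine as contiguous
    # float64 arrays.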
    lb = np.ascontiguousarray(bounds.lb, dtype=np.float64)
    ub = np.ascontiguousarray(bounds.ub, dtype=np.float64)

    # validate bounds
    # check that lower bounds are smaller than upper bounds
    if not np.all(lb < ub):
        raise ValueError('Bounds are not consistent min < max')
    # check for infs
    if (np.any(np.isinf(lb)) or np.any(np.isinf(ub))):
        raise ValueError("Bounds must not be inf.")

    # validate tolerances
    if (vol_tol < 0 or vol_tol > 1):
        raise ValueError("vol_tol must be between 0 and 1.")
    if (len_tol < 0 or len_tol > 1):
        raise ValueError("len_tol must be between 0 and 1.")
    if (f_min_rtol < 0 or f_min_rtol > 1):
        raise ValueError("f_min_rtol must be between 0 and 1.")

    # validate maxfun and maxiter
    if maxfun is None:
        maxfun = 1000 * lb.shape[0]
    if not isinstance(maxfun, int):
        raise ValueError("maxfun must be of type int.")
    if maxfun < 0:
        raise ValueError("maxfun must be > 0.")
    if not isinstance(maxiter, int):
        raise ValueError("maxiter must be of type int.")
    if maxiter < 0:
        raise ValueError("maxiter must be > 0.")

    # validate boolean parameters
    if not isinstance(locally_biased, bool):
        raise ValueError("locally_biased must be True or False.")
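
    # The compiled routine evaluates the objective through this wrapper, which
    # ensures that ``x`` is an ndarray and that the return value is a plain
    # Python scalar (via ``.item()``), regardless of what ``func`` returns.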
    def _func_wrap(x, args=None):
        x = np.asarray(x)
        if args is None:
            f = func(x)
        else:
            f = func(x, *args)
        # always return a float
        return np.asarray(f).item()

    # TODO: fix disp argument
    x, fun, ret_code, nfev, nit = _direct(
        _func_wrap,
        np.asarray(lb), np.asarray(ub),
        args,
        False, eps, maxfun, maxiter,
        locally_biased,
        f_min, f_min_rtol,
        vol_tol, len_tol, callback
    )
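
    # Map the status code returned by the compiled routine onto a message:
    # codes 1-2 mean the maxfun/maxiter budget was exhausted, codes 3-5 select
    # one of the SUCCESS_MESSAGES (formatted with the matching tolerance), and
    # negative codes index the remaining ERROR_MESSAGES.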
    format_val = (maxfun, maxiter, f_min_rtol, vol_tol, len_tol)
    if ret_code > 2:
        message = SUCCESS_MESSAGES[ret_code - 3].format(
            format_val[ret_code - 1])
    elif 0 < ret_code <= 2:
        message = ERROR_MESSAGES[ret_code - 1].format(format_val[ret_code - 1])
    elif 0 > ret_code > -100:
        message = ERROR_MESSAGES[abs(ret_code) + 1]
    else:
        message = ERROR_MESSAGES[ret_code + 99]
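
    # Only the SUCCESS_MESSAGES statuses (ret_code > 2) count as success;
    # exhausting maxfun or maxiter (codes 1 and 2) is reported as a failure.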
    return OptimizeResult(x=np.asarray(x), fun=fun, status=ret_code,
                          success=ret_code > 2, message=message,
                          nfev=nfev, nit=nit)