Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/scipy/optimize/_cobyla_py.py: 23%
84 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-12 06:31 +0000
"""
Interface to Constrained Optimization By Linear Approximation

Functions
---------
.. autosummary::
   :toctree: generated/

   fmin_cobyla

"""
13import functools
14from threading import RLock
16import numpy as np
17from scipy.optimize import _cobyla as cobyla
18from ._optimize import OptimizeResult, _check_unknown_options
19try:
20 from itertools import izip
21except ImportError:
22 izip = zip
__all__ = ['fmin_cobyla']

# Workaround as _cobyla.minimize is not threadsafe
# due to an unknown f2py bug and can segfault,
# see gh-9658.
_module_lock = RLock()
def synchronized(func):
    """Decorate *func* so that each call runs while holding ``_module_lock``.

    Serializes all entries into the non-threadsafe Fortran ``_cobyla``
    extension (see gh-9658).  The lock is reentrant, so a synchronized
    function may call another synchronized function on the same thread.
    """
    @functools.wraps(func)
    def locked_call(*args, **kwargs):
        _module_lock.acquire()
        try:
            return func(*args, **kwargs)
        finally:
            _module_lock.release()
    return locked_call
@synchronized
def fmin_cobyla(func, x0, cons, args=(), consargs=None, rhobeg=1.0,
                rhoend=1e-4, maxfun=1000, disp=None, catol=2e-4,
                *, callback=None):
    """
    Minimize a function using the Constrained Optimization By Linear
    Approximation (COBYLA) method. This method wraps a FORTRAN
    implementation of the algorithm.

    Parameters
    ----------
    func : callable
        Function to minimize. In the form func(x, \\*args).
    x0 : ndarray
        Initial guess.
    cons : sequence
        Constraint functions; must all be ``>=0`` (a single function
        if only 1 constraint). Each function takes the parameters `x`
        as its first argument, and it can return either a single number or
        an array or list of numbers.
    args : tuple, optional
        Extra arguments to pass to function.
    consargs : tuple, optional
        Extra arguments to pass to constraint functions (default of None means
        use same extra arguments as those passed to func).
        Use ``()`` for no extra arguments.
    rhobeg : float, optional
        Reasonable initial changes to the variables.
    rhoend : float, optional
        Final accuracy in the optimization (not precisely guaranteed). This
        is a lower bound on the size of the trust region.
    disp : {0, 1, 2, 3}, optional
        Controls the frequency of output; 0 implies no output.
    maxfun : int, optional
        Maximum number of function evaluations.
    catol : float, optional
        Absolute tolerance for constraint violations.
    callback : callable, optional
        Called after each iteration, as ``callback(x)``, where ``x`` is the
        current parameter vector.

    Returns
    -------
    x : ndarray
        The argument that minimises `f`.

    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'COBYLA' `method` in particular.

    Notes
    -----
    This algorithm is based on linear approximations to the objective
    function and each constraint. We briefly describe the algorithm.

    Suppose the function is being minimized over k variables. At the
    jth iteration the algorithm has k+1 points v_1, ..., v_(k+1),
    an approximate solution x_j, and a radius RHO_j. It builds linear
    (i.e., linear plus a constant) approximations to the objective
    function and constraint functions such that their function values
    agree with the linear approximation on the k+1 points v_1,.., v_(k+1).
    This gives a linear program to solve (where the linear approximations
    of the constraint functions are constrained to be non-negative).

    However, the linear approximations are likely only good
    approximations near the current simplex, so the linear program is
    given the further requirement that the solution, which
    will become x_(j+1), must be within RHO_j from x_j. RHO_j only
    decreases, never increases. The initial RHO_j is rhobeg and the
    final RHO_j is rhoend. In this way COBYLA's iterations behave
    like a trust region algorithm.

    Additionally, the linear program may be inconsistent, or the
    approximation may give poor improvement. For details about
    how these issues are resolved, as well as how the points v_i are
    updated, refer to the source code or the references below.

    References
    ----------
    Powell M.J.D. (1994), "A direct search optimization method that models
    the objective and constraint functions by linear interpolation.", in
    Advances in Optimization and Numerical Analysis, eds. S. Gomez and
    J-P Hennart, Kluwer Academic (Dordrecht), pp. 51-67

    Powell M.J.D. (1998), "Direct search algorithms for optimization
    calculations", Acta Numerica 7, 287-336

    Powell M.J.D. (2007), "A view of algorithms for optimization without
    derivatives", Cambridge University Technical Report DAMTP 2007/NA03

    Examples
    --------
    Minimize the objective function f(x,y) = x*y subject
    to the constraints x**2 + y**2 < 1 and y > 0::

        >>> def objective(x):
        ...     return x[0]*x[1]
        ...
        >>> def constr1(x):
        ...     return 1 - (x[0]**2 + x[1]**2)
        ...
        >>> def constr2(x):
        ...     return x[1]
        ...
        >>> from scipy.optimize import fmin_cobyla
        >>> fmin_cobyla(objective, [0.0, 0.1], [constr1, constr2], rhoend=1e-7)
        array([-0.70710685,  0.70710671])

    The exact solution is (-sqrt(2)/2, sqrt(2)/2).

    """
    err = ("cons must be a sequence of callable functions or a single"
           " callable function.")
    # `cons` may be either one callable or a sequence of callables; normalize
    # it to a sequence and validate every entry.
    try:
        len(cons)
    except TypeError as e:
        if not callable(cons):
            raise TypeError(err) from e
        cons = [cons]
    else:
        if not all(callable(c) for c in cons):
            raise TypeError(err)

    # Constraint functions inherit the objective's extra args by default.
    if consargs is None:
        consargs = args

    # Every fmin_cobyla constraint maps to an inequality constraint of the
    # `minimize` interface.
    constraints = tuple({'type': 'ineq', 'fun': c, 'args': consargs}
                        for c in cons)

    sol = _minimize_cobyla(func, x0, args, constraints=constraints,
                           rhobeg=rhobeg, tol=rhoend, disp=disp,
                           maxiter=maxfun, catol=catol, callback=callback)
    if disp and not sol['success']:
        print("COBYLA failed to find a solution: %s" % (sol.message,))
    return sol['x']
@synchronized
def _minimize_cobyla(fun, x0, args=(), constraints=(),
                     rhobeg=1.0, tol=1e-4, maxiter=1000,
                     disp=False, catol=2e-4, callback=None,
                     **unknown_options):
    """
    Minimize a scalar function of one or more variables using the
    Constrained Optimization BY Linear Approximation (COBYLA) algorithm.

    Options
    -------
    rhobeg : float
        Reasonable initial changes to the variables.
    tol : float
        Final accuracy in the optimization (not precisely guaranteed).
        This is a lower bound on the size of the trust region.
    disp : bool
        Set to True to print convergence messages. If False,
        `verbosity` is ignored and set to 0.
    maxiter : int
        Maximum number of function evaluations.
    catol : float
        Tolerance (absolute) for constraint violations.

    Returns
    -------
    OptimizeResult
        With fields ``x``, ``status``, ``success``, ``message``, ``nfev``,
        ``fun`` and ``maxcv`` (maximum constraint violation).
    """
    _check_unknown_options(unknown_options)
    # Translate the `minimize`-style option names to the Fortran ones.
    maxfun = maxiter
    rhoend = tol
    # The Fortran routine takes an integer verbosity level; map bool -> 0/1.
    iprint = int(bool(disp))

    # Accept a single constraint dict as well as a sequence of them.
    if isinstance(constraints, dict):
        constraints = (constraints, )

    for ic, con in enumerate(constraints):
        # Validate the 'type' entry; only inequality constraints are
        # supported by COBYLA.
        try:
            ctype = con['type'].lower()
        except KeyError as e:
            raise KeyError('Constraint %d has no type defined.' % ic) from e
        except TypeError as e:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.') from e
        except AttributeError as e:
            raise TypeError("Constraint's type must be a string.") from e
        else:
            if ctype != 'ineq':
                raise ValueError("Constraints of type '%s' not handled by "
                                 "COBYLA." % con['type'])

        # A constraint without a function is meaningless.
        if 'fun' not in con:
            raise KeyError('Constraint %d has no function defined.' % ic)

        # Default to no extra arguments for the constraint function.
        if 'args' not in con:
            con['args'] = ()

    # m is the total number of constraint values; it takes into account
    # that some constraints may be vector-valued. Each constraint function
    # is evaluated once at x0 to discover its output length.
    cons_lengths = []
    for c in constraints:
        f = c['fun'](x0, *c['args'])
        try:
            cons_length = len(f)
        except TypeError:
            # Scalar-valued constraint.
            cons_length = 1
        cons_lengths.append(cons_length)
    m = sum(cons_lengths)

    def calcfc(x, con):
        # Objective + constraints callback invoked from Fortran. `x` is
        # copied before being handed to user code so user code cannot
        # corrupt the optimizer's working array.
        f = fun(np.copy(x), *args)
        i = 0
        for size, c in zip(cons_lengths, constraints):
            con[i: i + size] = c['fun'](x, *c['args'])
            i += size
        return f

    def wrapped_callback(x):
        # Fortran always calls back; forward only when the user supplied one.
        if callback is not None:
            callback(np.copy(x))

    # dinfo layout (filled by the Fortran routine):
    #   info[0] exit status, info[1] nfev, info[2] final f, info[3] maxcv.
    info = np.zeros(4, np.float64)
    xopt, info = cobyla.minimize(calcfc, m=m, x=np.copy(x0), rhobeg=rhobeg,
                                 rhoend=rhoend, iprint=iprint, maxfun=maxfun,
                                 dinfo=info, callback=wrapped_callback)

    if info[3] > catol:
        # The reported solution violates the constraints beyond tolerance;
        # override the exit status accordingly.
        info[0] = 4

    return OptimizeResult(x=xopt,
                          status=int(info[0]),
                          # bool(...) so `success` is a plain Python bool,
                          # not a numpy.bool_.
                          success=bool(info[0] == 1),
                          message={1: 'Optimization terminated successfully.',
                                   2: 'Maximum number of function evaluations '
                                      'has been exceeded.',
                                   3: 'Rounding errors are becoming damaging '
                                      'in COBYLA subroutine.',
                                   4: 'Did not converge to a solution '
                                      'satisfying the constraints. See '
                                      '`maxcv` for magnitude of violation.',
                                   5: 'NaN result encountered.'
                                   }.get(info[0], 'Unknown exit status.'),
                          nfev=int(info[1]),
                          fun=info[2],
                          maxcv=info[3])