import numpy as np
import operator
from . import (linear_sum_assignment, OptimizeResult)
from ._optimize import _check_unknown_options

from scipy._lib._util import check_random_state
import itertools

QUADRATIC_ASSIGNMENT_METHODS = ['faq', '2opt']

def quadratic_assignment(A, B, method="faq", options=None):
    r"""
    Approximates solution to the quadratic assignment problem and
    the graph matching problem.

    Quadratic assignment solves problems of the following form:

    .. math::

        \min_P & \ {\ \text{trace}(A^T P B P^T)}\\
        \mbox{s.t. } & {P \ \in \ \mathcal{P}}\\

    where :math:`\mathcal{P}` is the set of all permutation matrices,
    and :math:`A` and :math:`B` are square matrices.

    Graph matching tries to *maximize* the same objective function.
    This algorithm can be thought of as finding the alignment of the
    nodes of two graphs that minimizes the number of induced edge
    disagreements, or, in the case of weighted graphs, the sum of squared
    edge weight differences.

    Note that the quadratic assignment problem is NP-hard. The results given
    here are approximations and are not guaranteed to be optimal.

    Parameters
    ----------
    A : 2-D array, square
        The square matrix :math:`A` in the objective function above.

    B : 2-D array, square
        The square matrix :math:`B` in the objective function above.

    method : str in {'faq', '2opt'} (default: 'faq')
        The algorithm used to solve the problem.
        :ref:`'faq' <optimize.qap-faq>` (default) and
        :ref:`'2opt' <optimize.qap-2opt>` are available.

    options : dict, optional
        A dictionary of solver options. All solvers support the following:

        maximize : bool (default: False)
            Maximizes the objective function if ``True``.

        partial_match : 2-D array of integers, optional (default: None)
            Fixes part of the matching. Also known as a "seed" [2]_.

            Each row of `partial_match` specifies a pair of matched nodes:
            node ``partial_match[i, 0]`` of `A` is matched to node
            ``partial_match[i, 1]`` of `B`. The array has shape ``(m, 2)``,
            where ``m`` is not greater than the number of nodes, :math:`n`.
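
            For instance, ``partial_match=np.array([[0, 2], [3, 1]])``
            forces node 0 of `A` to be matched with node 2 of `B`, and
            node 3 of `A` with node 1 of `B`.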

        rng : {None, int, `numpy.random.Generator`,
               `numpy.random.RandomState`}, optional

            If `rng` is None (or `np.random`), the `numpy.random.RandomState`
            singleton is used.
            If `rng` is an int, a new ``RandomState`` instance is used,
            seeded with `rng`.
            If `rng` is already a ``Generator`` or ``RandomState`` instance,
            then that instance is used.

        For method-specific options, see
        :func:`show_options('quadratic_assignment') <show_options>`.

    Returns
    -------
    res : OptimizeResult
        `OptimizeResult` containing the following fields.

        col_ind : 1-D array
            Column indices corresponding to the best permutation found of the
            nodes of `B`.
        fun : float
            The objective value of the solution.
        nit : int
            The number of iterations performed during optimization.

    Notes
    -----
    The default method :ref:`'faq' <optimize.qap-faq>` uses the Fast
    Approximate QAP algorithm [1]_; it typically offers the best combination of
    speed and accuracy.
    Method :ref:`'2opt' <optimize.qap-2opt>` can be computationally expensive,
    but may be a useful alternative, or it can be used to refine the solution
    returned by another method.

    References
    ----------
    .. [1] J.T. Vogelstein, J.M. Conroy, V. Lyzinski, L.J. Podrazik,
           S.G. Kratzer, E.T. Harley, D.E. Fishkind, R.J. Vogelstein, and
           C.E. Priebe, "Fast approximate quadratic programming for graph
           matching," PLOS one, vol. 10, no. 4, p. e0121002, 2015,
           :doi:`10.1371/journal.pone.0121002`

    .. [2] D. Fishkind, S. Adali, H. Patsolic, L. Meng, D. Singh, V. Lyzinski,
           C. Priebe, "Seeded graph matching", Pattern Recognit. 87 (2019):
           203-215, :doi:`10.1016/j.patcog.2018.09.014`

    .. [3] "2-opt," Wikipedia.
           https://en.wikipedia.org/wiki/2-opt

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import quadratic_assignment
    >>> A = np.array([[0, 80, 150, 170], [80, 0, 130, 100],
    ...               [150, 130, 0, 120], [170, 100, 120, 0]])
    >>> B = np.array([[0, 5, 2, 7], [0, 0, 3, 8],
    ...               [0, 0, 0, 3], [0, 0, 0, 0]])
    >>> res = quadratic_assignment(A, B)
    >>> print(res)
    fun: 3260
    col_ind: [0 3 2 1]
    nit: 9

    To see the relationship between the returned ``col_ind`` and ``fun``,
    use ``col_ind`` to form the best permutation matrix found, then evaluate
    the objective function :math:`f(P) = \text{trace}(A^T P B P^T)`.

    >>> perm = res['col_ind']
    >>> P = np.eye(len(A), dtype=int)[perm]
    >>> fun = np.trace(A.T @ P @ B @ P.T)
    >>> print(fun)
    3260

    Alternatively, to avoid constructing the permutation matrix explicitly,
    directly permute the rows and columns of the distance matrix.

    >>> fun = np.trace(A.T @ B[perm][:, perm])
    >>> print(fun)
    3260

    Although not guaranteed in general, ``quadratic_assignment`` happens to
    have found the globally optimal solution.

    >>> from itertools import permutations
    >>> perm_opt, fun_opt = None, np.inf
    >>> for perm in permutations([0, 1, 2, 3]):
    ...     perm = np.array(perm)
    ...     fun = np.trace(A.T @ B[perm][:, perm])
    ...     if fun < fun_opt:
    ...         fun_opt, perm_opt = fun, perm
    >>> print(np.array_equal(perm_opt, res['col_ind']))
    True

    Here is an example for which the default method,
    :ref:`'faq' <optimize.qap-faq>`, does not find the global optimum.

    >>> A = np.array([[0, 5, 8, 6], [5, 0, 5, 1],
    ...               [8, 5, 0, 2], [6, 1, 2, 0]])
    >>> B = np.array([[0, 1, 8, 4], [1, 0, 5, 2],
    ...               [8, 5, 0, 5], [4, 2, 5, 0]])
    >>> res = quadratic_assignment(A, B)
    >>> print(res)
    fun: 178
    col_ind: [1 0 3 2]
    nit: 13

    If accuracy is important, consider using :ref:`'2opt' <optimize.qap-2opt>`
    to refine the solution.

    >>> guess = np.array([np.arange(len(A)), res.col_ind]).T
    >>> res = quadratic_assignment(A, B, method="2opt",
    ...                            options = {'partial_guess': guess})
    >>> print(res)
    fun: 176
    col_ind: [1 2 3 0]
    nit: 17

    """

    if options is None:
        options = {}

    method = method.lower()
    methods = {"faq": _quadratic_assignment_faq,
               "2opt": _quadratic_assignment_2opt}
    if method not in methods:
        raise ValueError(f"method {method} must be in {methods}.")
    res = methods[method](A, B, **options)
    return res


def _calc_score(A, B, perm):
    # equivalent to objective function but avoids matmul
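    # Sketch of the equivalence: with P = np.eye(len(A))[perm], permuting
    # rows and columns gives P @ B @ P.T == B[perm][:, perm], and
    # trace(A.T @ M) == (A * M).sum(), so the sum below equals
    # trace(A.T @ P @ B @ P.T), the objective in the docstrings above.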
    return np.sum(A * B[perm][:, perm])


def _common_input_validation(A, B, partial_match):
    A = np.atleast_2d(A)
    B = np.atleast_2d(B)

    if partial_match is None:
        partial_match = np.array([[], []]).T
    partial_match = np.atleast_2d(partial_match).astype(int)

    msg = None
    if A.shape[0] != A.shape[1]:
        msg = "`A` must be square"
    elif B.shape[0] != B.shape[1]:
        msg = "`B` must be square"
    elif A.ndim != 2 or B.ndim != 2:
        msg = "`A` and `B` must have exactly two dimensions"
    elif A.shape != B.shape:
        msg = "`A` and `B` matrices must be of equal size"
    elif partial_match.shape[0] > A.shape[0]:
        msg = "`partial_match` can have only as many seeds as there are nodes"
    elif partial_match.shape[1] != 2:
        msg = "`partial_match` must have two columns"
    elif partial_match.ndim != 2:
        msg = "`partial_match` must have exactly two dimensions"
    elif (partial_match < 0).any():
        msg = "`partial_match` must contain only non-negative indices"
    elif (partial_match >= len(A)).any():
        msg = "`partial_match` entries must be less than number of nodes"
    elif (not len(set(partial_match[:, 0])) == len(partial_match[:, 0]) or
          not len(set(partial_match[:, 1])) == len(partial_match[:, 1])):
        msg = "`partial_match` column entries must be unique"

    if msg is not None:
        raise ValueError(msg)

    return A, B, partial_match


def _quadratic_assignment_faq(A, B,
                              maximize=False, partial_match=None, rng=None,
                              P0="barycenter", shuffle_input=False, maxiter=30,
                              tol=0.03, **unknown_options):
    r"""Solve the quadratic assignment problem (approximately).

    This function solves the Quadratic Assignment Problem (QAP) and the
    Graph Matching Problem (GMP) using the Fast Approximate QAP Algorithm
    (FAQ) [1]_.

    Quadratic assignment solves problems of the following form:

    .. math::

        \min_P & \ {\ \text{trace}(A^T P B P^T)}\\
        \mbox{s.t. } & {P \ \in \ \mathcal{P}}\\

    where :math:`\mathcal{P}` is the set of all permutation matrices,
    and :math:`A` and :math:`B` are square matrices.

    Graph matching tries to *maximize* the same objective function.
    This algorithm can be thought of as finding the alignment of the
    nodes of two graphs that minimizes the number of induced edge
    disagreements, or, in the case of weighted graphs, the sum of squared
    edge weight differences.

    Note that the quadratic assignment problem is NP-hard. The results given
    here are approximations and are not guaranteed to be optimal.

    Parameters
    ----------
    A : 2-D array, square
        The square matrix :math:`A` in the objective function above.
    B : 2-D array, square
        The square matrix :math:`B` in the objective function above.
    method : str in {'faq', '2opt'} (default: 'faq')
        The algorithm used to solve the problem. This is the method-specific
        documentation for 'faq'.
        :ref:`'2opt' <optimize.qap-2opt>` is also available.

    Options
    -------
    maximize : bool (default: False)
        Maximizes the objective function if ``True``.
    partial_match : 2-D array of integers, optional (default: None)
        Fixes part of the matching. Also known as a "seed" [2]_.

        Each row of `partial_match` specifies a pair of matched nodes:
        node ``partial_match[i, 0]`` of `A` is matched to node
        ``partial_match[i, 1]`` of `B`. The array has shape ``(m, 2)``, where
        ``m`` is not greater than the number of nodes, :math:`n`.

    rng : {None, int, `numpy.random.Generator`,
           `numpy.random.RandomState`}, optional

        If `rng` is None (or `np.random`), the `numpy.random.RandomState`
        singleton is used.
        If `rng` is an int, a new ``RandomState`` instance is used,
        seeded with `rng`.
        If `rng` is already a ``Generator`` or ``RandomState`` instance,
        then that instance is used.
    P0 : 2-D array, "barycenter", or "randomized" (default: "barycenter")
        Initial position. Must be a doubly-stochastic matrix [3]_.

        If the initial position is an array, it must be a doubly stochastic
        matrix of size :math:`m' \times m'` where :math:`m' = n - m`.

        If ``"barycenter"`` (default), the initial position is the barycenter
        of the Birkhoff polytope (the space of doubly stochastic matrices).
        This is a :math:`m' \times m'` matrix with all entries equal to
        :math:`1 / m'`.

        If ``"randomized"`` the initial search position is
        :math:`P_0 = (J + K) / 2`, where :math:`J` is the barycenter and
        :math:`K` is a random doubly stochastic matrix.
    shuffle_input : bool (default: False)
        Set to `True` to resolve degenerate gradients randomly. For
        non-degenerate gradients this option has no effect.
    maxiter : int, positive (default: 30)
        Integer specifying the max number of Frank-Wolfe iterations performed.
    tol : float (default: 0.03)
        Tolerance for termination. Frank-Wolfe iteration terminates when
        :math:`\frac{||P_{i}-P_{i+1}||_F}{\sqrt{m'}} \leq tol`,
        where :math:`i` is the iteration number.

    Returns
    -------
    res : OptimizeResult
        `OptimizeResult` containing the following fields.

        col_ind : 1-D array
            Column indices corresponding to the best permutation found of the
            nodes of `B`.
        fun : float
            The objective value of the solution.
        nit : int
            The number of Frank-Wolfe iterations performed.

    Notes
    -----
    The algorithm may be sensitive to the initial permutation matrix (or
    search "position") due to the possibility of several local minima
    within the feasible region. A barycenter initialization is more likely to
    result in a better solution than a single random initialization. However,
    calling ``quadratic_assignment`` several times with different random
    initializations may result in a better optimum at the cost of longer
    total execution time.

    Examples
    --------
    As mentioned above, a barycenter initialization often results in a better
    solution than a single random initialization.

    >>> import numpy as np
    >>> from scipy.optimize import quadratic_assignment
    >>> from numpy.random import default_rng
    >>> rng = default_rng()
    >>> n = 15
    >>> A = rng.random((n, n))
    >>> B = rng.random((n, n))
    >>> res = quadratic_assignment(A, B)  # FAQ is default method
    >>> print(res.fun)
    46.871483385480545  # may vary

    >>> options = {"P0": "randomized"}  # use randomized initialization
    >>> res = quadratic_assignment(A, B, options=options)
    >>> print(res.fun)
    47.224831071310625  # may vary

    However, consider running from several randomized initializations and
    keeping the best result.

    >>> res = min([quadratic_assignment(A, B, options=options)
    ...            for i in range(30)], key=lambda x: x.fun)
    >>> print(res.fun)
    46.671852533681516  # may vary

    The '2-opt' method can be used to further refine the results.

    >>> options = {"partial_guess": np.array([np.arange(n), res.col_ind]).T}
    >>> res = quadratic_assignment(A, B, method="2opt", options=options)
    >>> print(res.fun)
    46.47160735721583  # may vary

    References
    ----------
    .. [1] J.T. Vogelstein, J.M. Conroy, V. Lyzinski, L.J. Podrazik,
           S.G. Kratzer, E.T. Harley, D.E. Fishkind, R.J. Vogelstein, and
           C.E. Priebe, "Fast approximate quadratic programming for graph
           matching," PLOS one, vol. 10, no. 4, p. e0121002, 2015,
           :doi:`10.1371/journal.pone.0121002`

    .. [2] D. Fishkind, S. Adali, H. Patsolic, L. Meng, D. Singh, V. Lyzinski,
           C. Priebe, "Seeded graph matching", Pattern Recognit. 87 (2019):
           203-215, :doi:`10.1016/j.patcog.2018.09.014`

    .. [3] "Doubly stochastic Matrix," Wikipedia.
           https://en.wikipedia.org/wiki/Doubly_stochastic_matrix

    """

    _check_unknown_options(unknown_options)

    maxiter = operator.index(maxiter)

    # ValueError check
    A, B, partial_match = _common_input_validation(A, B, partial_match)

    msg = None
    if isinstance(P0, str) and P0 not in {'barycenter', 'randomized'}:
        msg = "Invalid 'P0' parameter string"
    elif maxiter <= 0:
        msg = "'maxiter' must be a positive integer"
    elif tol <= 0:
        msg = "'tol' must be a positive float"
    if msg is not None:
        raise ValueError(msg)

    rng = check_random_state(rng)
    n = len(A)  # number of vertices in graphs
    n_seeds = len(partial_match)  # number of seeds
    n_unseed = n - n_seeds

    # [1] Algorithm 1 Line 1 - choose initialization
    if not isinstance(P0, str):
        P0 = np.atleast_2d(P0)
        if P0.shape != (n_unseed, n_unseed):
            msg = "`P0` matrix must have shape m' x m', where m'=n-m"
        elif ((P0 < 0).any() or not np.allclose(np.sum(P0, axis=0), 1)
              or not np.allclose(np.sum(P0, axis=1), 1)):
            msg = "`P0` matrix must be doubly stochastic"
        if msg is not None:
            raise ValueError(msg)
    elif P0 == 'barycenter':
        P0 = np.ones((n_unseed, n_unseed)) / n_unseed
    elif P0 == 'randomized':
        J = np.ones((n_unseed, n_unseed)) / n_unseed
        # generate a nxn matrix where each entry is a random number [0, 1]
        # would use rand, but Generators don't have it
        # would use random, but old mtrand.RandomStates don't have it
        K = _doubly_stochastic(rng.uniform(size=(n_unseed, n_unseed)))
        P0 = (J + K) / 2
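        # The average of two doubly stochastic matrices is itself doubly
        # stochastic (the Birkhoff polytope is convex), so this P0 is a
        # valid starting position.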

    # check trivial cases
    if n == 0 or n_seeds == n:
        score = _calc_score(A, B, partial_match[:, 1])
        res = {"col_ind": partial_match[:, 1], "fun": score, "nit": 0}
        return OptimizeResult(res)

    obj_func_scalar = 1
    if maximize:
        obj_func_scalar = -1

    nonseed_B = np.setdiff1d(range(n), partial_match[:, 1])
    if shuffle_input:
        nonseed_B = rng.permutation(nonseed_B)

    nonseed_A = np.setdiff1d(range(n), partial_match[:, 0])
    perm_A = np.concatenate([partial_match[:, 0], nonseed_A])
    perm_B = np.concatenate([partial_match[:, 1], nonseed_B])

    # definitions according to Seeded Graph Matching [2].
    A11, A12, A21, A22 = _split_matrix(A[perm_A][:, perm_A], n_seeds)
    B11, B12, B21, B22 = _split_matrix(B[perm_B][:, perm_B], n_seeds)
    const_sum = A21 @ B21.T + A12.T @ B12

    P = P0
    # [1] Algorithm 1 Line 2 - loop while stopping criteria not met
    for n_iter in range(1, maxiter+1):
        # [1] Algorithm 1 Line 3 - compute the gradient of f(P) = -tr(APB^tP^t)
        grad_fp = (const_sum + A22 @ P @ B22.T + A22.T @ P @ B22)
        # [1] Algorithm 1 Line 4 - get direction Q by solving Eq. 8
        _, cols = linear_sum_assignment(grad_fp, maximize=maximize)
        Q = np.eye(n_unseed)[cols]

        # [1] Algorithm 1 Line 5 - compute the step size
        # Noting that e.g. trace(Ax) = trace(A)*x, expand and re-collect
        # terms as ax**2 + bx + c. c does not affect location of minimum
        # and can be ignored. Also, note that trace(A@B) = (A.T*B).sum();
        # apply where possible for efficiency.
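        # Note: x stands for the step size alpha used below in
        # P_i1 = alpha * P + (1 - alpha) * Q; along that segment the
        # objective is quadratic in alpha, and a and b computed next are
        # its quadratic and linear coefficients (the ignored c is the
        # value at alpha = 0).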
        R = P - Q
        b21 = ((R.T @ A21) * B21).sum()
        b12 = ((R.T @ A12.T) * B12.T).sum()
        AR22 = A22.T @ R
        BR22 = B22 @ R.T
        b22a = (AR22 * B22.T[cols]).sum()
        b22b = (A22 * BR22[cols]).sum()
        a = (AR22.T * BR22).sum()
        b = b21 + b12 + b22a + b22b
        # critical point of ax^2 + bx + c is at x = -b/(2*a)
        # if a * obj_func_scalar > 0, it is a minimum
        # if minimum is not in [0, 1], only endpoints need to be considered
        if a*obj_func_scalar > 0 and 0 <= -b/(2*a) <= 1:
            alpha = -b/(2*a)
        else:
            alpha = np.argmin([0, (b + a)*obj_func_scalar])

        # [1] Algorithm 1 Line 6 - Update P
        P_i1 = alpha * P + (1 - alpha) * Q
        if np.linalg.norm(P - P_i1) / np.sqrt(n_unseed) < tol:
            P = P_i1
            break
        P = P_i1
    # [1] Algorithm 1 Line 7 - end main loop

    # [1] Algorithm 1 Line 8 - project onto the set of permutation matrices
    _, col = linear_sum_assignment(P, maximize=True)
    perm = np.concatenate((np.arange(n_seeds), col + n_seeds))
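
    # Undo the seeds-first relabelling: node perm_A[i] of A is matched to
    # node perm_B[perm[i]] of B, expressed in the original vertex order.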
    unshuffled_perm = np.zeros(n, dtype=int)
    unshuffled_perm[perm_A] = perm_B[perm]

    score = _calc_score(A, B, unshuffled_perm)
    res = {"col_ind": unshuffled_perm, "fun": score, "nit": n_iter}
    return OptimizeResult(res)


def _split_matrix(X, n):
    # definitions according to Seeded Graph Matching [2].
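    # Illustration: with the first n rows/columns corresponding to seeds,
    # X is split into four blocks
    #     X = [[X11, X12],
    #          [X21, X22]]
    # where X11 is the n x n seed-to-seed block and X22 is the
    # non-seed-to-non-seed block.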
    upper, lower = X[:n], X[n:]
    return upper[:, :n], upper[:, n:], lower[:, :n], lower[:, n:]


def _doubly_stochastic(P, tol=1e-3):
    # Adapted from @btaba implementation
    # https://github.com/btaba/sinkhorn_knopp
    # of Sinkhorn-Knopp algorithm
    # https://projecteuclid.org/euclid.pjm/1102992505
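    # Sketch of the iteration: r and c hold row and column scaling factors;
    # alternately updating them drives all row and column sums of
    # r[:, None] * P * c toward 1 (Sinkhorn-Knopp balancing).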

    max_iter = 1000
    c = 1 / P.sum(axis=0)
    r = 1 / (P @ c)
    P_eps = P

    for it in range(max_iter):
        if ((np.abs(P_eps.sum(axis=1) - 1) < tol).all() and
                (np.abs(P_eps.sum(axis=0) - 1) < tol).all()):
            # All column/row sums ~= 1 within threshold
            break

        c = 1 / (r @ P)
        r = 1 / (P @ c)
        P_eps = r[:, None] * P * c

    return P_eps


def _quadratic_assignment_2opt(A, B, maximize=False, rng=None,
                               partial_match=None,
                               partial_guess=None,
                               **unknown_options):
    r"""Solve the quadratic assignment problem (approximately).

    This function solves the Quadratic Assignment Problem (QAP) and the
    Graph Matching Problem (GMP) using the 2-opt algorithm [1]_.

    Quadratic assignment solves problems of the following form:

    .. math::

        \min_P & \ {\ \text{trace}(A^T P B P^T)}\\
        \mbox{s.t. } & {P \ \in \ \mathcal{P}}\\

    where :math:`\mathcal{P}` is the set of all permutation matrices,
    and :math:`A` and :math:`B` are square matrices.

    Graph matching tries to *maximize* the same objective function.
    This algorithm can be thought of as finding the alignment of the
    nodes of two graphs that minimizes the number of induced edge
    disagreements, or, in the case of weighted graphs, the sum of squared
    edge weight differences.

    Note that the quadratic assignment problem is NP-hard. The results given
    here are approximations and are not guaranteed to be optimal.

    Parameters
    ----------
    A : 2-D array, square
        The square matrix :math:`A` in the objective function above.
    B : 2-D array, square
        The square matrix :math:`B` in the objective function above.
    method : str in {'faq', '2opt'} (default: 'faq')
        The algorithm used to solve the problem. This is the method-specific
        documentation for '2opt'.
        :ref:`'faq' <optimize.qap-faq>` is also available.

    Options
    -------
    maximize : bool (default: False)
        Maximizes the objective function if ``True``.
    rng : {None, int, `numpy.random.Generator`,
           `numpy.random.RandomState`}, optional

        If `rng` is None (or `np.random`), the `numpy.random.RandomState`
        singleton is used.
        If `rng` is an int, a new ``RandomState`` instance is used,
        seeded with `rng`.
        If `rng` is already a ``Generator`` or ``RandomState`` instance,
        then that instance is used.
    partial_match : 2-D array of integers, optional (default: None)
        Fixes part of the matching. Also known as a "seed" [2]_.

        Each row of `partial_match` specifies a pair of matched nodes: node
        ``partial_match[i, 0]`` of `A` is matched to node
        ``partial_match[i, 1]`` of `B`. The array has shape ``(m, 2)``,
        where ``m`` is not greater than the number of nodes, :math:`n`.
    partial_guess : 2-D array of integers, optional (default: None)
        A guess for the matching between the two matrices. Unlike
        `partial_match`, `partial_guess` does not fix the indices; they are
        still free to be optimized.

        Each row of `partial_guess` specifies a pair of matched nodes: node
        ``partial_guess[i, 0]`` of `A` is matched to node
        ``partial_guess[i, 1]`` of `B`. The array has shape ``(m, 2)``,
        where ``m`` is not greater than the number of nodes, :math:`n`.

    Returns
    -------
    res : OptimizeResult
        `OptimizeResult` containing the following fields.

        col_ind : 1-D array
            Column indices corresponding to the best permutation found of the
            nodes of `B`.
        fun : float
            The objective value of the solution.
        nit : int
            The number of iterations performed during optimization.

    Notes
    -----
    This is a greedy algorithm that works similarly to bubble sort: beginning
    with an initial permutation, it iteratively swaps pairs of indices to
    improve the objective function until no such improvements are possible.

    References
    ----------
    .. [1] "2-opt," Wikipedia.
           https://en.wikipedia.org/wiki/2-opt

    .. [2] D. Fishkind, S. Adali, H. Patsolic, L. Meng, D. Singh, V. Lyzinski,
           C. Priebe, "Seeded graph matching", Pattern Recognit. 87 (2019):
           203-215, :doi:`10.1016/j.patcog.2018.09.014`
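
    Examples
    --------
    A minimal, illustrative run on random matrices; the objective value
    varies from run to run, so the example only checks that ``col_ind`` is
    a permutation of the node indices.

    >>> import numpy as np
    >>> from scipy.optimize import quadratic_assignment
    >>> rng = np.random.default_rng()
    >>> n = 10
    >>> A = rng.random((n, n))
    >>> B = rng.random((n, n))
    >>> res = quadratic_assignment(A, B, method="2opt")
    >>> np.array_equal(np.sort(res.col_ind), np.arange(n))
    True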

    """
    _check_unknown_options(unknown_options)
    rng = check_random_state(rng)
    A, B, partial_match = _common_input_validation(A, B, partial_match)

    N = len(A)
    # check trivial cases
    if N == 0 or partial_match.shape[0] == N:
        score = _calc_score(A, B, partial_match[:, 1])
        res = {"col_ind": partial_match[:, 1], "fun": score, "nit": 0}
        return OptimizeResult(res)

    if partial_guess is None:
        partial_guess = np.array([[], []]).T
    partial_guess = np.atleast_2d(partial_guess).astype(int)

    msg = None
    if partial_guess.shape[0] > A.shape[0]:
        msg = ("`partial_guess` can have only as "
               "many entries as there are nodes")
    elif partial_guess.shape[1] != 2:
        msg = "`partial_guess` must have two columns"
    elif partial_guess.ndim != 2:
        msg = "`partial_guess` must have exactly two dimensions"
    elif (partial_guess < 0).any():
        msg = "`partial_guess` must contain only non-negative indices"
    elif (partial_guess >= len(A)).any():
        msg = "`partial_guess` entries must be less than number of nodes"
    elif (not len(set(partial_guess[:, 0])) == len(partial_guess[:, 0]) or
          not len(set(partial_guess[:, 1])) == len(partial_guess[:, 1])):
        msg = "`partial_guess` column entries must be unique"
    if msg is not None:
        raise ValueError(msg)

    fixed_rows = None
    if partial_match.size or partial_guess.size:
        # use partial_match and partial_guess for initial permutation,
        # but randomly permute the rest.
        guess_rows = np.zeros(N, dtype=bool)
        guess_cols = np.zeros(N, dtype=bool)
        fixed_rows = np.zeros(N, dtype=bool)
        fixed_cols = np.zeros(N, dtype=bool)
        perm = np.zeros(N, dtype=int)

        rg, cg = partial_guess.T
        guess_rows[rg] = True
        guess_cols[cg] = True
        perm[guess_rows] = cg

        # match overrides guess
        rf, cf = partial_match.T
        fixed_rows[rf] = True
        fixed_cols[cf] = True
        perm[fixed_rows] = cf

        random_rows = ~fixed_rows & ~guess_rows
        random_cols = ~fixed_cols & ~guess_cols
        perm[random_rows] = rng.permutation(np.arange(N)[random_cols])
    else:
        perm = rng.permutation(np.arange(N))

    best_score = _calc_score(A, B, perm)

    i_free = np.arange(N)
    if fixed_rows is not None:
        i_free = i_free[~fixed_rows]

    better = operator.gt if maximize else operator.lt
    n_iter = 0
    done = False
    while not done:
        # equivalent to nested for loops i in range(N), j in range(i, N)
        for i, j in itertools.combinations_with_replacement(i_free, 2):
            n_iter += 1
            perm[i], perm[j] = perm[j], perm[i]
            score = _calc_score(A, B, perm)
            if better(score, best_score):
                best_score = score
                break
            # faster to swap back than to create a new list every time
            perm[i], perm[j] = perm[j], perm[i]
        else:  # no swaps made
            done = True

    res = {"col_ind": perm, "fun": best_score, "nit": n_iter}
    return OptimizeResult(res)