Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.9/dist-packages/networkx/algorithms/centrality/percolation.py: 21%
33 statements
1"""Percolation centrality measures."""
3import networkx as nx
4from networkx.algorithms.centrality.betweenness import (
5 _single_source_dijkstra_path_basic as dijkstra,
6)
7from networkx.algorithms.centrality.betweenness import (
8 _single_source_shortest_path_basic as shortest_path,
9)
11__all__ = ["percolation_centrality"]


@nx._dispatch(node_attrs="attribute", edge_attrs="weight")
def percolation_centrality(G, attribute="percolation", states=None, weight=None):
    r"""Compute the percolation centrality for nodes.

    Percolation centrality of a node $v$, at a given time, is defined
    as the proportion of ‘percolated paths’ that go through that node.
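
    Written out (in notation consistent with [1]_ and with the
    accumulation implemented below), the quantity computed for each
    node $v$ is

    .. math::

        PC(v) = \frac{1}{n - 2} \sum_{s \ne v \ne r}
            \frac{\sigma_{sr}(v)}{\sigma_{sr}}
            \frac{x_s}{\sum_{i} x_i - x_v}

    where $\sigma_{sr}$ is the number of shortest paths between $s$ and
    $r$, $\sigma_{sr}(v)$ is the number of those paths that pass through
    $v$, $x_i$ is the percolation state of node $i$, and $n$ is the
    number of nodes in $G$.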

    This measure quantifies the relative impact of nodes based on their
    topological connectivity, as well as their percolation states.

    Percolation states of nodes are used to depict network percolation
    scenarios (such as during infection transmission in a social network
    of individuals, spreading of computer viruses on computer networks, or
    transmission of disease over a network of towns) over time. The
    percolation state is usually expressed as a decimal between 0.0
    and 1.0.

    When all nodes are in the same percolated state this measure is
    equivalent to betweenness centrality (see Examples below).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    attribute : None or string, optional (default='percolation')
        Name of the node attribute to use for percolation state, used
        if `states` is None. If a node does not have the attribute, its
        state is set to the default value of 1. If no node has the
        attribute, all states default to 1 and the centrality measure
        is equivalent to betweenness centrality.

    states : None or dict, optional (default=None)
        Specify percolation states for the nodes: nodes as keys, states
        as values.

    weight : None or string, optional (default=None)
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        The weight of an edge is treated as the length or distance
        between its two endpoints.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with percolation centrality as the value.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
    Liaquat Hossain [1]_. Pair dependencies are calculated and
    accumulated using the technique of Brandes [2]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain:
       Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
       during Percolation in Networks.
       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
    .. [2] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
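
    Examples
    --------
    A minimal sketch (illustrative, not exhaustive): when no
    ``percolation`` attribute and no `states` are supplied, every state
    defaults to 1 and the values coincide with normalized betweenness
    centrality.

    >>> G = nx.path_graph(5)
    >>> pc = nx.percolation_centrality(G)
    >>> bc = nx.betweenness_centrality(G)
    >>> all(abs(pc[v] - bc[v]) < 1e-12 for v in G)
    True

    Explicit per-node states can also be passed directly (the values
    below are hypothetical, chosen only for illustration):

    >>> states = {0: 0.2, 1: 0.5, 2: 0.9, 3: 0.4, 4: 0.1}
    >>> pc_states = nx.percolation_centrality(G, states=states)
    >>> sorted(pc_states) == sorted(G)
    True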
85 """
86 percolation = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
88 nodes = G
90 if states is None:
91 states = nx.get_node_attributes(nodes, attribute, default=1)
93 # sum of all percolation states
94 p_sigma_x_t = 0.0
95 for v in states.values():
96 p_sigma_x_t += v
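    # p_sigma_x_t is the total of all states; it appears in the denominator
    # of the percolation weight x_s / (sum of all states - x_w) applied
    # during accumulation below.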

    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = shortest_path(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = dijkstra(G, s, weight)
        # accumulation
        percolation = _accumulate_percolation(
            percolation, S, P, sigma, s, states, p_sigma_x_t
        )

    n = len(G)
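
    # rescale by the 1 / (n - 2) factor from the definition of
    # percolation centrality (see the formula in the docstring)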
    for v in percolation:
        percolation[v] *= 1 / (n - 2)

    return percolation


def _accumulate_percolation(percolation, S, P, sigma, s, states, p_sigma_x_t):
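    # Brandes-style back-propagation of pair dependencies (reference [2]
    # in the docstring above): nodes are popped from S in order of
    # non-increasing distance from the source s, and each dependency is
    # weighted by the percolation weight states[s] / (p_sigma_x_t - states[w]).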
    delta = dict.fromkeys(S, 0)
    while S:
        w = S.pop()
        coeff = (1 + delta[w]) / sigma[w]
        for v in P[w]:
            delta[v] += sigma[v] * coeff
        if w != s:
            # percolation weight
            pw_s_w = states[s] / (p_sigma_x_t - states[w])
            percolation[w] += delta[w] * pw_s_w
    return percolation