Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.9/dist-packages/networkx/algorithms/centrality/load.py: 12%

83 statements  

« prev     ^ index     » next       coverage.py v7.3.2, created at 2023-10-20 07:00 +0000

1"""Load centrality.""" 

2from operator import itemgetter 

3 

4import networkx as nx 

5 

6__all__ = ["load_centrality", "edge_load_centrality"] 

7 

8 

@nx._dispatch(edge_attrs="weight")
def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
    """Compute load centrality for nodes.

    The load centrality of a node is the fraction of all shortest
    paths that pass through that node.

    Parameters
    ----------
    G : graph
        A networkx graph.

    v : node, optional
        If given, return the load for this single node only.

    cutoff : bool, optional (default=None)
        If specified, only consider paths of length <= cutoff.

    normalized : bool, optional (default=True)
        If True the betweenness values are normalized by b=b/(n-1)(n-2) where
        n is the number of nodes in G.

    weight : None or string, optional (default=None)
        If None, edge weights are ignored.
        Otherwise holds the name of the edge attribute used as weight.
        The weight of an edge is treated as the length or distance between the two sides.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with centrality as the value (or a single
        float when ``v`` is given).

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Load centrality is slightly different than betweenness.  It was originally
    introduced by [2]_. For this load algorithm see [1]_.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Scientific collaboration networks. II.
       Shortest paths, weighted networks, and centrality.
       Physical Review E 64, 016132, 2001.
       http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
    .. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
       Universal behavior of Load Distribution in Scale-Free Networks.
       Physical Review Letters 87(27):1–4, 2001.
       https://doi.org/10.1103/PhysRevLett.87.278701
    """
    if v is not None:
        # Single-node variant: sum this node's load over all sources.
        total = 0.0
        for src in G:
            partial = _node_betweenness(G, src, cutoff, False, weight)
            total += partial.get(v, 0)
        if normalized:
            n = G.order()
            if n <= 2:
                # No normalization: b=0 for all nodes in graphs this small.
                return total
            total *= 1.0 / ((n - 1) * (n - 2))
        return total
    # All-nodes variant: accumulate per-source loads into one dict.
    scores = dict.fromkeys(G, 0.0)
    for src in scores:
        partial = _node_betweenness(G, src, cutoff, False, weight)
        for node in partial:
            scores[node] += partial[node]
    if normalized:
        n = G.order()
        if n <= 2:
            # No normalization: b=0 for all nodes in graphs this small.
            return scores
        scale = 1.0 / ((n - 1) * (n - 2))
        for node in scores:
            scores[node] *= scale
    return scores

83 

84 

def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
    """Node betweenness_centrality helper.

    See betweenness_centrality for what you probably want.
    This actually computes "load" and not betweenness.
    See https://networkx.lanl.gov/ticket/103

    This calculates the load of each node for paths from a single source.
    (The fraction of number of shortests paths from source that go
    through each node.)

    To get the load for a node you need to do all-pairs shortest paths.

    If weight is not None then use Dijkstra for finding shortest paths.
    """
    # Predecessor lists and shortest-path lengths from ``source``.
    if weight is None:
        pred, length = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        pred, length = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)

    # Nodes in increasing-distance order, with the source (distance 0) removed;
    # popping from the end then processes farthest nodes first.
    by_distance = sorted((dist, node) for node, dist in length.items())
    stack = [node for dist, node in by_distance if dist > 0]

    # Every reachable node starts with one unit of load.
    between = dict.fromkeys(length, 1.0)

    while stack:
        node = stack.pop()
        if node in pred:
            # Split the node's load equally among its shortest-path
            # predecessors; stop at the source since all remaining
            # predecessors would be the source as well.
            share = between[node] / len(pred[node])
            for parent in pred[node]:
                if parent == source:
                    break
                between[parent] += share
    # Remove the source's own unit from every count.
    for node in between:
        between[node] -= 1
    # Rescale to lie between 0 and 1.
    if normalized:
        n = len(between)
        if n > 2:
            # Scale by 1/(number of possible paths).
            scale = 1 / ((n - 1) * (n - 2))
            for node in between:
                between[node] *= scale
    return between

134 

135 

# Public alias: "load centrality" is implemented by Newman's betweenness variant above.
load_centrality = newman_betweenness_centrality

137 

138 

@nx._dispatch
def edge_load_centrality(G, cutoff=False):
    """Compute edge load.

    WARNING: This concept of edge load has not been analysed
    or discussed outside of NetworkX that we know of.
    It is based loosely on load_centrality in the sense that
    it counts the number of shortest paths which cross each edge.
    This function is for demonstration and testing purposes.

    Parameters
    ----------
    G : graph
        A networkx graph

    cutoff : bool, optional (default=False)
        If specified, only consider paths of length <= cutoff.

    Returns
    -------
    A dict keyed by edge 2-tuple to the number of shortest paths
    which use that edge. Where more than one path is shortest
    the count is divided equally among paths.
    """
    # Both orientations of every edge start at zero load.
    load = {}
    for u, v in G.edges():
        load[u, v] = load[v, u] = 0.0

    # Accumulate the per-source edge loads into a running total.
    for src in G:
        for edge, contribution in _edge_betweenness(G, src, cutoff=cutoff).items():
            load[edge] += contribution
    return load

173 

174 

def _edge_betweenness(G, source, nodes=None, cutoff=False):
    """Edge betweenness helper: single-source edge load.

    Counts, for every (directed) edge 2-tuple, the shortest paths from
    ``source`` that traverse it, splitting counts equally among
    multiple shortest paths.
    """
    # Predecessor lists and path lengths from ``source``.
    pred, length = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # Nodes ordered by increasing distance; popping processes farthest first.
    order = [node for node in sorted(length, key=length.get)]
    # Initialize betweenness; doesn't account for any edge weights.
    between = {}
    for u, v in G.edges(nodes):
        between[u, v] = between[v, u] = 1.0

    while order:  # work through all paths
        node = order.pop()
        if node in pred:
            for parent in pred[node]:
                if parent in pred:
                    # Discount by the number of shortest paths through
                    # ``parent`` (multiple-path case).
                    n_paths = len(pred[parent])
                    for grandparent in pred[parent]:
                        between[parent, grandparent] += between[node, parent] / n_paths
                        between[grandparent, parent] += between[parent, node] / n_paths
    return between