ioftools / networkxMiCe / networkxmaster / networkx / algorithms / centrality / MiCe.py @ 5cef0f13
History  View  Annotate  Download (5.05 KB)
1 
#!/usr/bin/env python


2 
# -*- coding: utf-8 -*-

3  
4 
# This program is free software: you can redistribute it and/or modify

5 
# it under the terms of the GNU General Public License as published by

6 
# the Free Software Foundation, either version 3 of the License, or

7 
# (at your option) any later version.

8 
#

9 
# This program is distributed in the hope that it will be useful,

10 
# but WITHOUT ANY WARRANTY; without even the implied warranty of

11 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the

12 
# GNU General Public License for more details.

13 
#

14 
# You should have received a copy of the GNU General Public License

15 
# along with this program. If not, see <http://www.gnu.org/licenses/>.

16 
#

17 
# Copyright (C) 2019 Mattia Milani <mattia.milani@studenti.unitn.it>

18 
"""Milani Centrality."""

19  
20 
from __future__ import division 
21  
22 
import networkx as nx 
23  
24 
__all__ = ['mice_centrality']

25  
26  
27 
def new_mice_centrality(g, cutoff=None, normalized=True, weight=None, dest_identifier=None):
    """Compute distributed partial centrality (MiCe) for nodes.

    The centrality of a node is the fraction of all shortest paths
    between *stub* nodes that pass through that node.  Stub nodes are
    the nodes whose ``dest_identifier`` attribute equals 1; every other
    node is treated as a transit node.

    Parameters
    ----------
    g : graph
        A networkx graph.  Every node is expected to carry the
        ``dest_identifier`` attribute (0 or 1).
    cutoff : int or None, optional (default=None)
        If specified, only consider paths of length <= cutoff.
    normalized : bool, optional (default=True)
        If True the loads are scaled by ``1 / (n * (n - 1))`` where n is
        the number of stub nodes (the number of ordered stub pairs).
    weight : None or string, optional (default=None)
        If None, edge weights are ignored.  Otherwise holds the name of
        the edge attribute used as weight (Dijkstra is used instead of
        BFS in that case).
    dest_identifier : string
        Name of the node attribute used to discern stub nodes (value 1)
        from transit nodes (value 0).

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with centrality as the value.

    See Also
    --------
    betweenness_centrality
    """
    # Initially every node carries zero load.
    dpc_load = dict.fromkeys(g, 0.0)

    # Collect the stub nodes once; the original scanned g.nodes twice
    # (once to build the source set, once to count it for normalization).
    stubs = [node for node, flag in g.nodes(data=dest_identifier) if flag == 1]

    # Accumulate, for every stub source, the per-node load contribution
    # computed by the single-source helper.
    for source in stubs:
        ubetween = _node_betweenness(g, source, cutoff, weight, dest_identifier)
        for vk, contribution in ubetween.items():
            dpc_load[vk] += contribution

    if normalized:
        order = len(stubs)
        if order <= 1:
            return dpc_load  # no normalization possible with <= 1 stub node
        # One unit of load is injected per ordered stub pair, so dividing
        # by order * (order - 1) rescales the totals to fractions.
        # (The scraped source showed "order  1": the minus sign had been
        # lost in extraction; restored here.)
        scale = 1 / (order * (order - 1))
        for v in dpc_load:
            dpc_load[v] *= scale
    return dpc_load
85  
86  
87 
def _node_betweenness(G, source, cutoff=False, weight=None, dest_identifier=None):
    """Single-source partial-centrality helper.

    Computes, for every node of ``G``, the load contributed by shortest
    paths that start at ``source``.  This is "partial centrality", not
    true betweenness: only stub nodes (``dest_identifier`` attribute
    equal to 1) inject one unit of load each; transit nodes inject none.
    Call it once per stub source and sum to get the full load.

    If ``weight`` is not None, Dijkstra is used to find shortest paths;
    otherwise an unweighted BFS predecessor search is used.
    """
    # Predecessor lists and distance-from-source for every reached node.
    if weight is None:
        pred, length = nx.predecessor(G, source, cutoff=cutoff,
                                      return_seen=True)
    else:
        pred, length = nx.dijkstra_predecessor_and_distance(G, source,
                                                            cutoff, weight)

    # Prune each predecessor list down to the single predecessor with
    # the smallest integer label, so exactly one shortest path survives
    # per node.  (Assumes node labels are int()-convertible -- TODO
    # confirm against callers.)
    for parents in pred.values():
        if parents:
            parents[:] = [min(parents, key=int)]

    # Nodes reached from the source, sorted by increasing distance; the
    # source itself (distance 0) is excluded.
    ranked = sorted((dist, node) for node, dist in length.items())
    ordered_nodes = [node for dist, node in ranked if dist > 0]

    # Seed every node's contribution from its stub/transit attribute:
    # transit nodes (attribute == 0) start at 0.0, stubs at 1.0.
    between = {}
    for node, attrs in G.nodes(data=True):
        between[node] = 0.0 if attrs[dest_identifier] == 0 else 1.0

    # Sweep from the farthest node back towards the source, splitting
    # each node's accumulated load evenly among its predecessors.
    while ordered_nodes:
        v = ordered_nodes.pop()
        if v not in pred:
            continue
        parents = pred[v]
        num_paths = len(parents)
        for parent in parents:
            if parent == source:
                # Hit the source (pred[v] == [source]); stop propagating.
                break
            between[parent] += between[v] / float(num_paths)

    # Stub nodes always report exactly their own injected unit,
    # regardless of any load that flowed through them above.
    for node, attrs in G.nodes(data=True):
        if attrs[dest_identifier] == 1:
            between[node] = 1.0

    return between

145  
146  
147 
# Public alias: expose the implementation under the name declared in __all__.
mice_centrality = new_mice_centrality