import networkx as nx

def CentralityMeasures(G):
    # Betweenness centrality
    bet_cen = nx.betweenness_centrality(G)
    # Closeness centrality
    clo_cen = nx.closeness_centrality(G)
    # Eigenvector centrality
    eig_cen = nx.eigenvector_centrality(G)
    # Degree centrality
    deg_cen = nx.degree_centrality(G)
    print("# Betweenness centrality: " + str(bet_cen))
    print("# Closeness centrality: " + str(clo_cen))
    print("# Eigenvector centrality: " + str(eig_cen))
    print("# Degree centrality: " + str(deg_cen))

#main function
Python examples of betweenness_centrality()
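The snippets on this page all call networkx's betweenness_centrality, which returns a dict mapping each node to its (normalized by default) betweenness score. As a minimal standalone sketch, assuming only networkx is installed (the graph below is purely illustrative):

import networkx as nx

# Small illustrative graph: a path 0-1-2-3-4.
G = nx.path_graph(5)

# betweenness_centrality returns {node: score}; scores are normalized by default.
bc = nx.betweenness_centrality(G)
for node, score in sorted(bc.items(), key=lambda kv: kv[1], reverse=True):
    print(node, round(score, 3))
# The middle node (2) gets the highest score; the endpoints get 0.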
egocentric_network_1_5.py (file source)
Project: Visualization-of-popular-algorithms-in-Python
Author: MUSoC
egocentric_network_2.py (file source)
Project: Visualization-of-popular-algorithms-in-Python
Author: MUSoC
def CentralityMeasures(G):
    # Betweenness centrality
    bet_cen = nx.betweenness_centrality(G)
    # Closeness centrality
    clo_cen = nx.closeness_centrality(G)
    # Eigenvector centrality
    eig_cen = nx.eigenvector_centrality(G)
    # Degree centrality
    deg_cen = nx.degree_centrality(G)
    print("# Betweenness centrality: " + str(bet_cen))
    print("# Closeness centrality: " + str(clo_cen))
    print("# Eigenvector centrality: " + str(eig_cen))
    print("# Degree centrality: " + str(deg_cen))

#main function
egocentric_network_1.py (file source)
Project: Visualization-of-popular-algorithms-in-Python
Author: MUSoC
def CentralityMeasures(G):
    # Betweenness centrality
    bet_cen = nx.betweenness_centrality(G)
    # Closeness centrality
    clo_cen = nx.closeness_centrality(G)
    # Eigenvector centrality
    eig_cen = nx.eigenvector_centrality(G)
    # Degree centrality
    deg_cen = nx.degree_centrality(G)
    print("# Betweenness centrality: " + str(bet_cen))
    print("# Closeness centrality: " + str(clo_cen))
    print("# Eigenvector centrality: " + str(eig_cen))
    print("# Degree centrality: " + str(deg_cen))

#main function
def authorNet_feature():
    # Output: compute a centrality score for each author and persist it as a dict.
    authorCo = pickle.load(open(cspath + "coauthor", "rb"))  # {author: co-authors}
    nodeSet = set()
    edgeSet = set()
    for key, val in authorCo.items():
        nodeSet.add(key)
        edgeSet.update([(key, item) for item in val if item != key])
    pickle.dump(nodeSet, open(cspath + "co_nodeSet", "wb"))
    pickle.dump(edgeSet, open(cspath + "co_edgeSet", "wb"))
    g = nx.Graph()
    g.add_nodes_from(nodeSet)
    g.add_edges_from(edgeSet)
    # Placeholder: give every author a constant score of 1.
    clo_cen = defaultdict(int)
    for node in g.nodes():
        clo_cen[node] = 1
    # The sampled betweenness centrality below is commented out because it is
    # time-consuming; enable it for a real run.
    #clo_cen = nx.betweenness_centrality(g, k=int(len(g.nodes())/5))
    pickle.dump(clo_cen, open(cspath + "author_cen", "wb"))
    print('authorNet_feature finished')
def calculate_betweenness(graph):
    print("\n\tCalculating Betweenness Centrality...")
    g = graph
    # Betweenness is computed on the undirected projection of the graph.
    bc = nx.betweenness_centrality(g.to_undirected())
    nx.set_node_attributes(g, bc, 'betweenness')
    degbetw_sorted = sorted(bc.items(), key=itemgetter(1), reverse=True)
    # Print the ten nodes with the highest betweenness.
    for key, value in degbetw_sorted[0:10]:
        print("\t > ", key, round(value, 4))
    return g, bc
def main(opts):
    # Build a gene-gene interaction graph from a BioGRID TSV file.
    df = pd.read_csv(opts['biogrid'], sep='\t')
    interact_df = df[['Official Symbol Interactor A',
                      'Official Symbol Interactor B']]
    interact_genes = interact_df.dropna().values.tolist()
    G = nx.Graph()
    G.add_edges_from(map(tuple, interact_genes))
    gene_betweeness = nx.betweenness_centrality(G)
    gene_degree = dict(G.degree())
    result = [[key, gene_betweeness[key], gene_degree[key]]
              for key in gene_degree]
    result = [['gene', 'gene_betweeness', 'gene_degree']] + result
    with open(opts['output'], 'w', newline='') as handle:
        csv.writer(handle, delimiter='\t').writerows(result)
def UpdateThresholdDegree(self):
    self.g = self.Graph_data().DrawHighlightedGraph(self.EdgeSliderValue)
    # Centrality measures for the nodes involved
    self.Centrality = nx.degree_centrality(self.g)
    self.Betweeness = nx.betweenness_centrality(self.g)
    self.ParticipationCoefficient = self.communityDetectionEngine.participation_coefficient(self.g, True)
    self.LoadCentrality = nx.load_centrality(self.g)
    self.ClosenessCentrality = nx.closeness_centrality(self.g)
    # Replace NaN participation coefficients with 0
    for i in range(len(self.ParticipationCoefficient)):
        if (str(float(self.ParticipationCoefficient[i])).lower() == 'nan'):
            self.ParticipationCoefficient[i] = 0
    i = 0
    # Calculate rank and Z-score
    MetrixDataStructure = eval('self.' + self.nodeSizeFactor)
    from collections import OrderedDict
    self.sortedValues = OrderedDict(sorted(MetrixDataStructure.items(), key=lambda x: x[1]))
    self.average = np.average(list(self.sortedValues.values()))
    self.std = np.std(list(self.sortedValues.values()))
    for item in self.scene().items():
        if isinstance(item, Node):
            Size = eval('self.' + self.nodeSizeFactor + '[i]')
            rank, Zscore = self.calculateRankAndZscore(i)
            item.setNodeSize(Size, self.nodeSizeFactor, rank, Zscore)
            i = i + 1
    self.ThresholdChange.emit(True)
    if not (self.ColorNodesBasedOnCorrelation):
        self.Ui.communityLevelLineEdit.setText(str(self.level))
        self.DendoGramDepth.emit(self.level)
    self.Refresh()
def central_list(E):
    # Collect several centrality measures for the directed graph E
    # and print them per node, tab-separated.
    centralities = []
    centralities.append(nx.in_degree_centrality(E))
    centralities.append(nx.out_degree_centrality(E))
    centralities.append(nx.closeness_centrality(E))
    centralities.append(nx.betweenness_centrality(E))
    centralities.append(nx.eigenvector_centrality(E))
    for node in E.nodes():
        measures = "\t".join(str(c[node]) for c in centralities)
        print("%s: %s" % (node, measures))
def Between_Centrality(G):
    Bet_Centrality = nx.betweenness_centrality(G)
    #print("Bet_Centrality:", sorted(Bet_Centrality.items(), key=lambda d: d[1], reverse=True))
    return Bet_Centrality
def top_centrality():
    # Build a citation graph from the database and rank nodes by betweenness.
    citations = db.select(["citing", "cited"], table="graph", limit=1000000)
    print(len(citations))
    graph = nx.DiGraph()
    for citing, cited in progress(citations, 10000):
        graph.add_edge(int(citing), int(cited))
    print(graph.number_of_nodes())
    centrality = nx.betweenness_centrality(graph)
    print(list(centrality.items())[:100])
def betweenness_centrality(self):
    """
    Compute betweenness centrality for the wrapped graph, using
    self._weight_field as the edge-weight attribute.

    Returns
    -------
    dict
        Mapping of node to betweenness centrality score.
    """
    return nx.betweenness_centrality(self._graph, weight=self._weight_field)
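For context, when a weight attribute is passed, networkx treats it as a distance while computing the shortest paths that betweenness counts. A minimal standalone sketch, independent of the wrapper class above (node names and weights are illustrative):

import networkx as nx

G = nx.Graph()
G.add_edge('a', 'b', weight=1.0)
G.add_edge('b', 'c', weight=1.0)
G.add_edge('a', 'c', weight=5.0)  # long detour; the weighted shortest a-c path goes through b

bc = nx.betweenness_centrality(G, weight='weight')
print(bc)  # 'b' scores highest because the weighted shortest a-c path passes through it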
def __init__(self, method='degree', analyzer=NltkNormalizer().split_and_normalize):
    self.analyze = analyzer
    self.method = method
    self.methods_on_digraph = {'hits', 'pagerank', 'katz'}
    # Map the chosen method name to the corresponding networkx scoring function.
    self._get_scores = {'degree': nx.degree, 'betweenness': nx.betweenness_centrality,
                        'pagerank': nx.pagerank_scipy, 'hits': self._hits,
                        'closeness': nx.closeness_centrality,
                        'katz': nx.katz_centrality}[method]
    # Assign a new index whenever a new vocabulary item is seen.
    self.vocabulary = defaultdict()
    self.vocabulary.default_factory = self.vocabulary.__len__
screenplay_network_viz.py (file source)
Project: sceneTransitionNetMovieClassification
Author: daltonsi
def graph_info(g):
    result = {}
    components = [g.subgraph(c).copy() for c in nx.strongly_connected_components(g)]
    in_degrees = dict(g.in_degree())
    out_degrees = dict(g.out_degree())
    highest_in_degree_node = sorted(in_degrees, key=lambda x: in_degrees[x], reverse=True)[0]
    highest_out_degree_node = sorted(out_degrees, key=lambda x: out_degrees[x], reverse=True)[0]
    result['highest in_degree node'] = highest_in_degree_node
    result['highest out_degree_node'] = highest_out_degree_node
    result['number of components'] = len(components)
    result['number of nodes'] = g.number_of_nodes()
    result['number of edges'] = g.number_of_edges()
    # Degree centrality
    in_degree_centrality = nx.in_degree_centrality(g)
    out_degree_centrality = nx.out_degree_centrality(g)
    result['sorted in_degree centrality'] = sorted(in_degree_centrality.items(), key=lambda x: x[1], reverse=True)
    result['sorted out_degree centrality'] = sorted(out_degree_centrality.items(), key=lambda x: x[1], reverse=True)
    # Compute closeness and betweenness once instead of once per node.
    closeness = nx.closeness_centrality(g)
    betweenness = nx.betweenness_centrality(g)
    result['closeness_centrality'] = sorted(closeness.items(), key=lambda x: x[1], reverse=True)
    result['highest in_degree node closeness'] = closeness[highest_in_degree_node]
    result['highest out_degree node closeness'] = closeness[highest_out_degree_node]
    result['betweenness centrality'] = sorted(betweenness.items(), key=lambda x: x[1], reverse=True)
    result['highest in_degree node betweenness'] = betweenness[highest_in_degree_node]
    result['highest out_degree node betweenness'] = betweenness[highest_out_degree_node]
    largest_component = sorted(components, key=lambda x: x.number_of_nodes(), reverse=True)[0]
    result['largest strongly component percent'] = largest_component.number_of_nodes() / float(g.number_of_nodes())
    result['largest strongly component diameter'] = nx.diameter(largest_component)
    result['largest strongly component average path length'] = nx.average_shortest_path_length(largest_component)
    result['average_degree (undirected)'] = sum(dict(g.degree()).values()) / float(g.number_of_nodes())
    result['avg_cluster_coefficient (transitivity)'] = nx.transitivity(g)
    return result
def central_point_dominance(self):
    """
    Compute central point dominance: the average difference between the
    most central node's betweenness and every other node's betweenness.

    Returns
    -------
    cpd : float
        Central point dominance
    """
    bet_cen = nx.betweenness_centrality(self.to_undirected())
    bet_cen = list(bet_cen.values())
    cpd = sum(max(bet_cen) - np.array(bet_cen)) / (len(bet_cen) - 1)
    return cpd
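As a sanity check on the same formula, a standalone sketch (not part of the class above): a star graph, whose center lies on every shortest path, gives exactly 1, while a complete graph, where no shortest path has an interior node, gives 0.

import networkx as nx
import numpy as np

def cpd(G):
    # Same formula as above, applied directly to a networkx graph.
    bet = np.array(list(nx.betweenness_centrality(G).values()))
    return sum(bet.max() - bet) / (len(bet) - 1)

print(cpd(nx.star_graph(10)))      # prints 1.0: one node dominates all shortest paths
print(cpd(nx.complete_graph(10)))  # prints 0.0: all nodes have equal (zero) betweenness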
def count_top_centrality(graph, number=30):
    # Return the `number` nodes with the highest betweenness centrality.
    dd = nx.betweenness_centrality(graph)
    dc = Counter(dd)
    return dict(dc.most_common(number))
def calculate_betweenness_centality(graph, k=CENTRALITY_SAMPLES):
    """Calculates the betweenness centrality over nodes in the graph.

    Tries the sampled approximation with k pivot nodes first, and falls back
    to the exact (full) computation if sampling fails.

    :param pybel.BELGraph graph: A BEL graph
    :param int k: The number of samples to use
    :rtype: collections.Counter[tuple,float]
    """
    try:
        return Counter(nx.betweenness_centrality(graph, k=k))
    except Exception:
        return Counter(nx.betweenness_centrality(graph))
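The k argument above asks networkx to estimate betweenness from k sampled pivot nodes instead of using every node as a shortest-path source, trading accuracy for speed on large graphs. A standalone sketch of that trade-off (the graph size, k, and seed values are arbitrary choices):

import networkx as nx

G = nx.erdos_renyi_graph(300, 0.05, seed=42)

exact = nx.betweenness_centrality(G)                    # uses all nodes as sources
approx = nx.betweenness_centrality(G, k=50, seed=42)    # samples 50 pivot nodes

# Compare the exact and approximate scores for a handful of nodes.
for node in list(G.nodes())[:5]:
    print(node, round(exact[node], 4), round(approx[node], 4))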
def changeLayout(self, Layout='sfdp'):
    # Strip non-ASCII characters and spaces from the layout name.
    Layout = Layout.encode('ascii', 'ignore').decode('ascii').replace(' ', '')
    self.g = self.Graph_data().DrawHighlightedGraph(self.EdgeSliderValue)
    # Ask the community detection engine to compute the layout.
    self.pos, Factor = self.communityDetectionEngine.communityLayoutCalculation(Layout, self.g)
    # Centrality measures for the nodes involved
    self.Centrality = nx.degree_centrality(self.g)
    self.Betweeness = nx.betweenness_centrality(self.g)
    self.LoadCentrality = nx.load_centrality(self.g)
    self.ParticipationCoefficient = self.communityDetectionEngine.participation_coefficient(self.g, True)
    self.ClosenessCentrality = nx.closeness_centrality(self.g)
    # Replace NaN participation coefficients with 0
    for i in range(len(self.ParticipationCoefficient)):
        if (str(float(self.ParticipationCoefficient[i])).lower() == 'nan'):
            self.ParticipationCoefficient[i] = 0
    i = 0
    # Calculate rank and Z-score
    MetrixDataStructure = eval('self.' + self.nodeSizeFactor)
    from collections import OrderedDict
    self.sortedValues = OrderedDict(sorted(MetrixDataStructure.items(), key=lambda x: x[1]))
    self.average = np.average(list(self.sortedValues.values()))
    self.std = np.std(list(self.sortedValues.values()))
    for item in self.scene().items():
        if isinstance(item, Node):
            x, y = self.pos[i]
            item.setPos(QtCore.QPointF(x, y) * Factor)
            Size = eval('self.' + self.nodeSizeFactor + '[i]')
            rank, Zscore = self.calculateRankAndZscore(i)
            item.setNodeSize(Size, self.nodeSizeFactor, rank, Zscore)
            i = i + 1
    for edge in self.edges:
        edge().adjust()
    self.Refresh()
    if not (self.PositionPreserve):
        self.Scene_to_be_updated.setSceneRect(self.Scene_to_be_updated.itemsBoundingRect())
        self.setScene(self.Scene_to_be_updated)
        self.fitInView(self.Scene_to_be_updated.itemsBoundingRect(), QtCore.Qt.KeepAspectRatio)
        self.Scene_to_be_updated.update()