The following 16 code examples, extracted from open-source Python projects, demonstrate how to use networkx.degree_centrality().
def CentralityMeasures(G):
    """Compute four centrality measures for graph G and print each one.

    Parameters
    ----------
    G : networkx graph

    Returns
    -------
    None; the betweenness, closeness, eigenvector and degree centrality
    dictionaries are written to stdout.
    """
    # Betweenness centrality
    bet_cen = nx.betweenness_centrality(G)
    # Closeness centrality
    clo_cen = nx.closeness_centrality(G)
    # Eigenvector centrality
    eig_cen = nx.eigenvector_centrality(G)
    # Degree centrality
    deg_cen = nx.degree_centrality(G)
    # Fix: single-argument print wrapped in parentheses is valid and prints
    # identically under both Python 2 (parenthesized expression) and
    # Python 3 (function call).
    print("# Betweenness centrality:" + str(bet_cen))
    print("# Closeness centrality:" + str(clo_cen))
    print("# Eigenvector centrality:" + str(eig_cen))
    print("# Degree centrality:" + str(deg_cen))
def Nodes_Ranking(G, index):
    """Dispatch to the node-ranking routine selected by *index*.

    Parameters
    ----------
    G : networkx graph
    index : str
        Name of the centrality / ranking measure to apply.

    Returns
    -------
    Whatever the selected ranking function returns, or None when *index*
    matches no known measure.
    """
    # NOTE: the original kept commented-out Katz-centrality / PageRank
    # experiments here; they are omitted.
    if index == "degree_centrality":
        return Degree_Centrality(G)
    elif index == "degree_mass_Centrality":
        return Degree_Mass_Centrality(G)
    elif index == "between_centrality":
        return Between_Centrality(G)
    elif index == "closeness_centrality":
        return Closeness_Centrality(G)
    elif index == "kshell_centrality":
        return KShell_Centrality(G)
    elif index == "eigen_centrality":
        return Eigen_Centrality_Andy(G)
    elif index == "collective_influence":
        return Collective_Influence(G)
    elif index == "enhanced_collective_centrality":
        return Enhanced_Collective_Influence(G)
    elif index == "hybrid_diffusion_centrality":
        return Hybrid_Diffusion_Centrality(G)
def out_degree_centrality(G):
    """Compute the out-degree centrality for nodes.

    The out-degree centrality for a node v is the fraction of nodes its
    outgoing edges are connected to.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with out-degree centrality as values.

    Raises
    ------
    NetworkXError
        If G is undirected.

    See Also
    --------
    degree_centrality, in_degree_centrality

    Notes
    -----
    The degree centrality values are normalized by dividing by the maximum
    possible degree in a simple graph n-1 where n is the number of nodes in
    G.

    For multigraphs or graphs with self loops the maximum degree might be
    higher than n-1 and values of degree centrality greater than 1 are
    possible.
    """
    if not G.is_directed():
        raise nx.NetworkXError(
            "out_degree_centrality() not defined for undirected graphs.")
    # Fix: with fewer than two nodes the 1/(n-1) normalization below would
    # divide by zero; by convention every node then has centrality 1.
    if len(G) <= 1:
        return dict((n, 1) for n in G)
    s = 1.0 / (len(G) - 1.0)
    centrality = dict((n, d * s) for n, d in G.out_degree_iter())
    return centrality
def calculate_degree_centrality(graph): print "\nCalculating Degree Centrality..." g = graph dc = nx.degree_centrality(g) nx.set_node_attributes(g, 'degree_cent', dc) degcent_sorted = sorted(dc.items(), key=itemgetter(1), reverse=True) for key, value in degcent_sorted[0:10]: print " > ", key, round(value, 4) return graph, dc
def UpdateThresholdDegree(self): self.g = self.Graph_data().DrawHighlightedGraph(self.EdgeSliderValue) # Degree Centrality for the the nodes involved self.Centrality=nx.degree_centrality(self.g) self.Betweeness=nx.betweenness_centrality(self.g) self.ParticipationCoefficient = self.communityDetectionEngine.participation_coefficient(self.g,True) self.LoadCentrality = nx.load_centrality(self.g) self.ClosenessCentrality = nx.closeness_centrality(self.g) for i in range(len(self.ParticipationCoefficient)): if (str(float(self.ParticipationCoefficient[i])).lower() == 'nan'): self.ParticipationCoefficient[i] = 0 i = 0 """ Calculate rank and Zscore """ MetrixDataStructure=eval('self.'+self.nodeSizeFactor) from collections import OrderedDict self.sortedValues = OrderedDict(sorted(MetrixDataStructure.items(), key=lambda x:x[1])) self.average = np.average(self.sortedValues.values()) self.std = np.std(self.sortedValues.values()) for item in self.scene().items(): if isinstance(item, Node): Size = eval('self.'+self.nodeSizeFactor+'[i]') rank, Zscore = self.calculateRankAndZscore(i) item.setNodeSize(Size,self.nodeSizeFactor,rank,Zscore) i = i + 1 self.ThresholdChange.emit(True) if not(self.ColorNodesBasedOnCorrelation): self.Ui.communityLevelLineEdit.setText(str(self.level)) self.DendoGramDepth.emit(self.level) self.Refresh()
def Degree_Centrality(G):
    """Return the networkx degree-centrality dict (node -> value) for G."""
    # A debug print of the ranking (sorted by value, descending) used to
    # live here.
    centrality = nx.degree_centrality(G)
    return centrality
def Nodes_Ranking(G, index):
    """Dispatch to the node-ranking routine selected by *index*.

    Parameters
    ----------
    G : networkx graph
    index : str
        Name of the centrality / ranking measure to apply.

    Returns
    -------
    Whatever the selected ranking function returns, or None when *index*
    matches no known measure.
    """
    if index == "degree_centrality":
        return Degree_Centrality(G)
    elif index == "between_centrality":
        return Between_Centrality(G)
    elif index == "closeness_centrality":
        return Closeness_Centrality(G)
    elif index == "pagerank_centrality":
        return Page_Rank(G)
    elif index == "kshell_centrality":
        return KShell_Centrality(G)
    elif index == "collective_influence":
        return Collective_Influence(G)
    elif index == "enhanced_collective_centrality":
        return Enhanced_Collective_Influence(G)
    elif index == "eigen_centrality":
        # Average-based variant (Eigen_Centrality_Andy was the alternative).
        return Eigen_Centrality_Avg(G)
    elif index == "md_eigen_centrality":
        return MD_Eigen_Centrality_Andy(G)
    elif index == "hc_eigen_centrality":
        return HC_Eigen_Centrality_Andy(G)
    # ("hybrid_diffusion_centrality" was disabled in the original source.)
    elif index == "PIR_Centrality":
        # i.e. weighted hybrid diffusion centrality
        return PIR_Centrality_Avg(G)
def adj_to_degree(y):
    """Build a graph from adjacency data *y* and return its raw
    (unnormalized) degree mapping via nx.degree.

    Note: nx.degree_centrality would give normalized degrees; those can be
    converted back to raw degrees by multiplying by n-1, but this routine
    returns raw degrees directly.
    """
    # @debug: avoid calling the nxG helper here -- a native integration
    # would be preferable.
    graph = nxG(y)
    return nx.degree(graph)
def main(filename, type, constructed_graph = -1):
    """Compare an original graph with its reconstruction: plot both degree
    distributions and print edge counts and average clustering.

    Parameters
    ----------
    filename : dataset name, used under data/ and reconstruction/
    type : reconstruction variant subdirectory name
    constructed_graph : adjacency structure to use directly; -1 (default)
        means load the pickled *.adj file from disk instead.
    """
    # 1. original graph
    original_graph_path = os.path.join("data", filename, "")
    original_graph = generate_graph(original_graph_path, filename, -1)
    plt.figure("original graph degree distribution")
    draw_degree(original_graph)
    print('original edge number: ', len(original_graph.edges()))
    # 2. reconstruct graph
    if constructed_graph == -1:
        reconstruct_graph_path = os.path.join("reconstruction", filename, type, "")
        # NOTE(review): the file handle passed to pickle.load is never closed;
        # also assumes exactly one *.adj file is present -- confirm.
        reconstruct_graph_adj = pickle.load(open(glob.glob(reconstruct_graph_path + "*.adj")[0], 'rb'))
    else:
        reconstruct_graph_adj = constructed_graph
    reconstruct_graph = adj2Graph(reconstruct_graph_adj, edgesNumber = len(original_graph.edges()))
    print('edge number: ', len(reconstruct_graph.edges()))
    plt.figure("reconstruct graph degree distribution")
    draw_degree(reconstruct_graph)
    print("Clustering: ", nx.average_clustering(original_graph), ' ', nx.average_clustering(reconstruct_graph))
    # Additional comparisons, disabled in the original:
    # print("Diameter: ", nx.average_shortest_path_length(original_graph), ' ', nx.average_shortest_path_length(reconstruct_graph))
    # print("degree centrality: ", nx.degree_centrality(original_graph), ' ', nx.degree_centrality(reconstruct_graph))
    # print("closeness centrality: ", nx.closeness_centrality(original_graph), ' ', nx.closeness_centrality(reconstruct_graph))
    plt.show()
def degree_centrality(self):
    """Degree centrality of every node in the wrapped graph.

    Returns
    -------
    dict
        Node -> degree-centrality value, as computed by
        networkx.degree_centrality on the underlying graph.
    """
    graph = self._graph
    return nx.degree_centrality(graph)
def calculate_centrality(G): degc = nx.degree_centrality(G) nx.set_node_attributes(G,'degree_cent', degc) degc_sorted = sorted(degc.items(), key=valuegetter(1), reverse=True) for key, value in degc_sorted[0:10]: print "Degree Centrailty:", key, value return G, degc
def changeLayout(self, Layout='sfdp'):
    """Recompute node positions with the named layout algorithm, refresh
    centrality metrics, and move/resize every Node item in the scene.

    Layout : str, layout name (default 'sfdp'); non-ASCII characters and
        spaces are stripped before use.
    """
    Layout = (Layout.encode('ascii', 'ignore')).replace(' ', '')
    self.g = self.Graph_data().DrawHighlightedGraph(self.EdgeSliderValue)
    # Ask the community-detection engine to compute the layout positions.
    self.pos, Factor = self.communityDetectionEngine.communityLayoutCalculation(Layout, self.g)
    # Degree centrality for the nodes involved
    self.Centrality = nx.degree_centrality(self.g)
    self.Betweeness = nx.betweenness_centrality(self.g)
    self.LoadCentrality = nx.load_centrality(self.g)
    self.ParticipationCoefficient = self.communityDetectionEngine.participation_coefficient(self.g, True)
    self.ClosenessCentrality = nx.closeness_centrality(self.g)
    # Participation coefficient can come back NaN; clamp those entries to 0.
    for i in range(len(self.ParticipationCoefficient)):
        if (str(float(self.ParticipationCoefficient[i])).lower() == 'nan'):
            self.ParticipationCoefficient[i] = 0
    # Reset i: it doubles below as the node index into pos and the metrics.
    i = 0
    """ Calculate rank and Zscore """
    # nodeSizeFactor presumably names one of the metric dicts set above
    # (e.g. 'Centrality') -- TODO confirm against the class definition.
    MetrixDataStructure = eval('self.' + self.nodeSizeFactor)
    from collections import OrderedDict
    self.sortedValues = OrderedDict(sorted(MetrixDataStructure.items(), key=lambda x: x[1]))
    self.average = np.average(self.sortedValues.values())
    self.std = np.std(self.sortedValues.values())
    # Reposition and resize every Node item in the scene.
    for item in self.scene().items():
        if isinstance(item, Node):
            x, y = self.pos[i]
            item.setPos(QtCore.QPointF(x, y) * Factor)
            Size = eval('self.' + self.nodeSizeFactor + '[i]')
            rank, Zscore = self.calculateRankAndZscore(i)
            item.setNodeSize(Size, self.nodeSizeFactor, rank, Zscore)
            i = i + 1
    # Re-route edges to the new node positions.
    for edge in self.edges:
        edge().adjust()
    self.Refresh()
    if not (self.PositionPreserve):
        self.Scene_to_be_updated.setSceneRect(self.Scene_to_be_updated.itemsBoundingRect())
        self.setScene(self.Scene_to_be_updated)
        self.fitInView(self.Scene_to_be_updated.itemsBoundingRect(), QtCore.Qt.KeepAspectRatio)
        self.Scene_to_be_updated.update()
def main():
    """Build a DNS-resolution digraph for baidu.com and print assorted
    degree statistics about its main/CNAME/IP nodes.
    """
    domain_name = 'baidu.com'
    domain_pkts = get_data(domain_name)
    node_cname, node_ip, visit_total, edges, node_main = get_ip_cname(domain_pkts[0]['details'])
    # Add one edge per DNS answer: queried name -> answer data.
    for i in domain_pkts[0]['details']:
        for v in i['answers']:
            edges.append((v['domain_name'], v['dm_data']))
    DG = nx.DiGraph()
    DG.add_edges_from(edges)
    # NOTE(review): the original comment here was mojibake; this appears to
    # look for main-domain nodes resolving straight to IP nodes.  However
    # `DG.successors(node) in node_ip` tests the whole successor *list* for
    # membership, which is likely always False -- confirm intent.
    for node in DG:
        if node in node_main and DG.successors(node) in node_ip:
            print node
    # NOTE(review): mojibake comment; appears to inspect CNAME nodes whose
    # successors are not CNAMEs (same list-membership caveat as above).
    for node in DG:
        if node in node_cname and DG.successors(node) not in node_cname:
            print "node", DG.out_degree(node), DG.in_degree(node), DG.degree(node)
    # (mojibake) a disabled variant also walked predecessors of CNAME nodes:
    # for node in DG:
    #     if node in node_cname and DG.predecessors(node) not in node_cname:
    #         print len(DG.predecessors(node))
    # Main-domain nodes with exactly three successors.
    for node in DG:
        if node in node_main:
            if len(DG.successors(node)) == 3:
                print node
                print DG.successors(node)
    # print sorted(nx.degree(DG).values())
    print nx.degree_assortativity_coefficient(DG)
    # NOTE(review): Python 2 integer division -- the average is truncated.
    average_degree = sum(nx.degree(DG).values()) / (len(node_cname) + len(node_ip) + len(node_main))
    print average_degree
    print len(node_cname) + len(node_ip) + len(node_main)
    print len(edges)
    print nx.degree_histogram(DG)
    # Disabled centrality dumps from the original:
    # print nx.degree_centrality(DG)
    # print nx.in_degree_centrality(DG)
    # print nx.out_degree_centrality(DG)
    # print nx.closeness_centrality(DG)
    # print nx.load_centrality(DG)