We extracted the following 6 code examples from open-source Python projects to illustrate how to use networkx.diameter().
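Before the project-specific examples, here is a minimal self-contained sketch of the call itself (the graphs G and H below are made up for illustration and are not taken from any of the projects): nx.diameter() returns the longest shortest-path length in a connected graph and raises NetworkXError when the graph is disconnected, which is why several of the examples below either take an undirected copy or the largest component first, or wrap the call in a try/except.

import networkx as nx

# A small connected graph: path 0-1-2-3 plus a chord 1-3.
G = nx.Graph([(0, 1), (1, 2), (2, 3), (1, 3)])

# The diameter is the longest shortest-path length over all node pairs.
print(nx.diameter(G))  # 2  (e.g. the shortest path from 0 to 3 has length 2)

# On a disconnected graph the shortest-path length is infinite for some pairs,
# so networkx raises NetworkXError, which callers often catch.
H = nx.Graph([(0, 1), (2, 3)])
try:
    nx.diameter(H)
except nx.NetworkXError:
    print("graph is not connected; diameter undefined")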
def get_data_prop(self):
    prop = super(frontendNetwork, self).get_data_prop()
    if self.is_symmetric():
        nnz = np.triu(self.data).sum()
    else:
        nnz = self.data.sum()
    _nnz = self.data.sum(axis=1)
    d = {'instances': self.data.shape[1],
         'nnz': nnz,
         'nnz_mean': _nnz.mean(),
         'nnz_var': _nnz.var(),
         'density': self.density(),
         'diameter': self.diameter(),
         'clustering_coef': self.clustering_coefficient(),
         'modularity': self.modularity(),
         'communities': self.clusters_len(),
         'features': self.get_nfeat(),
         'directed': not self.is_symmetric()
         }
    prop.update(d)
    return prop
def template(self, d):
    d['time'] = d.get('time', None)
    netw_templ = '''###### $corpus
    Building: $time minutes
    Nodes: $instances
    Links: $nnz
    Degree mean: $nnz_mean
    Degree var: $nnz_var
    Diameter: $diameter
    Modularity: $modularity
    Clustering Coefficient: $clustering_coef
    Density: $density
    Communities: $communities
    Relations: $features
    Directed: $directed
    \n'''
    return super(frontendNetwork, self).template(d, netw_templ)
def test_diameter():
    """
    Pandit, Arka, and John C. Crittenden. "Index of network resilience (INR)
    for urban water distribution systems." Nature (2012).
    """
    raise SkipTest
    inp_file = join(datadir, 'Anytown.inp')

    # Create a water network model for results object
    wn = wntr.network.WaterNetworkModel(inp_file)
    G = wn.get_graph_deep_copy()
    udG = G.to_undirected()
    diameter = nx.diameter(udG)

    error = abs(5.0 - diameter)
    assert_less(error, 0.01)
def statistics(self):
    """Return some topological information about the experiment"""
    stat = {}
    stat["net diameter"] = nx.diameter(self.network)
    stat["net radius"] = nx.radius(self.network)
    stat["net asp"] = nx.average_shortest_path_length(self.network)
    stat["input asp"] = net.inputASL(self.network, self.inputc)
    for m in self.measures.values():
        distr = net.distances_to_roi(self.network, self.inputc, m.roi)
        stat["stim to roi distances, mean", m.name] = np.mean(distr)
        stat["stim to roi distances, var", m.name] = np.var(distr)
        centrs = nx.closeness_centrality(self.network)
        stat["roi centralities", m.name] = [centrs[tuple(node)]
                                            for node in np.transpose(m.roi.nonzero())]
    return stat
def diameter(self):
    g = self.getG()
    try:
        diameter = nx.diameter(g)
    except:
        # nx.diameter raises NetworkXError when the graph is not connected
        # (infinite path length), so fall back to None in that case.
        diameter = None
    return diameter
def graph_info(g):
    result = {}
    components = list(nx.strongly_connected_component_subgraphs(g))
    in_degrees = g.in_degree()
    out_degrees = g.out_degree()
    highest_in_degree_node = sorted(in_degrees, key=lambda x: in_degrees[x], reverse=True)[0]
    highest_out_degree_node = sorted(out_degrees, key=lambda x: out_degrees[x], reverse=True)[0]
    result['highest in_degree node'] = highest_in_degree_node
    result['highest out_degree_node'] = highest_out_degree_node
    result['number of components'] = len(components)
    result['number of nodes'] = g.number_of_nodes()
    result['number of edges'] = g.number_of_edges()

    # Degree centrality
    in_degree_centrality = nx.in_degree_centrality(g)
    out_degree_centrality = nx.out_degree_centrality(g)
    result['sorted in_degree centrality'] = sorted(
        [(el, in_degree_centrality[el]) for el in g.nodes()],
        key=lambda x: x[1], reverse=True)
    result['sorted out_degree centrality'] = sorted(
        [(el, out_degree_centrality[el]) for el in g.nodes()],
        key=lambda x: x[1], reverse=True)

    # Compute closeness and betweenness once and reuse them.
    closeness_centrality = nx.closeness_centrality(g)
    betweenness_centrality = nx.betweenness_centrality(g)
    result['closeness_centrality'] = sorted(
        closeness_centrality.items(), key=lambda x: x[1], reverse=True)
    result['highest in_degree node closeness'] = closeness_centrality[highest_in_degree_node]
    result['highest out_degree node closeness'] = closeness_centrality[highest_out_degree_node]
    result['betweenness centrality'] = sorted(
        betweenness_centrality.items(), key=lambda x: x[1], reverse=True)
    result['highest in_degree node betweenness'] = betweenness_centrality[highest_in_degree_node]
    result['highest out_degree node betweenness'] = betweenness_centrality[highest_out_degree_node]

    # Diameter and average path length are only defined on a (strongly)
    # connected graph, so compute them on the largest strongly connected component.
    largest_component = sorted(components, key=lambda x: x.number_of_nodes(), reverse=True)[0]
    result['largest strongly component percent'] = largest_component.number_of_nodes() / float(g.number_of_nodes())
    result['largest strongly component diameter'] = nx.diameter(largest_component)
    result['largest strongly component average path length'] = nx.average_shortest_path_length(largest_component)

    result['average_degree (undirected)'] = sum(g.degree().values()) / float(g.number_of_nodes())
    result['avg_cluster_coefficient (transitivity)'] = nx.transitivity(g)
    return result