The following 15 code examples, extracted from open-source Python projects, illustrate how to use networkx.write_gexf().
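Before the project examples, here is a minimal, self-contained sketch of the basic call pattern (the file name example.gexf and the node/edge attributes are illustrative, not taken from any project below): build a small graph, write it to disk as GEXF, and read it back.

import networkx as nx

G = nx.Graph()
G.add_node(0, label="alice")
G.add_node(1, label="bob")
G.add_edge(0, 1, weight=1.0)

# Write the graph to a GEXF file, then read it back.
nx.write_gexf(G, "example.gexf", encoding="utf-8")
H = nx.read_gexf("example.gexf", node_type=int)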
def search(self, selected_affils, conf_name, year, exclude_papers=[], rtype="affil", force=False):
    """
    Checks if the graph model already exists, otherwise creates one and
    runs the ranking on the nodes.
    """
    graph = build_graph(conf_name,
                        year,
                        self.params['H'],
                        self.params['min_topic_lift'],
                        self.params['min_ngram_lift'],
                        exclude_papers, force, load=True, save=self.save)

    # Store number of nodes for checking later
    self.nnodes = graph.number_of_nodes()

    # Rank nodes using subgraph
    scores = rank_nodes(graph, return_type=rtype, **self.params)

    # Adds the score to the nodes and writes to disk. A stupid cast
    # is required because write_gexf can't handle np.float64
    scores = {nid: float(score) for nid, score in scores.items()}
    nx.set_node_attributes(graph, "score", scores)

    # nx.write_gexf(graph, utils.get_graph_file_name(model_folder, query))

    # Returns the top values of the type of node of interest
    results = get_top_nodes(graph, scores.items(), limit=selected_affils, return_type=rtype)

    # Add to class object for future access
    self.graph = graph

    return results
def search(self, query, exclude=[], limit=20, rtype="paper", force=False):
    """
    Checks if the graph model already exists, otherwise creates one and
    runs the ranking on the nodes.
    """
    graph = build_graph(query,
                        self.params['K'],
                        self.params['H'],
                        self.params['min_topic_lift'],
                        self.params['min_ngram_lift'],
                        exclude, force, load=True, save=self.save)

    # Store number of nodes for checking later
    self.nnodes = graph.number_of_nodes()

    # Rank nodes using subgraph
    scores = ranker.rank_nodes(graph, limit=limit, return_type=rtype, **self.params)

    # Adds the score to the nodes and writes to disk. A stupid cast
    # is required because write_gexf can't handle np.float64
    scores = {nid: float(score) for nid, score in scores.items()}
    nx.set_node_attributes(graph, "score", scores)

    # nx.write_gexf(graph, utils.get_graph_file_name(model_folder, query))

    # Returns the top values of the type of node of interest
    results = get_top_nodes(graph, scores.items(), limit=limit, return_type=rtype)

    # Add to class object for future access
    self.graph = graph

    return [str(pub_id) for _nid, pub_id, _score in results]
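The two examples above note that write_gexf cannot serialize numpy scalars such as np.float64, hence the explicit cast. A minimal sketch of that cast, with a made-up node name and score rather than values from the projects above:

import networkx as nx
import numpy as np

G = nx.Graph()
score = np.float64(0.87)              # e.g. a ranking score produced with numpy
G.add_node("a", score=float(score))   # cast to a builtin float before writing
nx.write_gexf(G, "scored.gexf")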
def __init__(self, **params):
    self.params = params

    if not os.path.exists(config.MENG_GRAPH_PATH):
        log.debug("Meng graph file not found. Building one at '%s'" % config.MENG_GRAPH_PATH)

        mb = MengModelBuilder()
        self.graph = mb.build()
        del mb

        log.debug("Meng graph built. %d nodes and %d edges."
                  % (self.graph.number_of_nodes(), self.graph.number_of_edges()))

        utils.ensure_folder(os.path.dirname(config.MENG_GRAPH_PATH))
        nx.write_gexf(self.graph, config.MENG_GRAPH_PATH)

        log.debug("Meng graph saved.")
    else:
        log.debug("Reading Meng graph file at '%s'" % config.MENG_GRAPH_PATH)
        self.graph = nx.read_gexf(config.MENG_GRAPH_PATH, node_type=int)
def build_graph_for_file(file_path, dir_name, name):
    data = open(file_path, 'r')
    G = nx.DiGraph()
    rows = csv.reader(data, quoting=csv.QUOTE_NONNUMERIC)
    next(rows)  # skip the header
    for row in rows:
        row_fil = list(filter(lambda x: type(x) is float, row))
        if G.has_node(row_fil[0]) is not True:
            G.add_node(row_fil[0], market_id=row_fil[1])
        if G.has_node(row_fil[2]) is not True:
            G.add_node(row_fil[2], market_id=row_fil[3])
        if G.has_edge(row_fil[0], row_fil[2]):
            old = G.get_edge_data(row_fil[0], row_fil[2])
            G.add_edge(row_fil[0], row_fil[2],
                       num_of_people=old['num_of_people'] + row_fil[4],
                       total_price=old['total_price'] + row_fil[5])
        else:
            G.add_edge(row_fil[0], row_fil[2],
                       num_of_people=row_fil[4],
                       total_price=row_fil[5])
    output_file_path = 'graphs/' + name + '.gexf'
    nx.write_gexf(G, output_file_path)
def get_and_save_to_gexf(self, filepath='../output/net.gexf'):
    net = nx.DiGraph()
    for ind, item in enumerate(self.conceptions):
        net.add_node(ind, {'attvalues': ind}, label=item)
        x = random.uniform(0, 600)
        y = random.uniform(0, 600)
        r = random.randint(0, 256)
        g = random.randint(0, 256)
        b = random.randint(0, 256)
        net.node[ind]['viz'] = {'color': {'r': r, 'g': g, 'b': b, 'a': 0},
                                'size': 50,
                                'position': {'x': x, 'y': y, 'z': 0}}
    assertions = list()
    for edge in self.edges:
        u = self.conceptions.index(edge[0])
        v = self.conceptions.index(edge[1])
        assertions.append((u, v, edge[2], edge[3]))
        net.add_edge(u, v, label=edge[3], weight=edge[2])
    nx.write_gexf(net, filepath, encoding='utf-8', version="1.2draft")
    net_data = {'nodes': self.conceptions, 'edges': assertions}
    return net_data
def write_graph(graph, outfile):
    """
    Write the networkx graph into a file in the gexf format.
    """
    log.info("Dumping graph: %d nodes and %d edges."
             % (graph.number_of_nodes(), graph.number_of_edges()))
    nx.write_gexf(graph, outfile, encoding="utf-8")
def build_graph(conf_name, year, H, min_topic_lift, min_ngram_lift,
                exclude=[], force=False, save=True, load=False):
    """
    Utility method to build and return the graph model. First we check if
    a graph file exists. If not, we check if the builder class is already
    instantiated. If not, we do it and proceed to build the graph.
    """
    global builder

    model_folder = config.IN_MODELS_FOLDER % (config.DATASET, H)

    # Creates model folder if non existing
    if not os.path.exists(model_folder):
        os.makedirs(model_folder)

    graph_file = utils.get_graph_file_name(conf_name, model_folder)
    if force or (not os.path.exists(graph_file)):

        if not builder:
            builder = kddcup_model.ModelBuilder()

        # Builds the graph file
        graph = builder.build(conf_name, year, H, min_topic_lift, min_ngram_lift, exclude)

        # Stores gexf copy for caching purposes
        if save:
            nx.write_gexf(graph, graph_file)

        return graph

    else:
        # A gexf copy already exists in disk. Just load it and return
        # print graph_file
        try:
            graph = nx.read_gexf(graph_file, node_type=int)
        except:
            print "Problem opening '%s'." % graph_file
            sys.exit(1)

        return graph
def write_graph(graph, folder, query):
    graph_file = utils.get_graph_file_name(folder, query)
    nx.write_gexf(graph, graph_file)
def write_graph(graph, outfile):
    '''
    Write the networkx graph into a file in the gexf format.
    '''
    log.info("Dumping graph: %d nodes and %d edges."
             % (graph.number_of_nodes(), graph.number_of_edges()))
    nx.write_gexf(graph, outfile, encoding="utf-8")
def create_lpu(file_name, lpu_name, N_sensory, N_local, N_proj):
    """
    Create a generic LPU graph.

    Creates a GEXF file containing the neuron and synapse parameters for an
    LPU containing the specified number of local and projection neurons. The
    GEXF file also contains the parameters for a set of sensory neurons that
    accept external input. All neurons are either spiking or graded potential
    neurons; the Leaky Integrate-and-Fire model is used for the former, while
    the Morris-Lecar model is used for the latter (i.e., the neuron's membrane
    potential is deemed to be its output rather than the time when it emits an
    action potential). Synapses use either the alpha function model or a
    conductance-based model.

    Parameters
    ----------
    file_name : str
        Output GEXF file name.
    lpu_name : str
        Name of LPU. Used in port identifiers.
    N_sensory : int
        Number of sensory neurons.
    N_local : int
        Number of local neurons.
    N_proj : int
        Number of projection neurons.

    Returns
    -------
    g : networkx.MultiDiGraph
        Generated graph.
    """

    g = create_lpu_graph(lpu_name, N_sensory, N_local, N_proj)
    nx.write_gexf(g, file_name)
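A hypothetical call of the helper above (the file name, LPU name, and neuron counts are arbitrary), followed by reloading the written GEXF file to inspect it:

import networkx as nx

create_lpu('generic_lpu.gexf', 'lpu0', N_sensory=30, N_local=30, N_proj=30)

# Reload the file that create_lpu wrote and inspect its size.
g = nx.read_gexf('generic_lpu.gexf')
print(g.number_of_nodes(), g.number_of_edges())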
def get_weights(self):
    G = self.G
    pr = self.pr

    max_pagerank = max(pr.itervalues())  # get the largest count to scale weights between 0 and 1.

    t = datetime.datetime.now()
    ts = int(time.mktime(t.timetuple()))
    temp = tempfile.mktemp(prefix=str(ts), suffix=".gexf")
    nx.write_gexf(G, temp)

    for (k, v) in pr.iteritems():
        yield (k, float(v / max_pagerank))
def main(argv):
    query = None
    usr = None
    output_file = None
    pwd = None
    n = 20
    try:
        opts, _args_ = getopt.getopt(argv, "hq:o:n:u:p:")
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    for opt, arg in opts:
        if opt == '-h':
            sys.exit()
        elif opt == "-q":
            query = arg
        elif opt == "-o":
            output_file = arg
        elif opt == "-n":
            n = int(arg)
        elif opt == "-u":
            usr = arg
        elif opt == "-p":
            pwd = arg
        else:
            print "Invalid option: %s" % opt

    # Check mandatory arguments
    if (not query or not usr or not pwd):
        usage()
        sys.exit(2)

    s = searchers.Searcher(**config.PARAMS)
    pub_ids = s.search(query, limit=n)

    if not output_file:
        output_file = utils.get_graph_file_name(query)

    # Writes the graph structure as a gexf file
    nx.write_gexf(s.graph, output_file)

    # Prints the results
    db = MyMySQL(db='csx', user=usr, passwd=pwd)
    for id in pub_ids:
        print "%12s\t %s" % (id, db.select_one("title", table="papers", where="id='%s'" % id))