The following 50 code examples, extracted from open-source Python projects, illustrate how to use pylab.close().
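All of the examples below follow the same basic pattern: draw on a figure, either show it interactively or save it to disk, and then call pylab.close() so that figures created in a loop do not pile up in memory. Here is a minimal sketch of that pattern; the function name, data, and file name are illustrative placeholders, not taken from any of the projects below.

import numpy
import pylab

def plot_and_close(save=None):
    # Draw a trivial curve on a fresh figure.
    fig = pylab.figure()
    pylab.plot(numpy.arange(10) ** 2)
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
    # Release the figure; without this, repeated calls leak open figures.
    pylab.close(fig)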
def view_trigger_snippets_bis(trigger_snippets, elec_index, save=None):
    fig = pylab.figure()
    ax = fig.add_subplot(1, 1, 1)
    for n in range(0, trigger_snippets.shape[2]):
        y = trigger_snippets[:, elec_index, n]
        x = numpy.arange(- (y.size - 1) // 2, (y.size - 1) // 2 + 1)
        b = 0.5 + 0.5 * numpy.random.rand()
        ax.plot(x, y, color=(0.0, 0.0, b), linestyle='solid')
    ax.grid(True)
    ax.set_xlim([numpy.amin(x), numpy.amax(x)])
    ax.set_xlabel("time")
    ax.set_ylabel("amplitude")
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return

def view_dataset(X, color='blue', title=None, save=None):
    n_components = 2
    pca = PCA(n_components)
    pca.fit(X)
    x = pca.transform(X)
    fig = pylab.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.scatter(x[:, 0], x[:, 1], c=color, s=5, lw=0.1)
    ax.grid(True)
    if title is None:
        ax.set_title("Dataset ({} samples)".format(X.shape[0]))
    else:
        ax.set_title(title + " ({} samples)".format(X.shape[0]))
    ax.set_xlabel("1st component")
    ax.set_ylabel("2nd component")
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return

def view_loss_curve(losss, title=None, save=None):
    '''Plot loss curve'''
    x_min = 1
    x_max = len(losss) - 1
    fig = pylab.figure()
    ax = fig.gca()
    ax.semilogy(range(x_min, x_max + 1), losss[1:], color='blue', linestyle='solid')
    ax.grid(True, which='both')
    if title is None:
        ax.set_title("Loss curve")
    else:
        ax.set_title(title)
    ax.set_xlabel("iteration")
    ax.set_ylabel("loss")
    ax.set_xlim([x_min - 1, x_max + 1])
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return

def plot_convergence(history, prefix='', prefix2=''):
    plt.figure(figsize=(8, 5))
    ax = plt.subplot(111)
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    plt.plot(history["TC"], '-', lw=2.5, color=tableau20[0])
    x = len(history["TC"])
    y = np.max(history["TC"])
    plt.text(0.5 * x, 0.8 * y, "TC", fontsize=18, fontweight='bold', color=tableau20[0])
    if "additivity" in history:
        plt.plot(history["additivity"], '-', lw=2.5, color=tableau20[1])
        plt.text(0.5 * x, 0.3 * y, "additivity", fontsize=18, fontweight='bold', color=tableau20[1])
    plt.ylabel('TC', fontsize=12, fontweight='bold')
    plt.xlabel('# Iterations', fontsize=12, fontweight='bold')
    plt.suptitle('Convergence', fontsize=12)
    filename = '{}/summary/convergence{}.pdf'.format(prefix, prefix2)
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    plt.savefig(filename, bbox_inches="tight")
    plt.close('all')
    return True

def plot_heatmaps(data, mis, column_label, cont, topk=30, prefix=''):
    cmap = sns.cubehelix_palette(as_cmap=True, light=.9)
    m, nv = mis.shape
    for j in range(m):
        inds = np.argsort(- mis[j, :])[:topk]
        if len(inds) >= 2:
            plt.clf()
            order = np.argsort(cont[:, j])
            subdata = data[:, inds][order].T
            subdata -= np.nanmean(subdata, axis=1, keepdims=True)
            subdata /= np.nanstd(subdata, axis=1, keepdims=True)
            columns = [column_label[i] for i in inds]
            sns.heatmap(subdata, vmin=-3, vmax=3, cmap=cmap, yticklabels=columns,
                        xticklabels=False, mask=np.isnan(subdata))
            filename = '{}/heatmaps/group_num={}.png'.format(prefix, j)
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            plt.title("Latent factor {}".format(j))
            plt.yticks(rotation=0)
            plt.savefig(filename, bbox_inches='tight')
            plt.close('all')
            #plot_rels(data[:, inds], map(lambda q: column_label[q], inds), colors=cont[:, j],
            #          outfile=prefix + '/relationships/group_num=' + str(j), latent=labels[:, j], alpha=0.1)

def on_epoch_end(self, epoch, logs={}):
    self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % epoch))
    self.show_edit_distance(256)
    word_batch = next(self.text_img_gen)[0]
    res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words])
    for i in range(self.num_display_words):
        pylab.subplot(self.num_display_words, 1, i + 1)
        if K.image_dim_ordering() == 'th':
            the_input = word_batch['the_input'][i, 0, :, :]
        else:
            the_input = word_batch['the_input'][i, :, :, 0]
        pylab.imshow(the_input, cmap='Greys_r')
        pylab.xlabel('Truth = \'%s\' Decoded = \'%s\'' % (word_batch['source_str'][i], res[i]))
    fig = pylab.gcf()
    fig.set_size_inches(10, 12)
    pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % epoch))
    pylab.close()

# Input Parameters

def output_groups(tcs, alpha, mis, column_label, thresh=0, prefix=''):
    f = safe_open(prefix + '/text_files/groups.txt', 'w+')
    g = safe_open(prefix + '/text_files/groups_no_overlaps.txt', 'w+')
    m, nv = mis.shape
    for j in range(m):
        f.write('Group num: %d, TC(X;Y_j): %0.3f\n' % (j, tcs[j]))
        g.write('Group num: %d, TC(X;Y_j): %0.3f\n' % (j, tcs[j]))
        inds = np.where(alpha[j] * mis[j] > thresh)[0]
        inds = inds[np.argsort(-alpha[j, inds] * mis[j, inds])]
        for ind in inds:
            f.write(column_label[ind] + ', %0.3f, %0.3f, %0.3f\n' % (
                mis[j, ind], alpha[j, ind], mis[j, ind] * alpha[j, ind]))
        inds = np.where(alpha[j] == 1)[0]
        inds = inds[np.argsort(- mis[j, inds])]
        for ind in inds:
            g.write(column_label[ind] + ', %0.3f\n' % mis[j, ind])
    f.close()
    g.close()

def save(GUI):
    global txtResultPath
    if GUI:
        import pylab as pl
        import nest.raster_plot
        import nest.voltage_trace
        for key in spikedetectors:
            try:
                nest.raster_plot.from_device(spikedetectors[key], hist=True)
                pl.savefig(f_name_gen("", "spikes_" + key.lower()), dpi=dpi_n, format='png')
                pl.close()
            except Exception:
                print(" * * * from {0} is NOTHING".format(key))
    txtResultPath = 'txt/'
    logger.debug("Saving TEXT into {0}".format(txtResultPath))
    if not os.path.exists(txtResultPath):
        os.mkdir(txtResultPath)
    for key in spikedetectors:
        save_spikes(spikedetectors[key], name=key)
    with open(txtResultPath + 'timeSimulation.txt', 'w') as f:
        for item in times:
            f.write(item)

def sort_obsid_from_sqlite(sqlite_file):
    """
    Map each distinct obs_id to a 1-based sequence number and
    return the mapping plus the first and last obs_id.
    """
    import sqlite3 as dbdrv
    obs_dict = defaultdict(int)
    query = "SELECT DISTINCT(obs_id) FROM ac WHERE offline > -1 ORDER BY obs_id"
    dbconn = dbdrv.connect(sqlite_file)
    cur = dbconn.cursor()
    cur.execute(query)
    all_obs = cur.fetchall()  # not OK if we have millions of obs numbers
    cur.close()
    for c, obsid_row in enumerate(all_obs):
        obs_dict[obsid_row[0]] = c + 1
    return (obs_dict, all_obs[0][0], all_obs[-1][0])

def _get_epochs(self):
    import sqlite3 as dbdrv
    dbconn = dbdrv.connect(self._sqlite_file)
    q = "SELECT min(ts) from ac"
    cur = dbconn.cursor()
    cur.execute(q)
    dfirst_epoch = cur.fetchall()[0][0]
    cur.close()
    q = "SELECT max(ts) from ac"
    cur = dbconn.cursor()
    cur.execute(q)
    dlast_epoch = cur.fetchall()[0][0]
    cur.close()
    return (dfirst_epoch, dlast_epoch)

def pickleLoadACL(options):
    """ Return the fapDict """
    if os.path.exists(options.load_acl_file):
        try:
            pkl_file = open(options.load_acl_file, 'rb')
            print('Loading acl object from file %s' % options.load_acl_file)
            acl = pickle.load(pkl_file)
            pkl_file.close()
            # Validate before returning; in the original this check sat
            # unreachably after the return statement.
            if acl is None:
                raise Exception("The acl object is None when reading from the file")
            return acl
        except Exception as e:
            ex = str(e)
            print('Fail to load the acl object from file %s' % options.load_acl_file)
            raise e
    else:
        print('Cannot locate the acl object file %s' % options.load_acl_file)
        return None

def on_epoch_end(self, epoch, logs={}):
    self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch)))
    self.show_edit_distance(256)
    word_batch = next(self.text_img_gen)[0]
    res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words])
    if word_batch['the_input'][0].shape[0] < 256:
        cols = 2
    else:
        cols = 1
    for i in range(self.num_display_words):
        pylab.subplot(self.num_display_words // cols, cols, i + 1)
        if K.image_dim_ordering() == 'th':
            the_input = word_batch['the_input'][i, 0, :, :]
        else:
            the_input = word_batch['the_input'][i, :, :, 0]
        pylab.imshow(the_input.T, cmap='Greys_r')
        pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i]))
    fig = pylab.gcf()
    fig.set_size_inches(10, 13)
    pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch)))
    pylab.close()

def visualize_bin_list(self, bin_list, path):
    """ Will create a histogram of all bin_list entries and save it to the specified path """
    # TODO use savelogic here
    for jj, bin_entry in enumerate(bin_list):
        hist_x, hist_y = self._traceanalysis_logic.calculate_histogram(bin_entry, num_bins=50)
        pb.plot(hist_x[0:len(hist_y)], hist_y)
        fname = 'bin_' + str(jj) + '.png'
        savepath = os.path.join(path, fname)
        pb.savefig(savepath)
        pb.close()

# =========================================================================
# Connecting to GUI
# =========================================================================
# absolutely not working at the moment.

def AddKlineLayer(self, kline):
    """"""
    if 0:
        kline = stock.Kline  # dead assignment, used only as an IDE type hint
    x = []
    y = []
    k = 0
    for hisdat in kline.hisdats:
        if 0:
            hisdat = stock.Hisdat  # same IDE type hint trick
        x.append(k)
        y.append(hisdat.close)
        k += 1
    plt.plot(x, y, 'b')

#----------------------------------------------------------------------

def AddCloses(self, closes, color='b', m=0, s=1):
    """"""
    x = []
    i = 0
    ys = []
    for close in closes:
        x.append(i)
        i += 1
        y = close * s
        y += m
        ys.append(y)
    plt.plot(x, ys, color)

# a - [x, y, flag(buy/sell)]
#----------------------------------------------------------------------

def DrawDvs(pl, closes, curve, sign, dvs, pandl, sh, title, leag=None, lad=None):
    pl.figure()
    pl.subplot(311)
    pl.title("id:%s Sharpe ratio: %.2f" % (str(title), sh))
    pl.plot(closes)
    DrawLine(pl, sign, closes)
    pl.subplot(312)
    pl.grid()
    if dvs is not None:
        pl.plot(dvs)
    if isinstance(curve, np.ndarray):
        DrawZZ(pl, curve, 'r')
    if leag is not None:
        pl.plot(leag, 'r')
    if lad is not None:
        pl.plot(lad, 'b')
    #pl.plot(stock.GuiYiHua(closes[:i])[60:])
    pl.subplot(313)
    pl.plot(sign)
    pl.plot(pandl)
    pl.show()
    pl.close()

def DrawDvsAndZZ(pl, dvs, zz, closes=None):
    """Plot dvs together with the zigzag curve zz; optionally plot closes in a separate panel."""
    dvs = np.array(dvs)
    pl.figure()
    if closes is None:
        pl.plot(dvs)
        pl.plot(zz[:, 0], zz[:, 1], 'r')
    else:
        pl.subplot(211)
        pl.plot(closes)
        pl.grid()
        pl.subplot(212)
        pl.grid()
        pl.plot(dvs)
        pl.plot(zz[:, 0], zz[:, 1], 'r')
    pl.show()
    pl.close()

def getDayDatas(date_win, codes):
    date = help.MyDate(date_win[0])
    datas = np.zeros((len(codes) * 5, (help.MyDate(date_win[1]).d - date.d).days))
    for i, code in enumerate(codes):
        #print code
        guider = Guider(code)
        j = 0
        date = help.MyDate(date_win[0])
        while date.d < help.MyDate(date_win[1]).d:
            date.Next()
            #print date.echo()
            hisdat = guider.getDataFromDate(date.d)
            if not isinstance(hisdat, Hisdat):
                datas[i*5:i*5+5, j] = 0
            else:
                datas[i*5:i*5+5, j] = np.array([hisdat.open, hisdat.high, hisdat.low, hisdat.close, hisdat.volume])
            j += 1
    return datas

def DataToR():
    codes = simulator.ISimulator.getGupiaos(enum.all)
    s = ""
    i = 0
    l = []
    for code in codes:
        g = Guider(code, start_day='2012-3-1', end_day='2012-11-1')
        print(g.code, g.getSize())
        l.append(g.getSize())
        if g.getSize() > 100:
            g.hisdats = g.hisdats[-100:]
        if g.getSize() == 100:
            s += g.__DataToR()
            i += 1
        #if i == 600:
        #    break
    #print max(l), min(l)
    f = open('C:\\chromium\\src\\autoxd3\\R\\stocka.txt', 'w')
    f.write(s)
    f.close()

def GetLLV(self, type="close"):
    """"""
    a = []
    for i in range(0, self.getSize()):
        if 0:
            hisdat = Hisdat  # IDE type hint trick
        hisdat = self.getData(i)
        cur = 0
        if type == "close":
            cur = hisdat.close
        if type == "high":
            cur = hisdat.high
        if type == "low":
            cur = hisdat.low
        if type == "open":
            cur = hisdat.open
        if type == "vol" or type == "volume":
            cur = hisdat.volume
        a.append(cur)
    return min(a)

#----------------------------------------------------------------------

def XiangDuiQuJian(self):
    """Relative-range (xiang dui qu jian) trading strategy."""
    account = Account()
    for i in range(60, self.getSize(), 1):
        if 0:
            hisdat = Hisdat  # IDE type hint trick
        hisdat = self.getData(i)
        day = 60
        high = self.HHV("close", i, day)
        low = self.LLV("close", i, day)
        cur = hisdat.close
        v = (cur - low) / (high - low)
        if v > 0.9:
            account.sell(self.code, hisdat.close, -1, hisdat.date)
        if v < 0.1:
            account.buy(self.code, hisdat.close, -1, hisdat.date)
    print(account.money)
    print(account.getMoney())
    self.myprint()
    account.printWeiTuo()

def plot(self):
    pl.figure()
    # normalized (GuiYiHua) prices of the executed orders
    a = []
    for h in self.weituo_historys:
        a.append(h.price)
    a = GuiYiHua(a)
    pl.plot(a, 'b')
    # normalized total account money
    a = np.array(self.total_moneys)
    a = GuiYiHua(a)
    pl.plot(a, 'r')
    pl.legend(['price list', 'money list'])
    pl.show()
    pl.close()

def getSDSSImage(ra, dec, radius=1.0, xsize=800, opt='GML', **kwargs):
    """
    Download Sloan Digital Sky Survey images
    http://skyserver.sdss3.org/dr9/en/tools/chart/chart.asp

    radius (degrees)

    opts: (G) Grid, (L) Label, P (PhotoObj), S (SpecObj), O (Outline),
    (B) Bounding Box, (F) Fields, (M) Mask, (Q) Plates, (I) Invert
    """
    import subprocess
    import tempfile
    url = "http://skyservice.pha.jhu.edu/DR10/ImgCutout/getjpeg.aspx?"
    scale = 2. * radius * 3600. / xsize
    params = dict(ra=ra, dec=dec,
                  width=xsize, height=xsize,
                  scale=scale, opt=opt)
    query = '&'.join("%s=%s" % (k, v) for k, v in params.items())
    tmp = tempfile.NamedTemporaryFile(suffix='.jpeg')
    cmd = 'wget --progress=dot:mega -O %s "%s"' % (tmp.name, url + query)
    subprocess.call(cmd, shell=True)
    im = pylab.imread(tmp.name)
    tmp.close()
    return im

def drawStellarDensity(self, ax=None):
    if not ax:
        ax = plt.gca()
    # Stellar Catalog
    self._create_catalog()
    catalog = self.catalog
    #catalog=ugali.observation.catalog.Catalog(self.config,roi=self.roi)
    pix = ang2pix(self.nside, catalog.lon, catalog.lat)
    counts = collections.Counter(pix)
    pixels, number = numpy.array(sorted(counts.items())).T
    star_map = healpy.UNSEEN * numpy.ones(healpy.nside2npix(self.nside))
    star_map[pixels] = number
    star_map = numpy.where(star_map == 0, healpy.UNSEEN, star_map)
    #im = healpy.gnomview(star_map,**self.gnom_kwargs)
    #healpy.graticule(dpar=1,dmer=1,color='0.5',verbose=False)
    #pylab.close()
    im = drawHealpixMap(star_map, self.glon, self.glat, self.radius, coord=self.coord)
    #im = ax.imshow(im,origin='bottom')
    try:
        ax.cax.colorbar(im)
    except:
        pylab.colorbar(im, ax=ax)
    ax.annotate("Stars", **self.label_kwargs)
    return im

def drawMask(self, ax=None, mask=None):
    if not ax:
        ax = plt.gca()
    # MAGLIM Mask
    if mask is None:
        filenames = self.config.getFilenames()
        catalog_pixels = self.roi.getCatalogPixels()
        mask_map = ugali.utils.skymap.readSparseHealpixMaps(filenames['mask_1'][catalog_pixels], field='MAGLIM')
    else:
        mask_map = healpy.UNSEEN * np.ones(healpy.nside2npix(self.config['coords']['nside_pixel']))
        mask_map[mask.roi.pixels] = mask.mask_1.mask_roi_sparse
    mask_map = numpy.where(mask_map == healpy.UNSEEN, 0, mask_map)
    #im = healpy.gnomview(mask_map,**self.gnom_kwargs)
    #healpy.graticule(dpar=1,dmer=1,color='0.5',verbose=False)
    #pylab.close()
    #im = ax.imshow(im,origin='bottom')
    im = drawHealpixMap(mask_map, self.glon, self.glat, self.radius, coord=self.coord)
    try:
        ax.cax.colorbar(im)
    except:
        pylab.colorbar(im)
    ax.annotate("Mask", **self.label_kwargs)
    return im

def removeIllumination2(self, size, title=''):
    out = ndimage.filters.gaussian_filter(self.image, size)
    pylab.figure()
    pylab.subplot(2, 2, 1)
    pylab.axis('off')
    pylab.imshow(self.image)
    pylab.subplot(2, 2, 2)
    pylab.axis('off')
    pylab.imshow(out)
    pylab.subplot(2, 2, 3)
    pylab.axis('off')
    pylab.imshow(self.image - out)
    pylab.subplot(2, 2, 4)
    pylab.axis('off')
    pylab.imshow(self.smooth - out)
    if title != '':
        pylab.savefig(title)
        pylab.close()
    else:
        pylab.show()
    self.smooth -= out
    return self.image - out

def plot(self, outpath=''):
    pylab.figure(figsize=(17, 10))
    diff = self.f2 - self.f3
    pylab.subplot(2, 1, 1)
    pylab.plot(range(self.lengthSeq), self.f2, 'r-', label="f2")
    pylab.plot(range(self.lengthSeq), self.f3, 'g-', label="f3")
    pylab.xlim([0., self.lengthSeq])
    pylab.tick_params(axis='both', which='major', labelsize=25)
    pylab.subplot(2, 1, 2)
    diff2 = diff / self.f3
    diff2 /= np.max(diff2)
    pylab.plot(range(self.lengthSeq), diff2, 'b-', label="Rescaled (by max) difference / f3")
    pylab.xlabel("Temps (en images)", fontsize=25)
    pylab.tick_params(axis='both', which='major', labelsize=25)
    pylab.xlim([0., self.lengthSeq])
    #pylab.legend(loc=2, prop={'size': 15})
    pylab.savefig(outpath)
    pylab.close()

def view_waveforms_clusters(data, halo, threshold, templates, amps_lim, n_curves=200, save=False):
    nb_templates = templates.shape[1]
    # Cast to int so pylab.subplot accepts the panel counts.
    n_panels = int(numpy.ceil(numpy.sqrt(nb_templates)))
    mask = numpy.where(halo > -1)[0]
    clust_idx = numpy.unique(halo[mask])
    fig = pylab.figure()
    square = True
    center = (len(data[0]) - 1) // 2
    for count, i in enumerate(range(nb_templates)):
        if square:
            pylab.subplot(n_panels, n_panels, count + 1)
            if numpy.mod(count, n_panels) != 0:
                pylab.setp(pylab.gca(), yticks=[])
            if count < n_panels * (n_panels - 1):
                pylab.setp(pylab.gca(), xticks=[])
        subcurves = numpy.where(halo == clust_idx[count])[0]
        for k in numpy.random.permutation(subcurves)[:n_curves]:
            pylab.plot(data[k], '0.5')
        pylab.plot(templates[:, count], 'r')
        pylab.plot(amps_lim[count][0] * templates[:, count], 'b', alpha=0.5)
        pylab.plot(amps_lim[count][1] * templates[:, count], 'b', alpha=0.5)
        xmin, xmax = pylab.xlim()
        pylab.plot([xmin, xmax], [-threshold, -threshold], 'k--')
        pylab.plot([xmin, xmax], [threshold, threshold], 'k--')
        #pylab.ylim(-1.5*threshold, 1.5*threshold)
        ymin, ymax = pylab.ylim()
        pylab.plot([center, center], [ymin, ymax], 'k--')
        pylab.title('Cluster %d' % i)
    if nb_templates > 0:
        pylab.tight_layout()
    if save:
        pylab.savefig(os.path.join(save[0], 'waveforms_%s' % save[1]))
        pylab.close()
    else:
        pylab.show()
    del fig

def view_artefact(data, save=False):
    fig = pylab.figure()
    pylab.plot(data.T)
    if save:
        pylab.savefig(os.path.join(save[0], 'artefact_%s' % save[1]))
        pylab.close()
    else:
        pylab.show()
    del fig

def view_trigger_snippets(trigger_snippets, chans, save=None):
    # Create output directory if necessary.
    if os.path.exists(save):
        for f in os.listdir(save):
            p = os.path.join(save, f)
            os.remove(p)
        os.removedirs(save)
    os.makedirs(save)
    # Plot figures.
    fig = pylab.figure()
    for (c, chan) in enumerate(chans):
        ax = fig.add_subplot(1, 1, 1)
        for n in range(0, trigger_snippets.shape[2]):
            y = trigger_snippets[:, c, n]
            x = numpy.arange(- (y.size - 1) // 2, (y.size - 1) // 2 + 1)
            b = 0.5 + 0.5 * numpy.random.rand()
            ax.plot(x, y, color=(0.0, 0.0, b), linestyle='solid')
        y = numpy.mean(trigger_snippets[:, c, :], axis=1)
        x = numpy.arange(- (y.size - 1) // 2, (y.size - 1) // 2 + 1)
        ax.plot(x, y, color=(1.0, 0.0, 0.0), linestyle='solid')
        ax.grid(True)
        ax.set_xlim([numpy.amin(x), numpy.amax(x)])
        ax.set_title("Channel %d" % chan)
        ax.set_xlabel("time")
        ax.set_ylabel("amplitude")
        if save is not None:
            # Save plot.
            filename = "channel-%d.png" % chan
            path = os.path.join(save, filename)
            pylab.savefig(path)
        fig.clf()
    if save is None:
        pylab.show()
    else:
        pylab.close(fig)
    return

def view_mahalanobis_distribution(data_1, data_2, save=None):
    '''Plot Mahalanobis distribution Before and After'''
    fig = pylab.figure()
    ax = fig.add_subplot(1, 2, 1)
    if len(data_1) == 3:
        d_gt, d_ngt, d_noi = data_1
    elif len(data_1) == 2:
        d_gt, d_ngt = data_1
    if len(data_1) == 3:
        ax.hist(d_noi, bins=50, color='k', alpha=0.5, label="Noise")
    ax.hist(d_ngt, bins=50, color='b', alpha=0.5, label="Non GT")
    ax.hist(d_gt, bins=75, color='r', alpha=0.5, label="GT")
    ax.grid(True)
    ax.set_title("Before")
    ax.set_ylabel('# Samples')
    ax.set_xlabel('Distances')
    if len(data_2) == 3:
        d_gt, d_ngt, d_noi = data_2
    elif len(data_2) == 2:
        d_gt, d_ngt = data_2
    ax = fig.add_subplot(1, 2, 2)
    if len(data_2) == 3:
        ax.hist(d_noi, bins=50, color='k', alpha=0.5, label="Noise")
    ax.hist(d_ngt, bins=50, color='b', alpha=0.5, label="Non GT")
    ax.hist(d_gt, bins=75, color='r', alpha=0.5, label="GT")
    ax.grid(True)
    ax.set_title("After")
    ax.set_ylabel("")
    ax.set_xlabel('Distances')
    ax.legend()
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return

def plot_bar_chart(page, datasets, dataset_labels, dataset_colors, x_group_labels,
                   err=0, title=None, xlabel='Bins', ylabel='Counts'):
    assert len(datasets) == len(dataset_colors) == len(dataset_labels)
    for dataset in datasets:
        assert len(dataset) == len(datasets[0])
        assert len(dataset) == len(x_group_labels)
    num_x_groups = len(datasets[0])
    x_group_locations = pylab.arange(num_x_groups)
    width = 1.0 / float(len(datasets) + 1)
    figure = pylab.figure()
    axis = figure.add_subplot(111)
    bars = []
    for i in range(len(datasets)):
        bar = axis.bar(x_group_locations + (width * i), datasets[i], width, yerr=err,
                       color=dataset_colors[i],
                       error_kw=dict(ecolor='pink', lw=3, capsize=6, capthick=3))
        bars.append(bar)
    if title is not None:
        axis.set_title(title)
    if ylabel is not None:
        axis.set_ylabel(ylabel)
    if xlabel is not None:
        axis.set_xlabel(xlabel)
    axis.set_xticks(x_group_locations + width * len(datasets) / 2)
    x_tick_names = axis.set_xticklabels(x_group_labels)
    rot = 0 if num_x_groups == 1 else 15
    pylab.setp(x_tick_names, rotation=rot, fontsize=10)
    axis.set_xlim(-width, num_x_groups)
    y_tick_names = axis.get_yticklabels()
    pylab.setp(y_tick_names, rotation=0, fontsize=10)
    axis.legend([bar[0] for bar in bars], dataset_labels)
    page.savefig()
    pylab.close()

def output_groups(ws, moments, alpha, mis, column_label, thresh=0, prefix=''):
    tc = moments["TC"]
    tcs = moments["TCs"]
    add = moments["additivity"]
    dual = (moments['X_i Y_j'] * moments['X_i Z_j']).T
    f = safe_open(prefix + '/summary/groups.txt', 'w+')
    g = safe_open(prefix + '/summary/groups_no_overlaps.txt', 'w+')
    h = safe_open(prefix + '/summary/summary.txt', 'w+')
    h.write('Group, TC\n')
    m, nv = mis.shape
    f.write('variable, weight, MI\n')
    g.write('variable, weight, MI\n')
    for j in range(m):
        f.write('Group num: %d, TC(X;Y_j): %0.6f\n' % (j, tcs[j]))
        g.write('Group num: %d, TC(X;Y_j): %0.6f\n' % (j, tcs[j]))
        h.write('%d, %0.6f\n' % (j, tcs[j]))
        inds = np.where(alpha[j] > 0)[0]
        inds = inds[np.argsort(-np.abs(ws)[j][inds])]
        for ind in inds:
            f.write(column_label[ind] + ', {:.3f}, {:.3f}\n'.format(ws[j][ind], mis[j][ind]))
        inds = np.where(np.argmax(np.abs(ws), axis=0) == j)[0]
        inds = inds[np.argsort(-np.abs(ws)[j][inds])]
        for ind in inds:
            g.write(column_label[ind] + ', {:.3f}, {:.3f}\n'.format(ws[j][ind], mis[j][ind]))
    h.write('Total: {:f}\n'.format(np.sum(tcs)))
    h.write('The total of individual TCs should approximately equal the objective: {:f}\n'.format(tc))
    h.write('If not, this signals redundancy/synergy in the final solution (measured by additivity: {:f}'.format(add))
    f.close()
    g.close()
    h.close()

def output_labels(labels, row_label, prefix=''):
    f = safe_open(prefix + '/summary/labels.txt', 'w+')
    ns, m = labels.shape
    for l in range(ns):
        f.write(row_label[l] + ',' + ','.join(map(str, labels[l, :])) + '\n')
    f.close()

def on_epoch_end(self, epoch, logs={}):
    self.model.save_weights(os.path.join(self.output_dir, 'weights%02d.h5' % (epoch)))
    self.show_edit_distance(256)
    word_batch = next(self.text_img_gen)[0]
    res = decode_batch(self.test_func, word_batch['the_input'][0:self.num_display_words])
    if word_batch['the_input'][0].shape[0] < 256:
        cols = 2
    else:
        cols = 1
    for i in range(self.num_display_words):
        pylab.subplot(self.num_display_words // cols, cols, i + 1)
        if K.image_data_format() == 'channels_first':
            the_input = word_batch['the_input'][i, 0, :, :]
        else:
            the_input = word_batch['the_input'][i, :, :, 0]
        pylab.imshow(the_input.T, cmap='Greys_r')
        pylab.xlabel('Truth = \'%s\'\nDecoded = \'%s\'' % (word_batch['source_str'][i], res[i]))
    fig = pylab.gcf()
    fig.set_size_inches(10, 13)
    pylab.savefig(os.path.join(self.output_dir, 'e%02d.png' % (epoch)))
    pylab.close()

def plot_heatmaps(data, labels, alpha, mis, column_label, cont, topk=20, prefix='', focus=''):
    cmap = sns.cubehelix_palette(as_cmap=True, light=.9)
    m, nv = mis.shape
    for j in range(m):
        inds = np.where(np.logical_and(alpha[j] > 0, mis[j] > 0.))[0]
        inds = inds[np.argsort(- alpha[j, inds] * mis[j, inds])][:topk]
        if focus in column_label:
            ifocus = column_label.index(focus)
            if ifocus not in inds:
                inds = np.insert(inds, 0, ifocus)
        if len(inds) >= 2:
            plt.clf()
            order = np.argsort(cont[:, j])
            subdata = data[:, inds][order].T
            subdata -= np.nanmean(subdata, axis=1, keepdims=True)
            subdata /= np.nanstd(subdata, axis=1, keepdims=True)
            columns = [column_label[i] for i in inds]
            sns.heatmap(subdata, vmin=-3, vmax=3, cmap=cmap, yticklabels=columns,
                        xticklabels=False, mask=np.isnan(subdata))
            filename = '{}/heatmaps/group_num={}.png'.format(prefix, j)
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            plt.title("Latent factor {}".format(j))
            plt.savefig(filename, bbox_inches='tight')
            plt.close('all')
            #plot_rels(data[:, inds], list(map(lambda q: column_label[q], inds)), colors=cont[:, j],
            #          outfile=prefix + '/relationships/group_num=' + str(j), latent=labels[:, j], alpha=0.1)

def plot_pairplots(data, labels, alpha, mis, column_label, topk=5, prefix='', focus=''):
    cmap = sns.cubehelix_palette(as_cmap=True, light=.9)
    plt.rcParams.update({'font.size': 32})
    m, nv = mis.shape
    for j in range(m):
        inds = np.where(np.logical_and(alpha[j] > 0, mis[j] > 0.))[0]
        inds = inds[np.argsort(- alpha[j, inds] * mis[j, inds])][:topk]
        if focus in column_label:
            ifocus = column_label.index(focus)
            if ifocus not in inds:
                inds = np.insert(inds, 0, ifocus)
        if len(inds) >= 2:
            plt.clf()
            subdata = data[:, inds]
            columns = [column_label[i] for i in inds]
            subdata = pd.DataFrame(data=subdata, columns=columns)
            try:
                sns.pairplot(subdata, kind="reg", diag_kind="kde", size=5, dropna=True)
                filename = '{}/pairplots_regress/group_num={}.pdf'.format(prefix, j)
                if not os.path.exists(os.path.dirname(filename)):
                    os.makedirs(os.path.dirname(filename))
                plt.suptitle("Latent factor {}".format(j), y=1.01)
                plt.savefig(filename, bbox_inches='tight')
                plt.clf()
            except:
                pass
            subdata['Latent factor'] = labels[:, j]
            try:
                sns.pairplot(subdata, kind="scatter", dropna=True,
                             vars=subdata.columns.drop('Latent factor'),
                             hue="Latent factor", diag_kind="kde", size=5)
                filename = '{}/pairplots/group_num={}.pdf'.format(prefix, j)
                if not os.path.exists(os.path.dirname(filename)):
                    os.makedirs(os.path.dirname(filename))
                plt.suptitle("Latent factor {}".format(j), y=1.01)
                plt.savefig(filename, bbox_inches='tight')
                plt.close('all')
            except:
                pass

def output_labels(labels, row_label, prefix=''):
    f = safe_open(prefix + '/text_files/labels.txt', 'w+')
    ns, m = labels.shape
    for l in range(ns):
        f.write(row_label[l] + ',' + ','.join(map(str, labels[l, :])) + '\n')
    f.close()

def output_strong(tcs, alpha, mis, labels, prefix=''):
    f = safe_open(prefix + '/text_files/most_deterministic_groups.txt', 'w+')
    m, n = alpha.shape
    topk = 5
    ixy = np.clip(np.sum(alpha * mis, axis=1) - tcs, 0, np.inf)
    hys = np.array([entropy(labels[:, j]) for j in range(m)]).clip(1e-6)
    ntcs = [(np.sum(np.sort(alpha[j] * mis[j])[-topk:]) - ixy[j]) / ((topk - 1) * hys[j]) for j in range(m)]
    f.write('Group num., NTC\n')
    for j, ntc in sorted(enumerate(ntcs), key=lambda q: -q[1]):
        f.write('%d, %0.3f\n' % (j, ntc))
    f.close()

def anomalies(log_z, row_label=None, prefix=''):
    from scipy.special import erf
    ns = log_z.shape[1]
    if row_label is None:
        row_label = list(map(str, range(ns)))
    a_score = np.sum(log_z[:, :, 0], axis=0)
    mean, std = np.mean(a_score), np.std(a_score)
    a_score = (a_score - mean) / std
    percentile = 1. / ns
    anomalies = np.where(0.5 * (1 - erf(a_score / np.sqrt(2))) < percentile)[0]
    f = safe_open(prefix + '/text_files/anomalies.txt', 'w+')
    for i in anomalies:
        f.write(row_label[i] + ', %0.1f\n' % a_score[i])
    f.close()

def plot_convergence(tc_history, prefix='', prefix2=''):
    pylab.plot(tc_history)
    pylab.xlabel('# iterations')
    filename = '{}/text_files/convergence{}.pdf'.format(prefix, prefix2)
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    pylab.savefig(filename)
    pylab.close('all')
    return True

def saveBEVImageWithAxes(data, outputname, cmap=None, xlabel='x [m]', ylabel='z [m]',
                         rangeX=[-10, 10], rangeXpx=None, numDeltaX=5,
                         rangeZ=[7, 62], rangeZpx=None, numDeltaZ=5, fontSize=16):
    '''
    :param data:
    :param outputname:
    :param cmap:
    '''
    aspect_ratio = float(data.shape[1]) / data.shape[0]
    fig = pylab.figure()
    Scale = 8
    # add +1 to get axis text
    fig.set_size_inches(Scale * aspect_ratio + 1, Scale * 1)
    ax = pylab.gca()
    #ax.set_axis_off()
    #fig.add_axes(ax)
    if cmap is not None:
        pylab.set_cmap(cmap)
    #ax.imshow(data, interpolation='nearest', aspect='normal')
    ax.imshow(data, interpolation='nearest')
    if rangeXpx is None:
        rangeXpx = (0, data.shape[1])
    if rangeZpx is None:
        rangeZpx = (0, data.shape[0])
    modBev_plot(ax, rangeX, rangeXpx, numDeltaX, rangeZ, rangeZpx, numDeltaZ,
                fontSize, xlabel=xlabel, ylabel=ylabel)
    #plt.savefig(outputname, bbox_inches='tight', dpi=dpi)
    pylab.savefig(outputname, dpi=data.shape[0] / Scale)
    pylab.close()
    fig.clear()

def getPageSize():
    import resource
    f = open("/proc/meminfo")
    mem = f.readline()
    f.close()
    return resource.getpagesize() / (1024 * float(mem[10:-3].strip()))

def save(GUI):
    global txtResultPath
    if GUI:
        import pylab as pl
        import nest.raster_plot
        import nest.voltage_trace
        logger.debug("Saving IMAGES into {0}".format(SAVE_PATH))
        for key in spike_detectors:
            try:
                nest.raster_plot.from_device(spike_detectors[key], hist=True)
                pl.savefig("spikes_" + str(key) + ".png", dpi=dpi_n, format='png')
                pl.close()
            except Exception:
                print("From spikes {0} is NOTHING".format(key))
        for key in multimeters:
            try:
                nest.voltage_trace.from_device(multimeters[key])
                pl.savefig("volt_" + str(key) + ".png", dpi=dpi_n, format='png')
                pl.close()
            except Exception:
                print("From MM {0} is NOTHING".format(key))
    txtResultPath = SAVE_PATH + 'txt/'
    logger.debug("Saving TEXT into {0}".format(txtResultPath))
    if not os.path.exists(txtResultPath):
        os.mkdir(txtResultPath)
    for key in spike_detectors:
        save_spikes(spike_detectors[key], name=key)
    with open(txtResultPath + 'timeSimulation.txt', 'w') as f:
        for item in times:
            f.write(item)

def multipage(filename, figs=None):
    pp = PdfPages(filename)
    if figs is None:
        figs = [plt.figure(n) for n in plt.get_fignums()]
    for fig in figs:
        fig.savefig(pp, format='pdf')
    pp.close()
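A usage sketch for the multipage helper above, assuming PdfPages was imported from matplotlib.backends.backend_pdf and plt is matplotlib.pyplot (the extract does not show its imports); the two throwaway figures and the output file name are placeholders:

import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

# Create two simple figures, write them both into one PDF,
# then close every figure so nothing lingers after saving.
for k in range(2):
    plt.figure()
    plt.plot([0, 1], [0, k + 1])
multipage('all_figures.pdf')
plt.close('all')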