We have extracted the following 50 code examples from open-source Python projects to illustrate how to use pylab.scatter().
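Before the project examples, here is a minimal, self-contained sketch of the basic call. It uses synthetic random data and hypothetical variable names, is not taken from any of the projects below, and only shows the core arguments (x, y, point size s, colors c, plus an optional colorbar).

import numpy as np
import pylab

# Synthetic data: 200 random points, colored by their distance from the origin.
x = np.random.randn(200)
y = np.random.randn(200)
values = np.hypot(x, y)

pylab.figure()
pylab.scatter(x, y, c=values, s=25, edgecolors='none')  # c can be a single color or an array of values
pylab.colorbar()                                        # useful when c is an array
pylab.xlabel('x')
pylab.ylabel('y')
pylab.show()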
def twoDimensionalScatter(title, title_x, title_y,
                          x, y,
                          lim_x=None, lim_y=None,
                          color='b', size=20, alpha=None):
    """
    Create a two-dimensional scatter plot.

    INPUTS
    """
    pylab.figure()

    pylab.scatter(x, y, c=color, s=size, alpha=alpha, edgecolors='none')

    pylab.xlabel(title_x)
    pylab.ylabel(title_y)
    pylab.title(title)
    if type(color) is not str:
        pylab.colorbar()

    if lim_x:
        pylab.xlim(lim_x[0], lim_x[1])
    if lim_y:
        pylab.ylim(lim_y[0], lim_y[1])

############################################################
def view_dataset(X, color='blue', title=None, save=None):
    n_components = 2
    pca = PCA(n_components)
    pca.fit(X)
    x = pca.transform(X)
    fig = pylab.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.scatter(x[:, 0], x[:, 1], c=color, s=5, lw=0.1)
    ax.grid(True)
    if title is None:
        ax.set_title("Dataset ({} samples)".format(X.shape[0]))
    else:
        ax.set_title(title + " ({} samples)".format(X.shape[0]))
    ax.set_xlabel("1st component")
    ax.set_ylabel("2nd component")
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return
def predicted_vs_actual_y_xgb(self, xgb, best_nrounds, xgb_params, x_train_split, x_test_split,
                              y_train_split, y_test_split, title_name):
    # Split the training data into an extra set of test
    # x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
    dtrain_split = xgb.DMatrix(x_train_split, label=y_train_split)
    dtest_split = xgb.DMatrix(x_test_split)
    print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
    gbdt = xgb.train(xgb_params, dtrain_split, best_nrounds)
    y_predicted = gbdt.predict(dtest_split)
    plt.figure(figsize=(10, 5))
    plt.scatter(y_test_split, y_predicted, s=20)
    rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
    plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
    plt.xlabel('Actual y')
    plt.ylabel('Predicted y')
    plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
    plt.tight_layout()
def display_data(word_vectors, words, target_words=None):
    target_matrix = word_vectors.copy()
    if target_words:
        target_words = [line.strip().lower() for line in open(target_words)][:2000]
        rows = [words.index(word) for word in target_words if word in words]
        target_matrix = target_matrix[rows, :]
    else:
        rows = np.random.choice(len(word_vectors), size=1000, replace=False)
        target_matrix = target_matrix[rows, :]
    reduced_matrix = tsne(target_matrix, 2)

    Plot.figure(figsize=(200, 200), dpi=100)

    max_x = np.amax(reduced_matrix, axis=0)[0]
    max_y = np.amax(reduced_matrix, axis=0)[1]
    Plot.xlim((-max_x, max_x))
    Plot.ylim((-max_y, max_y))

    Plot.scatter(reduced_matrix[:, 0], reduced_matrix[:, 1], 20)

    for row_id in range(0, len(rows)):
        target_word = words[rows[row_id]]
        x = reduced_matrix[row_id, 0]
        y = reduced_matrix[row_id, 1]
        Plot.annotate(target_word, (x, y))

    Plot.savefig("word_vectors.png")
def predicted_vs_actual_sale_price(self, x_train, y_train, title_name):
    # Split the training data into an extra set of test
    x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
    print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
    lasso = LassoCV(alphas=[0.0001, 0.0003, 0.0006, 0.001, 0.003, 0.006, 0.01, 0.03, 0.06, 0.1,
                            0.3, 0.6, 1], max_iter=50000, cv=10)
    # lasso = RidgeCV(alphas=[0.0001, 0.0003, 0.0006, 0.001, 0.003, 0.006, 0.01, 0.03, 0.06, 0.1,
    #                         0.3, 0.6, 1], cv=10)
    lasso.fit(x_train_split, y_train_split)
    y_predicted = lasso.predict(X=x_test_split)
    plt.figure(figsize=(10, 5))
    plt.scatter(y_test_split, y_predicted, s=20)
    rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
    plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
    plt.xlabel('Actual Sale Price')
    plt.ylabel('Predicted Sale Price')
    plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
    plt.tight_layout()
def predicted_vs_actual_sale_price_xgb(self, xgb_params, x_train, y_train, seed, title_name):
    # Split the training data into an extra set of test
    x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
    dtrain_split = xgb.DMatrix(x_train_split, label=y_train_split)
    dtest_split = xgb.DMatrix(x_test_split)

    res = xgb.cv(xgb_params, dtrain_split, num_boost_round=1000, nfold=4, seed=seed, stratified=False,
                 early_stopping_rounds=25, verbose_eval=10, show_stdv=True)
    best_nrounds = res.shape[0] - 1
    print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
    gbdt = xgb.train(xgb_params, dtrain_split, best_nrounds)
    y_predicted = gbdt.predict(dtest_split)
    plt.figure(figsize=(10, 5))
    plt.scatter(y_test_split, y_predicted, s=20)
    rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
    plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
    plt.xlabel('Actual Sale Price')
    plt.ylabel('Predicted Sale Price')
    plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
    plt.tight_layout()
def scatter_labeled_z(z_batch, label_batch, filename="labeled_z"):
    fig = pylab.gcf()
    fig.set_size_inches(20.0, 16.0)
    pylab.clf()
    colors = ["#2103c8", "#0e960e", "#e40402", "#05aaa8", "#ac02ab",
              "#aba808", "#151515", "#94a169", "#bec9cd", "#6a6551"]
    for n in range(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], c=colors[label_batch[n]],
                               s=40, marker="o", edgecolors='none')

    classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
    recs = []
    for i in range(0, len(colors)):
        recs.append(mpatches.Rectangle((0, 0), 1, 1, fc=colors[i]))

    ax = pylab.subplot(111)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(recs, classes, loc="center left", bbox_to_anchor=(1.1, 0.5))
    pylab.xticks(pylab.arange(-4, 5))
    pylab.yticks(pylab.arange(-4, 5))
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    pylab.savefig(filename)
def scatter(self, lvl=None, **kwargs):
    defaults = {'c': '0.0', 'color': 'k', 'facecolor': 'k', 'edgecolor': 'None'}
    defaults.update(**kwargs)

    xe = self.e[0] + self.dx * np.arange(0, self.im.shape[1])
    ye = self.e[2] + self.dy * np.arange(0, self.im.shape[0])

    x = self.x
    y = self.y

    if lvl is not None:
        nx = np.ceil(np.interp(x, 0.5 * (xe[:-1] + xe[1:]), range(len(xe) - 1)))
        ny = np.ceil(np.interp(y, 0.5 * (ye[:-1] + ye[1:]), range(len(ye) - 1)))
        nh = [self.im[nx[k], ny[k]] for k in range(len(x))]
        ind = np.where(nh < np.min(lvl))
        plt.scatter(x[ind], y[ind], **kwargs)
    else:
        plt.scatter(x, y, **kwargs)
def starPlot(targ_ra, targ_dec, data, iso, g_radius, nbhd):
    """Star bin plot"""

    mag_g = data[mag_g_dred_flag]
    mag_r = data[mag_r_dred_flag]

    filter = star_filter(data)

    iso_filter = (iso.separation(mag_g, mag_r) < 0.1)

    # projection of image
    proj = ugali.utils.projector.Projector(targ_ra, targ_dec)
    x, y = proj.sphereToImage(data[filter & iso_filter]['RA'], data[filter & iso_filter]['DEC'])

    plt.scatter(x, y, edgecolor='none', s=3, c='black')
    plt.xlim(0.2, -0.2)
    plt.ylim(-0.2, 0.2)
    plt.gca().set_aspect('equal')
    plt.xlabel(r'$\Delta \alpha$ (deg)')
    plt.ylabel(r'$\Delta \delta$ (deg)')
    plt.title('Stars')
def _plot(self):
    # Called from the main thread
    pylab.ion()

    if not getattr(self, 'data_available', False):
        return

    if self.peaks is not None:
        for key in self.sign_peaks:
            for channel in self.peaks[key].keys():
                self.rates[key][int(channel)] += len(self.peaks[key][channel])
            pylab.scatter(self.positions[0, :], self.positions[1, :], c=self.rates[key])

    pylab.gca().set_title('Buffer %d' % self.counter)
    pylab.draw()
    return
def view_positions(self, indices=None, time=None):
    if time is None:
        time = 0
    res = self.synthetic_store.get(indices=indices, variables=['x', 'y', 'z'])
    pylab.figure()
    all_x = []
    all_y = []
    all_z = []
    all_c = []
    for key in res.keys():
        all_x += [res[key]['x'][time]]
        all_y += [res[key]['y'][time]]
        all_z += [res[key]['z'][time]]
        all_c += [self._scalarMap_synthetic.to_rgba(int(key))]

    pylab.scatter(self.probe.positions[0, :], self.probe.positions[1, :], c='k')
    pylab.scatter(all_x, all_y, c=all_c)
    pylab.show()
def plot_time_freq(self, colors=True, ax=None):
    import pylab as pl
    if ax is None:
        fig, allax = pl.subplots(1)
        ax = allax
    # make time matrix same shape as others
    t = np.outer(self.t, np.ones(self.npeaks))
    f = self.f
    if colors:
        mag = 20 * np.log10(self.mag)
        ax.scatter(t, f, s=6, c=mag, lw=0)
    else:
        mag = 100 + 20 * np.log10(self.mag)
        ax.scatter(t, f, s=mag, lw=0)
    pl.xlabel('Time (s)')
    pl.ylabel('Frequency (Hz)')
    # if colors:
    #     cs = pl.colorbar(ax=ax)
    #     cs.set_label('Magnitude (dB)')
    # pl.show()
    return ax
def plot_time_mag(self):
    import pylab as pl
    pl.figure()
    t = np.outer(self.t, np.ones(self.npeaks))
    # f = np.log2(self.f)
    f = self.f
    mag = 20 * np.log10(self.mag)
    pl.scatter(t, mag, s=10, c=f, lw=0, norm=pl.matplotlib.colors.LogNorm())
    pl.xlabel('Time (s)')
    pl.ylabel('Magnitude (dB)')
    cs = pl.colorbar()
    cs.set_label('Frequency (Hz)')
    # pl.show()
    return pl.gca()
def plot_time_freq_mag(self, minlen=10, cm=pl.cm.rainbow):
    cadd = 30
    cmax = 256
    ccur = 0
    part = [pp for pp in self.partial if len(pp.f) > minlen]
    pl.figure()
    pl.hold(True)
    for pp in part:
        # pl.plot(pp.start_idx + np.arange(len(pp.f)), np.array(pp.f))
        mag = 100 + 20 * np.log10(np.array(pp.mag))
        pl.scatter(pp.start_idx + np.arange(len(pp.f)), np.array(pp.f), s=mag, c=cm(ccur), lw=0)
        ccur = np.mod(ccur + cadd, cmax)
    pl.hold(False)
    pl.xlabel('Time (s)')
    pl.ylabel('Frequency (Hz)')
    pl.show()
def plot_z(z, dir=None, filename="z", xticks_range=None, yticks_range=None):
    if dir is None:
        raise Exception()
    try:
        os.mkdir(dir)
    except:
        pass
    fig = pylab.gcf()
    fig.set_size_inches(16.0, 16.0)
    pylab.clf()
    for n in xrange(z.shape[0]):
        result = pylab.scatter(z[n, 0], z[n, 1], s=40, marker="o", edgecolors='none')
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    if xticks_range is not None:
        pylab.xticks(pylab.arange(-xticks_range, xticks_range + 1))
    if yticks_range is not None:
        pylab.yticks(pylab.arange(-yticks_range, yticks_range + 1))
    pylab.savefig("{}/{}.png".format(dir, filename))
def plot_clusters_pca(responsibilities, color_groups):
    from sklearn.decomposition import RandomizedPCA
    import pylab as pl
    from random import shuffle
    colors = list(colors_dict.values())
    shuffle(colors)
    pca = RandomizedPCA(n_components=2)
    X = pca.fit_transform(responsibilities)
    # print >>stderr, pca.explained_variance_ratio_
    pl.figure()
    pl.scatter(X[:, 0], X[:, 1], c="grey", label="unknown")
    for c, sub, i in zip(colors, color_groups, count(0)):
        pl.scatter(X[sub, 0], X[sub, 1], c=c, label=str(i))
    pl.legend()
    pl.title("PCA responsibility matrix")
    pl.show()
def visualize_labeled_z(z_batch, label_batch, dir=None):
    fig = pylab.gcf()
    fig.set_size_inches(20.0, 16.0)
    pylab.clf()
    colors = ["#2103c8", "#0e960e", "#e40402", "#05aaa8", "#ac02ab",
              "#aba808", "#151515", "#94a169", "#bec9cd", "#6a6551"]
    for n in xrange(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], c=colors[label_batch[n]],
                               s=40, marker="o", edgecolors='none')

    classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
    recs = []
    for i in range(0, len(colors)):
        recs.append(mpatches.Rectangle((0, 0), 1, 1, fc=colors[i]))

    ax = pylab.subplot(111)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(recs, classes, loc="center left", bbox_to_anchor=(1.1, 0.5))
    pylab.xticks(pylab.arange(-4, 5))
    pylab.yticks(pylab.arange(-4, 5))
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    pylab.savefig("%s/labeled_z.png" % dir)
def draw2D_new(self):
    for i in xrange(self.nComponents):
        k1 = np.array([[self.params[6 * i + 3] ** 2,
                        self.params[6 * i + 3] * self.params[6 * i + 4] * self.params[6 * i + 5]],
                       [self.params[6 * i + 3] * self.params[6 * i + 4] * self.params[6 * i + 5],
                        self.params[6 * i + 4] ** 2]])
        w1, v1 = np.linalg.eig(k1)
        idx = w1.argsort()
        w1 = w1[idx]
        v1 = v1[:, idx]
        # x+2*(pi/4-x)+pi/2: since in the image X and Y are inverted, so need to minus 90 degree and flip around pi/4
        angle = -(np.arctan(v1[1][1] / v1[0][1])) + np.pi
        w2 = np.zeros((1, 2))
        w2[0, 1] = np.sqrt(2) * np.max([self.params[6 * i + 3], self.params[6 * i + 4]])
        w2[0, 0] = w2[0, 1] * w1[0] / w1[1]
        xeq = lambda t: w2[0, 1] * np.cos(t) * np.cos(angle) + w2[0, 0] * np.sin(t) * np.sin(angle) + self.params[6 * i + 1]
        yeq = lambda t: -w2[0, 1] * np.cos(t) * np.sin(angle) + w2[0, 0] * np.sin(t) * np.cos(angle) + self.params[6 * i + 2]
        t = np.linspace(0, 2 * np.pi, 100)
        x = xeq(t)
        y = yeq(t)
        pylab.scatter(self.params[6 * i + 2], self.params[6 * i + 1], color='k')
        pylab.plot(y.astype(int), x.astype(int), self.colors[i] + '-')
def draw2D(self, title, image=[]):
    pylab.figure()
    if image == []:
        pylab.imshow(self.image, 'gray')
    else:
        pylab.imshow(image, 'gray')
    pylab.axis('off')
    pylab.autoscale(False)
    for i in xrange(self.nComponents):
        xeq = lambda t: self.params[6 * i + 3] * np.cos(t) * np.cos(self.params[6 * i + 5]) + \
                        self.params[6 * i + 4] * np.sin(t) * np.sin(self.params[6 * i + 5]) + self.params[6 * i + 1]
        yeq = lambda t: -self.params[6 * i + 3] * np.cos(t) * np.sin(self.params[6 * i + 5]) + \
                        self.params[6 * i + 4] * np.sin(t) * np.cos(self.params[6 * i + 5]) + self.params[6 * i + 2]
        t = np.linspace(0, 2 * np.pi, 100)
        x = xeq(t)
        y = yeq(t)
        pylab.scatter(self.params[6 * i + 2], self.params[6 * i + 1], color='k')
        pylab.plot(y.astype(int), x.astype(int), self.colors[i] + '-')
    pylab.savefig(title)
    pylab.close()
def predicted_vs_actual_y_input_model(self, model, x_train_split, x_test_split, y_train_split,
                                      y_test_split, title_name):
    # Split the training data into an extra set of test
    # x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
    print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
    model.fit(x_train_split, y_train_split)
    y_predicted = model.predict(x_test_split)
    plt.figure(figsize=(10, 5))
    plt.scatter(y_test_split, y_predicted, s=20)
    rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
    plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
    plt.xlabel('Actual y')
    plt.ylabel('Predicted y')
    plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
    plt.tight_layout()
def plot_facade_cuts(self):
    facade_sig = self.facade_edge_scores.sum(0)
    facade_cuts = find_facade_cuts(facade_sig, dilation_amount=self.facade_merge_amount)
    mu = np.mean(facade_sig)
    sigma = np.std(facade_sig)

    w = self.rectified.shape[1]
    pad = 10

    gs1 = pl.GridSpec(5, 5)
    gs1.update(wspace=0.5, hspace=0.0)  # set the spacing between axes.

    pl.subplot(gs1[:3, :])
    pl.imshow(self.rectified)
    pl.vlines(facade_cuts, *pl.ylim(), lw=2, color='black')
    pl.axis('off')
    pl.xlim(-pad, w + pad)

    pl.subplot(gs1[3:, :], sharex=pl.gca())
    pl.fill_between(np.arange(w), 0, facade_sig, lw=0, color='red')
    pl.fill_between(np.arange(w), 0, np.clip(facade_sig, 0, mu + sigma), color='blue')
    pl.plot(np.arange(w), facade_sig, color='blue')

    pl.vlines(facade_cuts, facade_sig[facade_cuts], pl.xlim()[1], lw=2, color='black')
    pl.scatter(facade_cuts, facade_sig[facade_cuts])

    pl.axis('off')

    pl.hlines(mu, 0, w, linestyle='dashed', color='black')
    pl.text(0, mu, '$\mu$ ', ha='right')

    pl.hlines(mu + sigma, 0, w, linestyle='dashed', color='gray')
    pl.text(0, mu + sigma, '$\mu+\sigma$ ', ha='right')
    pl.xlim(-pad, w + pad)
def predicted_vs_actual_sale_price_input_model(self, model, x_train, y_train, title_name):
    # Split the training data into an extra set of test
    x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
    print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
    model.fit(x_train_split, y_train_split)
    y_predicted = model.predict(x_test_split)
    plt.figure(figsize=(10, 5))
    plt.scatter(y_test_split, y_predicted, s=20)
    rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
    plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
    plt.xlabel('Actual Sale Price')
    plt.ylabel('Predicted Sale Price')
    plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
    plt.tight_layout()
def svm_figure_generate(w, b, support_vectors, X):
    k = -w[0] / w[1]
    x = np.linspace(-5, 5)
    y = k * x - b / w[1]
    sv_1 = support_vectors[0]
    yy_down = k * x + (sv_1[1] - k * sv_1[0])
    sv_2 = support_vectors[-1]
    yy_up = k * x + (sv_2[1] - k * sv_2[0])
    pl.plot(x, y, 'k-')
    pl.plot(x, yy_up, 'k--')
    pl.plot(x, yy_down, 'k--')
    pl.scatter(support_vectors[:, 0], support_vectors[:, 1], s=80, facecolor='none')
    pl.scatter(X[:, 0], X[:, 1], c='Y', cmap=pl.cm.Paired)
    pl.axis('tight')
    pl.show()
def scatter_z(z_batch, filename="z"):
    if dir is None:
        raise Exception()
    try:
        os.mkdir(dir)
    except:
        pass
    fig = pylab.gcf()
    fig.set_size_inches(20.0, 16.0)
    pylab.clf()
    for n in range(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], s=40, marker="o", edgecolors='none')
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    pylab.savefig(filename)
def DrawScatt(pl, x, y, title=''):
    pl.figure
    prop = fm.FontProperties(fname="c:/windows/fonts/simsun.ttc")
    if title != "":
        pl.title(title, fontproperties=prop)
    pl.scatter(x, y)
    pl.ylabel(u"???", fontproperties=prop)
    pl.xlabel(u"????(?)", fontproperties=prop)
    pl.show()
    pl.close()
def draw3d(df=None, titles=None, datas=None):
    """?3d"""
    # ???c??????
    from mpl_toolkits.mplot3d.axes3d import Axes3D

    def genDf():
        df = pd.DataFrame([])
        for i in range(3):
            n = agl.array_random(100)
            df[i] = n
        return df

    if df is None:
        df = genDf()
    assert(len(df.columns) >= 3)
    X, Y, Z = np.array(df[df.columns[0]]), np.array(df[df.columns[1]]), np.array(df[df.columns[2]])
    fig = plt.figure(figsize=(8, 6))
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    p = ax.scatter(X, Y, Z)
    if datas is not None:
        for i in range(len(datas)):
            df = datas[i][0]
            x, y, z = np.array(df[df.columns[0]]), np.array(df[df.columns[1]]), np.array(df[df.columns[2]])
            c = str(datas[i][1])
            ax.scatter(x, y, z, c=c)
    if titles is not None and len(titles) >= 3:
        ax.set_xlabel(titles[0])
        ax.set_ylabel(titles[1])
        ax.set_zlabel(titles[2])
    plt.show()
def scatter(self, x, y, s=20, c='b', marker='o', cmap=None, norm=None, vmin=None, vmax=None,
            alpha=None, linewidths=None, verts=None, hold=None, **kwargs):
    pl.scatter(x, y, s, c, marker, cmap, norm, vmin, vmax, alpha, linewidths, verts, hold, **kwargs)
def plot(self, contour={}, scatter={}, **kwargs):
    # levels = np.linspace(self.im.min(), self.im.max(), 10)[1:]
    levels = self.nice_levels()
    c_defaults = {'origin': 'lower', 'cmap': plt.cm.Greys_r, 'levels': levels}
    c_defaults.update(**contour)
    c = self.contourf(**c_defaults)
    lvls = np.sort(c.levels)
    s_defaults = {'c': '0.0', 'edgecolor': 'None', 's': 2}
    s_defaults.update(**scatter)
    self.scatter(lvl=[lvls], **s_defaults)
def visualize_10_2d_gaussian_prior(n_z, y_label, visualization_dir=None):
    z_batch = sample_z_from_n_2d_gaussian_mixture(len(y_label), n_z, y_label, 10, False)
    z_batch = z_batch.data
    fig = pylab.gcf()
    fig.set_size_inches(15, 12)
    pylab.clf()
    colors = ["#2103c8", "#0e960e", "#e40402", "#05aaa8", "#ac02ab",
              "#aba808", "#151515", "#94a169", "#bec9cd", "#6a6551"]
    for n in xrange(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], c=colors[y_label[n]],
                               s=40, marker="o", edgecolors='none')

    classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
    recs = []
    for i in range(0, len(colors)):
        recs.append(mpatches.Rectangle((0, 0), 1, 1, fc=colors[i]))

    ax = pylab.subplot(111)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(recs, classes, loc="center left", bbox_to_anchor=(1.1, 0.5))
    pylab.xticks(pylab.arange(-4, 5))
    pylab.yticks(pylab.arange(-4, 5))
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    if visualization_dir is not None:
        pylab.savefig("%s/10_2d-gaussian.png" % visualization_dir)
    pylab.show()
def visualize_labeled_z(xp, model, x, y_label, visualization_dir, epoch, gpu=False):
    x = chainer.Variable(xp.asarray(x))
    z_batch = model.encode(x, test=True)
    z_batch.to_cpu()
    z_batch = z_batch.data
    fig = pylab.gcf()
    fig.set_size_inches(8.0, 8.0)
    pylab.clf()
    colors = ["#2103c8", "#0e960e", "#e40402", "#05aaa8", "#ac02ab",
              "#aba808", "#151515", "#94a169", "#bec9cd", "#6a6551"]
    for n in xrange(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], c=colors[y_label[n]],
                               s=40, marker="o", edgecolors='none')

    classes = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
    recs = []
    for i in range(0, len(colors)):
        recs.append(mpatches.Rectangle((0, 0), 1, 1, fc=colors[i]))

    ax = pylab.subplot(111)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(recs, classes, loc="center left", bbox_to_anchor=(1.1, 0.5))
    pylab.xticks(pylab.arange(-4, 5))
    pylab.yticks(pylab.arange(-4, 5))
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    pylab.savefig("{}/labeled_z_{}.png".format(visualization_dir, epoch))
    # pylab.show()
def cmPlot(targ_ra, targ_dec, data, iso, g_radius, nbhd, type):
    """Color-magnitude plot"""

    angsep = ugali.utils.projector.angsep(targ_ra, targ_dec, data['RA'], data['DEC'])
    annulus = (angsep > g_radius) & (angsep < 1.)

    mag_g = data[mag_g_dred_flag]
    mag_r = data[mag_r_dred_flag]

    if type == 'stars':
        filter = star_filter(data)
        plt.title('Stellar Color-Magnitude')
    elif type == 'galaxies':
        filter = galaxy_filter(data)
        plt.title('Galactic Color-Magnitude')

    iso_filter = (iso.separation(mag_g, mag_r) < 0.1)

    # Plot background objects
    plt.scatter(mag_g[filter & annulus] - mag_r[filter & annulus], mag_g[filter & annulus],
                c='k', alpha=0.1, edgecolor='none', s=1)

    # Plot isochrone
    ugali.utils.plotting.drawIsochrone(iso, lw=2, label='{} Gyr, z = {}'.format(iso.age, iso.metallicity))

    # Plot objects in nbhd
    plt.scatter(mag_g[filter & nbhd] - mag_r[filter & nbhd], mag_g[filter & nbhd],
                c='g', s=5, label='r < {:.3f}$^\circ$'.format(g_radius))

    # Plot objects in nbhd and near isochrone
    plt.scatter(mag_g[filter & nbhd & iso_filter] - mag_r[filter & nbhd & iso_filter],
                mag_g[filter & nbhd & iso_filter], c='r', s=5, label='$\Delta$CM < 0.1')

    plt.axis([-0.5, 1, 16, 24])
    plt.gca().invert_yaxis()
    plt.gca().set_aspect(1. / 4.)
    plt.legend(loc='upper left')
    plt.xlabel('g-r (mag)')
    plt.ylabel('g (mag)')
def projScatter(lon, lat, **kwargs):
    """
    Create a scatter plot on HEALPix projected axes.

    Inputs: lon (deg), lat (deg)
    """
    healpy.projscatter(lon, lat, lonlat=True, **kwargs)

############################################################
def drawSpatial(self, ax=None):
    if not ax:
        ax = plt.gca()

    # Stellar Catalog
    self._create_catalog()
    cut = (self.catalog.color > 0) & (self.catalog.color < 1)
    catalog = self.catalog.applyCut(cut)

    ax.scatter(catalog.lon, catalog.lat, c='k', marker='.', s=1)
    ax.set_xlim(self.glon - 0.5, self.glon + 0.5)
    ax.set_ylim(self.glat - 0.5, self.glat + 0.5)
    ax.set_xlabel('GLON (deg)')
    ax.set_ylabel('GLAT (deg)')
def drawHessDiagram(self, catalog=None):
    ax = plt.gca()
    if not catalog:
        catalog = self.get_stars()

    r_peak = self.kernel.extension
    angsep = ugali.utils.projector.angsep(self.ra, self.dec, catalog.ra, catalog.dec)
    cut_inner = (angsep < r_peak)
    cut_annulus = (angsep > 0.5) & (angsep < 1.)  # deg

    mmin, mmax = 16., 24.
    cmin, cmax = -0.5, 1.0
    mbins = np.linspace(mmin, mmax, 150)
    cbins = np.linspace(cmin, cmax, 150)

    color = catalog.color[cut_annulus]
    mag = catalog.mag[cut_annulus]

    h, xbins, ybins = numpy.histogram2d(color, mag, bins=[cbins, mbins])
    blur = nd.filters.gaussian_filter(h.T, 2)
    kwargs = dict(extent=[xbins.min(), xbins.max(), ybins.min(), ybins.max()],
                  cmap='gray_r', aspect='auto', origin='lower',
                  rasterized=True, interpolation='none')
    ax.imshow(blur, **kwargs)

    pylab.scatter(catalog.color[cut_inner], catalog.mag[cut_inner],
                  c='red', s=7, edgecolor='none')  # label=r'$r < %.2f$ deg'%(r_peak))
    ugali.utils.plotting.drawIsochrone(self.isochrone, c='b', zorder=10)
    ax.set_xlim(-0.5, 1.)
    ax.set_ylim(24., 16.)
    plt.xlabel(r'$g - r$')
    plt.ylabel(r'$g$')
    plt.xticks([-0.5, 0., 0.5, 1.])
    plt.yticks(numpy.arange(mmax - 1., mmin - 1., -1.))

    radius_string = (r'${\rm r}<%.1f$ arcmin' % (60 * r_peak))
    pylab.text(0.05, 0.95, radius_string,
               fontsize=10, ha='left', va='top', color='red',
               transform=pylab.gca().transAxes,
               bbox=dict(facecolor='white', alpha=1., edgecolor='none'))
def drawMembersSpatial(self, data):
    ax = plt.gca()
    if isinstance(data, basestring):
        filename = data
        data = pyfits.open(filename)[1].data

    xmin, xmax = -0.25, 0.25
    ymin, ymax = -0.25, 0.25
    xx, yy = np.meshgrid(np.linspace(xmin, xmax), np.linspace(ymin, ymax))

    x_prob, y_prob = sphere2image(self.ra, self.dec, data['RA'], data['DEC'])

    sel = (x_prob > xmin) & (x_prob < xmax) & (y_prob > ymin) & (y_prob < ymax)
    sel_prob = data['PROB'][sel] > 5.e-2
    index_sort = numpy.argsort(data['PROB'][sel][sel_prob])

    plt.scatter(x_prob[sel][~sel_prob], y_prob[sel][~sel_prob],
                marker='o', s=2, c='0.75', edgecolor='none')
    sc = plt.scatter(x_prob[sel][sel_prob][index_sort],
                     y_prob[sel][sel_prob][index_sort],
                     c=data['PROB'][sel][sel_prob][index_sort],
                     marker='o', s=10, edgecolor='none', cmap='jet', vmin=0., vmax=1.)  # Spectral_r

    drawProjImage(xx, yy, None, coord='C')

    #ax.set_xlim(xmax, xmin)
    #ax.set_ylim(ymin, ymax)
    #plt.xlabel(r'$\Delta \alpha_{2000}\,(\deg)$')
    #plt.ylabel(r'$\Delta \delta_{2000}\,(\deg)$')
    plt.xticks([-0.2, 0., 0.2])
    plt.yticks([-0.2, 0., 0.2])

    divider = make_axes_locatable(ax)
    ax_cb = divider.new_horizontal(size="7%", pad=0.1)
    plt.gcf().add_axes(ax_cb)
    pylab.colorbar(sc, cax=ax_cb, orientation='vertical',
                   ticks=[0, 0.2, 0.4, 0.6, 0.8, 1.0],
                   label='Membership Probability')
    ax_cb.yaxis.tick_right()
def profileUpperLimit(self, delta=2.71):
    """
    Compute one-sided upperlimit via profile method.
    """
    a = self.p_2
    b = self.p_1
    if self.vertex_x < 0:
        c = self.p_0 + delta
    else:
        c = self.p_0 - self.vertex_y + delta

    if b**2 - 4. * a * c < 0.:
        print 'WARNING'
        print a, b, c
        #pylab.figure()
        #pylab.scatter(self.x, self.y)
        #raw_input('WAIT')
        return 0.

    return max((numpy.sqrt(b**2 - 4. * a * c) - b) / (2. * a),
               (-1. * numpy.sqrt(b**2 - 4. * a * c) - b) / (2. * a))

#def bayesianUpperLimit3(self, alpha, steps=1.e5):
#    """
#    Compute one-sided upper limit using Bayesian Method of Helene.
#    """
#    # Need a check to see whether limit is reliable
#    pdf = scipy.interpolate.interp1d(self.x, numpy.exp(self.y / 2.))  # Convert from 2 * log(likelihood) to likelihood
#    x_pdf = numpy.linspace(self.x[0], self.x[-1], steps)
#    cdf = numpy.cumsum(pdf(x_pdf))
#    cdf /= cdf[-1]
#    cdf_reflect = scipy.interpolate.interp1d(cdf, x_pdf)
#    return cdf_reflect(alpha)
#    #return self.x[numpy.argmin((cdf - alpha)**2)]
def bayesianUpperLimit(self, alpha, steps=1.e5, plot=False):
    """
    Compute one-sided upper limit using Bayesian Method of Helene.
    Several methods of increasing numerical stability have been implemented.
    """
    x_dense, y_dense = self.densify()
    y_dense -= numpy.max(y_dense)  # Numeric stability
    f = scipy.interpolate.interp1d(x_dense, y_dense, kind='linear')
    x = numpy.linspace(0., numpy.max(x_dense), steps)
    pdf = numpy.exp(f(x) / 2.)
    cut = (pdf / numpy.max(pdf)) > 1.e-10
    x = x[cut]
    pdf = pdf[cut]
    #pdf /= pdf[0]
    #forbidden = numpy.nonzero(pdf < 1.e-10)[0]
    #if len(forbidden) > 0:
    #    index = forbidden[0]  # Numeric stability
    #    x = x[0: index]
    #    pdf = pdf[0: index]
    cdf = numpy.cumsum(pdf)
    cdf /= cdf[-1]
    cdf_reflect = scipy.interpolate.interp1d(cdf, x)

    #if plot:
    #    pylab.figure()
    #    pylab.plot(x, f(x))
    #    pylab.scatter(self.x, self.y, c='red')
    #
    #    pylab.figure()
    #    pylab.plot(x, pdf)
    #
    #    pylab.figure()
    #    pylab.plot(cdf, x)

    return cdf_reflect(alpha)
def confidenceInterval(self, alpha=0.6827, steps=1.e5, plot=False):
    """
    Compute two-sided confidence interval by taking x-values corresponding to the largest PDF-values first.
    """
    x_dense, y_dense = self.densify()
    y_dense -= numpy.max(y_dense)  # Numeric stability
    f = scipy.interpolate.interp1d(x_dense, y_dense, kind='linear')
    x = numpy.linspace(0., numpy.max(x_dense), steps)
    # ADW: Why does this start at 0, which often outside the input range?
    # Wouldn't starting at xmin be better:
    #x = numpy.linspace(numpy.min(x_dense), numpy.max(x_dense), steps)
    pdf = numpy.exp(f(x) / 2.)
    cut = (pdf / numpy.max(pdf)) > 1.e-10
    x = x[cut]
    pdf = pdf[cut]

    sorted_pdf_indices = numpy.argsort(pdf)[::-1]  # Indices of PDF in descending value
    cdf = numpy.cumsum(pdf[sorted_pdf_indices])
    cdf /= cdf[-1]
    sorted_pdf_index_max = numpy.argmin((cdf - alpha)**2)
    x_select = x[sorted_pdf_indices[0: sorted_pdf_index_max]]

    #if plot:
    #    cdf = numpy.cumsum(pdf)
    #    cdf /= cdf[-1]
    #    print cdf[numpy.max(sorted_pdf_indices[0: sorted_pdf_index_max])] \
    #        - cdf[numpy.min(sorted_pdf_indices[0: sorted_pdf_index_max])]
    #
    #    pylab.figure()
    #    pylab.plot(x, f(x))
    #    pylab.scatter(self.x, self.y, c='red')
    #
    #    pylab.figure()
    #    pylab.plot(x, pdf)

    return numpy.min(x_select), numpy.max(x_select)

############################################################
def plot_candidates(self):
    """Plot a representation of candidate periodicity

    Size gives the periodicity strength, color the order of preference
    """
    hues = np.arange(self.ncand) / float(self.ncand)
    hsv = np.swapaxes(np.atleast_3d([[hues, np.ones(len(hues)), np.ones(len(hues))]]), 1, 2)
    cols = hsv_to_rgb(hsv).squeeze()

    for per in self.periods:
        nc = len(per.cand_period)
        pl.scatter(per.time * np.ones(nc), per.cand_period,
                   s=per.cand_strength * 100, c=cols[0:nc], alpha=.5)
def plot_scatter(data, dir=None, filename="scatter", color="blue"):
    if dir is None:
        raise Exception()
    try:
        os.mkdir(dir)
    except:
        pass
    fig = pylab.gcf()
    fig.set_size_inches(16.0, 16.0)
    pylab.clf()
    pylab.scatter(data[:, 0], data[:, 1], s=20, marker="o", edgecolors="none", color=color)
    pylab.xlim(-4, 4)
    pylab.ylim(-4, 4)
    pylab.savefig("{}/{}.png".format(dir, filename))
def plot_scatter(data, dir=None, filename="scatter", color="blue"):
    if dir is None:
        raise Exception()
    try:
        os.mkdir(dir)
    except:
        pass
    fig = pylab.gcf()
    fig.set_size_inches(16.0, 16.0)
    pylab.clf()
    pylab.scatter(data[:, 0], data[:, 1], s=20, marker="o", edgecolors="none", color=color)
    pylab.xlim(-4, 4)
    pylab.ylim(-4, 4)
    pylab.savefig("{}/{}".format(dir, filename))
def visualize_z(z_batch, dir=None):
    if dir is None:
        raise Exception()
    try:
        os.mkdir(dir)
    except:
        pass
    fig = pylab.gcf()
    fig.set_size_inches(20.0, 16.0)
    pylab.clf()
    for n in xrange(z_batch.shape[0]):
        result = pylab.scatter(z_batch[n, 0], z_batch[n, 1], s=40, marker="o", edgecolors='none')
    pylab.xlabel("z1")
    pylab.ylabel("z2")
    pylab.savefig("%s/latent_code.png" % dir)
def draw2D(self):
    for i in xrange(self.nComponents):
        xeq = lambda t: self.params[6 * i + 3] * np.cos(t) * np.cos(self.params[6 * i + 5]) + \
                        self.params[6 * i + 4] * np.sin(t) * np.sin(self.params[6 * i + 5]) + self.params[6 * i + 1]
        yeq = lambda t: -self.params[6 * i + 3] * np.cos(t) * np.sin(self.params[6 * i + 5]) + \
                        self.params[6 * i + 4] * np.sin(t) * np.cos(self.params[6 * i + 5]) + self.params[6 * i + 2]
        t = np.linspace(0, 2 * np.pi, 100)
        x = xeq(t)
        y = yeq(t)
        pylab.scatter(self.params[6 * i + 2], self.params[6 * i + 1], color='k')
        pylab.plot(y.astype(int), x.astype(int), self.colors[i] + '-')
def plot_gaussians3D(self, save=False, titlehist='', pathfig='', newfig=True):
    ax = extract.hist2d(titlehist, newfig=newfig)
    dx, dy = np.indices(self.shape)
    for n in xrange(0, len(self.params), 6):
        gaussunitaire = GaussianForFit(self.image, 1, params=self.params[n:n + 6])
        ax.scatter(gaussunitaire.params[1], gaussunitaire.params[2],
                   self.image[gaussunitaire.params[1], gaussunitaire.params[2]],
                   color=self.colors[n % 5],
                   label="{0:.3f}".format(gaussunitaire.params[0]), alpha=0.7)
        ax.contour(dx, dy, gaussunitaire.gaussian, colors=self.colors[n % 5])
    if save:
        pylab.savefig(pathfig)
def patience(log, ax=None):
    ax = ax or pl.gca()
    maxes = running_max(list(log.iteration),
                        list(log.dev_accuracy - log.tradeoff * log.dev_runtime))
    ax.scatter(maxes[:, 0], maxes[:, 1], lw=0)
def view_masks(file_name, t_start=0, t_stop=1, n_elec=0):
    params = CircusParser(file_name)
    data_file = params.get_data_file()
    data_file.open()
    N_e = params.getint('data', 'N_e')
    N_t = params.getint('detection', 'N_t')
    N_total = params.nb_channels
    sampling_rate = params.rate
    do_temporal_whitening = params.getboolean('whitening', 'temporal')
    do_spatial_whitening = params.getboolean('whitening', 'spatial')
    spike_thresh = params.getfloat('detection', 'spike_thresh')
    file_out_suff = params.get('data', 'file_out_suff')
    nodes, edges = get_nodes_and_edges(params)
    chunk_size = (t_stop - t_start) * sampling_rate
    padding = (t_start * sampling_rate, t_start * sampling_rate)

    inv_nodes = numpy.zeros(N_total, dtype=numpy.int32)
    inv_nodes[nodes] = numpy.argsort(nodes)

    safety_time = params.getint('clustering', 'safety_time')

    if do_spatial_whitening:
        spatial_whitening = load_data(params, 'spatial_whitening')
    if do_temporal_whitening:
        temporal_whitening = load_data(params, 'temporal_whitening')

    thresholds = load_data(params, 'thresholds')
    data = data_file.get_data(0, chunk_size, padding=padding, nodes=nodes)
    data_shape = len(data)
    data_file.close()
    peaks = {}
    indices = inv_nodes[edges[nodes[n_elec]]]

    if do_spatial_whitening:
        data = numpy.dot(data, spatial_whitening)
    if do_temporal_whitening:
        data = scipy.ndimage.filters.convolve1d(data, temporal_whitening, axis=0, mode='constant')

    for i in xrange(N_e):
        peaks[i] = algo.detect_peaks(data[:, i], thresholds[i], valley=True, mpd=0)

    pylab.figure()

    for count, i in enumerate(indices):
        pylab.plot(count * 5 + data[:, i], '0.25')
        #xmin, xmax = pylab.xlim()
        pylab.scatter(peaks[i], count * 5 + data[peaks[i], i], s=10, c='r')

    for count, i in enumerate(peaks[n_elec]):
        pylab.axvspan(i - safety_time, i + safety_time, facecolor='r', alpha=0.5)

    pylab.ylim(-5, len(indices) * 5)
    pylab.xlabel('Time [ms]')
    pylab.ylabel('Electrode')
    pylab.tight_layout()
    pylab.setp(pylab.gca(), yticks=[])
    pylab.show()
    return peaks
def view_classification(data_1, data_2, title=None, save=None):
    fig = pylab.figure()
    count = 0
    panels = [0, 2, 1, 3]
    for item in [data_1, data_2]:
        clf, cld, X, X_raw, y = item
        for mode in ['predict', 'decision_function']:
            ax = fig.add_subplot(2, 2, panels[count] + 1)
            if mode == 'predict':
                c = clf
                vmax = 1.0
                vmin = 0.0
            elif mode == 'decision_function':
                c = cld
                vmax = max(abs(numpy.amin(c)), abs(numpy.amax(c)))
                vmin = -vmax
            from circus.validating.utils import Projection
            p = Projection()
            _ = p.fit(X_raw, y)
            X_raw_ = p.transform(X_raw)
            # Plot figure.
            sc = ax.scatter(X_raw_[:, 0], X_raw_[:, 1], c=c, s=5, lw=0.1,
                            cmap='bwr', vmin=vmin, vmax=vmax)
            cb = fig.colorbar(sc)
            ax.grid(True)
            if panels[count] in [0, 1]:
                if panels[count] == 0:
                    ax.set_title('Classification Before')
                    ax.set_ylabel("2nd component")
                if panels[count] == 1:
                    ax.set_title('Classification After')
                cb.set_label('Prediction')
            elif panels[count] in [2, 3]:
                ax.set_xlabel("1st component")
                if panels[count] == 2:
                    ax.set_ylabel("2nd component")
                if panels[count] == 3:
                    cb.set_label('Decision function')
            count += 1
    if save is None:
        pylab.show()
    else:
        pylab.savefig(save)
        pylab.close(fig)
    return
def drawMembership(self, ax=None, radius=None, zidx=0, mc_source_id=1):
    if not ax:
        ax = plt.gca()

    import ugali.analysis.scan

    filename = self.config.mergefile
    logger.debug("Opening %s..." % filename)
    f = pyfits.open(filename)
    distance_modulus = f[2].data['DISTANCE_MODULUS'][zidx]

    for ii, name in enumerate(self.config.params['isochrone']['infiles']):
        logger.info('%s %s' % (ii, name))
        isochrone = ugali.isochrone.Isochrone(self.config, name)
        mag = isochrone.mag + distance_modulus
        ax.scatter(isochrone.color, mag, color='0.5', s=800, zorder=0)

    pix = ang2pix(self.nside, self.glon, self.glat)
    likelihood_pix = ugali.utils.skymap.superpixel(pix, self.nside,
                                                   self.config.params['coords']['nside_likelihood'])

    config = self.config
    scan = ugali.analysis.scan.Scan(self.config, likelihood_pix)
    likelihood = scan.likelihood
    distance_modulus_array = [self.config.params['scan']['distance_modulus_array'][zidx]]
    likelihood.precomputeGridSearch(distance_modulus_array)
    likelihood.gridSearch()
    p = likelihood.membershipGridSearch()

    sep = ugali.utils.projector.angsep(self.glon, self.glat, likelihood.catalog.lon, likelihood.catalog.lat)
    radius = self.radius if radius is None else radius
    cut = (sep < radius)
    catalog = likelihood.catalog.applyCut(cut)
    p = p[cut]

    cut_mc_source_id = (catalog.mc_source_id == mc_source_id)
    ax.scatter(catalog.color[cut_mc_source_id], catalog.mag[cut_mc_source_id],
               c='gray', s=100, edgecolors='none')

    sc = ax.scatter(catalog.color, catalog.mag, c=p, edgecolors='none')
    ax.set_xlim(likelihood.roi.bins_color[0], likelihood.roi.bins_color[-1])
    ax.set_ylim(likelihood.roi.bins_mag[-1], likelihood.roi.bins_mag[0])
    ax.set_xlabel('Color (mag)')
    ax.set_ylabel('Magnitude (mag)')
    try:
        ax.cax.colorbar(sc)
    except:
        pylab.colorbar(sc)