def knp(nClasses, nItemsInClass, k):
    def generate_test_mesh(trainData):
        x_min = min([trainData[i][0][0] for i in range(len(trainData))]) - 1.0
        x_max = max([trainData[i][0][0] for i in range(len(trainData))]) + 1.0
        y_min = min([trainData[i][0][1] for i in range(len(trainData))]) - 1.0
        y_max = max([trainData[i][0][1] for i in range(len(trainData))]) + 1.0
        h = 0.05
        testX, testY = np.meshgrid(np.arange(x_min, x_max, h),
                                   np.arange(y_min, y_max, h))
        return [testX, testY]

    train = generate_dot(nItemsInClass, nClasses)
    test = generate_test_mesh(train)
    test_mesh = classifyKNN(train, zip(test[0].ravel(), test[1].ravel()), k, nClasses)
    class_colormap = ListedColormap(['#FF0000', '#00FF00', '#FFFFFF'])
    test_colormap = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAAA'])
    pl.pcolormesh(test[0], test[1],
                  np.asarray(test_mesh).reshape(test[0].shape),
                  cmap=test_colormap)
    pl.scatter([train[i][0][0] for i in range(len(train))],
               [train[i][0][1] for i in range(len(train))],
               c=[train[i][1] for i in range(len(train))],
               cmap=class_colormap)
    pl.show()
def logRegress(X, Y):
    scores = []
    for train_index, test_index in kf:  # kf: cross-validation splitter defined elsewhere
        X_train, X_test = X[train_index], X[test_index]
        y_train, y_test = Y[train_index], Y[test_index]
        logModel = linear_model.LogisticRegression()
        logModel.fit(X_train, y_train)
        scores.append(logModel.score(X_test, y_test))
    print("Scores", scores)

    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = .02  # mesh step size
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    Z = logModel.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure(1, figsize=(4, 3))
    pl.pcolormesh(xx, yy, Z, cmap=pl.cm.Paired)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=Y, edgecolors='k', cmap=pl.cm.Paired)
    pl.xlabel('Sepal length')
    pl.ylabel('Sepal width')
    pl.xlim(xx.min(), xx.max())
    pl.ylim(yy.min(), yy.max())
    pl.xticks(())
    pl.yticks(())
    pl.show()
def fig_spec(po, pos, sig_loc, typ=0):
    ax1 = py.subplot(gs[pos[2]:pos[3], pos[0]:pos[1]])
    set_axis(ax1, -0.05, 1.05, letter=po)
    freq, time_spec, spec_mtrx = spectrogram(sig_loc, Fs, nperseg=Fs)
    py.pcolormesh(time_spec / 60, freq, spec_mtrx, vmax=1000,
                  cmap='Greens', norm=LogNorm(vmin=1e1, vmax=5e2))
    py.ylim(0, 200)
    py.colorbar()
    py.xticks(fontsize=10)
    py.yticks(fontsize=10)
    times = [2.9, 3.4, 6.3, 6.8, 9.6, 10.1]
    if typ == 'naris':
        for i, time in enumerate(times):
            py.axvline(time, color='grey', ls='--', lw=1)
            if i % 2 == 0:
                py.text(time, 205, 'n.b', color='black')
    else:
        py.axvline(4, color='grey', ls='--', lw=1)
        py.text(4, 205, 'KX injection', color='black')
        # py.axvline(105, color='red', ls='--', lw=2)
        # py.text(80, 210, 'Naris block', color='yellow', fontsize=20)
    ax1.spines['right'].set_visible(False)
    ax1.spines['top'].set_visible(False)
    py.xlabel('Time (min)')
    py.ylabel('Frequency (Hz)')
def plotRadTilt(plot_data, plot_cmaps, plot_titles, grid, file_name, base_ref=None):
    subplot_base = 220
    n_plots = 4

    xs, ys, gs_x, gs_y, map = grid

    pylab.figure(figsize=(12, 12))
    pylab.subplots_adjust(left=0.02, right=0.98, top=0.98, bottom=0.02, wspace=0.04)

    for plot in range(n_plots):
        pylab.subplot(subplot_base + plot + 1)
        cmap, vmin, vmax = plot_cmaps[plot]
        cmap.set_under("#ffffff", alpha=0.0)

        pylab.pcolormesh(xs - gs_x / 2, ys - gs_y / 2, plot_data[plot] >= -90,
                         vmin=0, vmax=1, cmap=mask_cmap)
        pylab.pcolormesh(xs - gs_x / 2, ys - gs_y / 2, plot_data[plot],
                         vmin=vmin, vmax=vmax, cmap=cmap)
        pylab.colorbar()

        if base_ref is not None:
            pylab.contour(xs, ys, base_ref, levels=[10.], colors='k', lw=0.5)

        # if plot == 0:
        #     pylab.contour(xs, ys, refl_88D[:, :, 0], levels=np.arange(10, 80, 10), colors='#808080', lw=0.5)
        # pylab.plot(radar_x, radar_y, 'ko')

        pylab.title(plot_titles[plot])
        drawPolitical(map)

    pylab.savefig(file_name)
    pylab.close()
    return
def plot_results_with_hyperplane(clf, clf_name, df, plt_nmbr):
    x_min, x_max = df.x.min() - .5, df.x.max() + .5
    y_min, y_max = df.y.min() - .5, df.y.max() + .5

    # step between points, i.e. [0, 0.02, 0.04, ...]
    step = .02
    # to plot the boundary, we're going to create a matrix of every possible point
    # then label each point as a wolf or cow using our classifier
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    # this gets our predictions back into a matrix
    Z = Z.reshape(xx.shape)

    # create a subplot (we're going to have more than 1 plot on a given image)
    pl.subplot(2, 2, plt_nmbr)
    # plot the boundaries
    pl.pcolormesh(xx, yy, Z, cmap=pl.cm.Paired)

    # plot the wolves and cows
    for animal in df.animal.unique():
        pl.scatter(df[df.animal == animal].x,
                   df[df.animal == animal].y,
                   marker=animal,
                   label="cows" if animal == "x" else "wolves",
                   color='black',
                   c=df.animal_type, cmap=pl.cm.Paired)
    pl.title(clf_name)
    pl.legend(loc="best")
def i1():
    # interp2d
    # This does not seem to work.  It also does not seem to honor
    # bounds_error or fill_value
    #xx = linspace(0,2*pi,39)
    #yy = linspace(-2,2,41)
    xx = linspace(0, 2*pi, 12)
    yy = linspace(-2, 2, 11)
    X, Y = make_grid(xx, yy)
    Z = ff(X, Y)
    # Note that in this use, xx and yy are 1d, Z is 2d.
    # The output shapes are
    # fint: ()   ()   => (1,)
    # fint: (n,) ()   => (n,)
    # fint: ()   (m,) => (1,m)
    # fint: (n,) (m,) => (n,m)
    fint = scipy.interpolate.interp2d(xx, yy, Z)
    xfine = linspace(0.0, 2*pi, 99)
    yfine = linspace(-2, 2, 101)
    XF, YF = make_grid(xfine, yfine)
    ZF = fint(xfine, yfine).transpose()
    pl.clf()
    pl.pcolormesh(XF, YF, ZF)
    pl.colorbar()
def i5():
    # griddata
    r, dr = 10, 4
    dth = dr/(1.0*r)
    rr = linspace(r, r+dr, 15)
    th = linspace(-0.5*dth, 0.5*dth, 16)
    R, TH = make_grid(rr, th)
    X, Y = R*cos(TH), R*sin(TH)
    Z = ff(X, Y)

    points = grid_to_points(X, Y)
    values = grid_to_points(Z)

    xfine = linspace(r, r+dr, 50)
    yfine = linspace(-0.5*dr, 0.5*dr, 51)
    XF, YF = make_grid(xfine, yfine)
    desired_points = grid_to_points(XF, YF)

    # Only 'nearest' seems to work
    # 'linear' and 'cubic' just give fill value
    desired_values = scipy.interpolate.griddata(points, values, desired_points,
                                                method='nearest', fill_value=1.0)
    ZF = desired_values.reshape(XF.shape)
    pl.clf()
    pl.pcolormesh(XF, YF, ZF)
    pl.colorbar()
    return ZF
def _make_logp_histogram(points, logp, nbins, rangeci, weights, cbar):
    from numpy import (ones_like, searchsorted, linspace, cumsum, diff,
                       sort, argsort, array, hstack, log10, exp)
    if weights is None:
        weights = ones_like(logp)
    edges = linspace(rangeci[0], rangeci[1], nbins+1)
    idx = searchsorted(points, edges)
    weightsum = cumsum(weights)
    heights = diff(weightsum[idx])/weightsum[-1]  # normalized weights

    import pylab
    vmin, vmax, cmap = cbar
    cmap_steps = linspace(vmin, vmax, cmap.N+1)
    bins = []  # marginalized maximum likelihood
    for h, s, e, xlo, xhi in zip(heights, idx[:-1], idx[1:], edges[:-1], edges[1:]):
        if s == e:
            continue
        pv = -logp[s:e]
        pidx = argsort(pv)
        pw = weights[s:e][pidx]
        x = array([xlo, xhi], 'd')
        y = hstack((0, cumsum(pw)))
        z = pv[pidx][:, None]
        # centerpoint, histogram height, maximum likelihood for each bin
        bins.append(((xlo+xhi)/2, y[-1], exp(vmin-z[0])))
        if len(z) > cmap.N:
            # downsample histogram bar according to number of colors
            pidx = searchsorted(z[1:-1].flatten(), cmap_steps)
            if pidx[-1] < len(z)-1:
                pidx = hstack((pidx, -1))
            y, z = y[pidx], z[pidx]
        pylab.pcolormesh(x, y, z, vmin=vmin, vmax=vmax, hold=True, cmap=cmap)
        # Draw bars around each histogram bin
        #pylab.plot([xlo,xlo,xhi,xhi],[y[0],y[-1],y[-1],y[0]],'-k',linewidth=0.1,hold=True)
    centers, height, maxlikelihood = array(bins).T
    pylab.plot(centers, maxlikelihood*max(height), '-g', hold=True)
def main(plot=True):
    # Setup grid
    g = 4
    nx, ny = 200, 50
    Lx, Ly = 26, 26/4

    (x, y), (dx, dy) = ghosted_grid([nx, ny], [Lx, Ly], 0)

    # monkey patch the velocity
    uc = MultiFab(sizes=[nx, ny], n_ghost=4, dof=2)
    uc.validview[0] = (y > Ly / 3) * (2 * Ly / 3 > y)
    uc.validview[1] = np.sin(2 * pi * x / Lx) * .3 / (2 * pi / Lx)
    # state.u = np.random.rand(*x.shape)

    tad = BarotropicSolver()
    tad.geom.dx = dx
    tad.geom.dy = dx

    dt = min(dx, dy) / 4

    if plot:
        import pylab as pl
        pl.ion()

    for i, (t, uc) in enumerate(steps(tad.onestep, uc, dt, [0.0, 10000 * dt])):
        if i % 100 == 0:
            if plot:
                pl.clf()
                pl.pcolormesh(uc.validview[0])
                pl.colorbar()
                pl.pause(.01)
def draw_plane(k, should_draw=True):
    # Generate a mesh of nodes that covers all train cases
    def generate_background_data(trainData):
        border_offset = 0.5
        x_min = min([trainData[i][0][0] for i in range(len(trainData))]) - border_offset
        x_max = max([trainData[i][0][0] for i in range(len(trainData))]) + border_offset
        y_min = min([trainData[i][0][1] for i in range(len(trainData))]) - border_offset
        y_max = max([trainData[i][0][1] for i in range(len(trainData))]) + border_offset
        h = 0.1
        testX, testY = np.meshgrid(np.arange(x_min, x_max, h),
                                   np.arange(y_min, y_max, h))
        return [testX, testY]

    global train_data
    train_data = generate_data()
    test_mesh = generate_background_data(train_data)
    test_mesh_classes = classify_knn(train_data,
                                     zip(test_mesh[0].ravel(), test_mesh[1].ravel()),
                                     k)
    if should_draw:
        class_colormap = ListedColormap(['#FF9900', '#00FF00'])
        test_colormap = ListedColormap(['#FFCCAA', '#AAFFAA'])
        pl.ion()
        pl.pcolormesh(test_mesh[0],
                      test_mesh[1],
                      np.asarray(test_mesh_classes).reshape(test_mesh[0].shape),
                      cmap=test_colormap)
        # pl.scatter(test_mesh[0],
        #            test_mesh[1],
        #            c=np.asarray(test_mesh_classes).reshape(test_mesh[0].shape),
        #            cmap=test_colormap)
        pl.scatter([train_data[i][0][0] for i in range(len(train_data))],
                   [train_data[i][0][1] for i in range(len(train_data))],
                   c=[train_data[i][1] for i in range(len(train_data))],
                   cmap=class_colormap)
        pl.show()
def autorun(self):
    pylab.pcolormesh(self.ex_board)
    pylab.colorbar()
    pylab.savefig("time0.png")
    g = 1
    # if you need every time's picture just set write_frequency to 1
    write_frequency = 3
    while g <= self.G:
        print("At time level %d" % g)
        # Main LOOP for game
        for i in range(self.N):
            for j in range(self.N):
                numberOfNeighbours = self.neighbours_count(i, j)
                if self.ex_board[i][j] == 1 and numberOfNeighbours < 2:
                    self.board[i][j] = 0
                elif self.ex_board[i][j] == 1 and (numberOfNeighbours == 2 or numberOfNeighbours == 3):
                    self.board[i][j] = 1
                elif self.ex_board[i][j] == 1 and numberOfNeighbours > 3:
                    self.board[i][j] = 0
                elif self.ex_board[i][j] == 0 and numberOfNeighbours == 3:
                    self.board[i][j] = 1
        if g % write_frequency == 0:
            pylab.pcolormesh(self.board)
            pylab.savefig("time%d.png" % g)
        self.ex_board = self.board.copy()
        # go next
        g += 1
def load_data(self):
    self.input_file = self.get_input()
    if self.input_file is None:
        print("No input file selected. Exiting...")
        import sys
        sys.exit(0)

    self.nc = NC(self.input_file)
    nc = self.nc

    self.x = np.array(nc.variables['x'][:], dtype=np.double)
    self.y = np.array(nc.variables['y'][:], dtype=np.double)
    self.z = np.array(np.squeeze(nc.variables['usurf'][:]), dtype=np.double)
    self.thk = np.array(np.squeeze(nc.variables['thk'][:]), dtype=np.double)

    self.mask = dbg.initialize_mask(self.thk)
    print("Mask initialization: done")

    plt.figure(1)
    plt.pcolormesh(self.x, self.y, self.mask)
    plt.contour(self.x, self.y, self.z, colors='black')
    plt.axis('tight')
    plt.axes().set_aspect('equal')
    plt.show()
def compute_mask(self):
    import matplotlib.nxutils as nx

    if self.pts is not None:
        def correct_mask(mask, x, y, pts):
            for j in range(y.size):
                for i in range(x.size):
                    if mask[j, i] > 0:
                        if nx.pnpoly(x[i], y[j], pts):
                            mask[j, i] = 2
                        else:
                            mask[j, i] = 1
        correct_mask(self.mask, self.x, self.y, self.pts)

    dbg.upslope_area(self.x, self.y, self.z, self.mask)
    print("Drainage basin computation: done")

    self.mask_computed = True

    plt.figure(1)
    plt.pcolormesh(self.x, self.y, self.mask)
    plt.contour(self.x, self.y, self.z, colors='black')
    plt.axis('tight')
    plt.axes().set_aspect('equal')
    plt.show()
def get_terminus(self):
    from matplotlib.widgets import Cursor

    if self.mask_computed:
        self.mask = dbg.initialize_mask(self.thk)
        plt.clf()
        plt.pcolormesh(self.x, self.y, self.mask)
        plt.contour(self.x, self.y, self.z, colors='black')
        plt.axis('tight')
        plt.axes().set_aspect('equal')
        plt.draw()

    plt.setp(plt.gca(), autoscale_on=False)

    cursor = Cursor(plt.axes(), useblit=True, color='white', linewidth=1)

    if self.ph is not None and not self.mask_computed:
        for p in self.ph:
            p.remove()
        self.ph = None

    pts = []
    while len(pts) < 4:
        pts = np.asarray(plt.ginput(4, timeout=-1))

    self.ph = plt.fill(pts[:, 0], pts[:, 1], 'white', lw=2, alpha=0.5)
    plt.draw()

    self.pts = pts
    self.mask_computed = False
def plot_tiled_corr(C, n, cmap='seismic', midpoint_norm=True, midpoint=0., title=None):
    N = n**2

    # reshape to four dimensions
    C4d = C.reshape(n, n, n, n)

    # create tiled matrix
    C_tiled = np.zeros((N, N))
    for i in range(n):
        for j in range(n):
            C_tiled[n*i:n*(i+1), n*j:n*(j+1)] = C4d[i, j, :, :]

    # plot correlation matrix
    fig = pl.figure(figsize=(12, 10))
    pl.subplot(111, aspect='equal')
    pl.subplots_adjust(left=0.05, right=0.95, top=0.95, bottom=0.05)
    noframe()
    if midpoint_norm:
        pl.pcolormesh(C_tiled, cmap=cmap, norm=MidPointNorm(midpoint=midpoint))
    else:
        pl.pcolormesh(C_tiled, cmap=cmap)

    # draw separating lines
    for i in range(n):
        pl.axhline(y=i*n, color='k')
        pl.axvline(x=i*n, color='k')

    colorbar()
    custom_axes()

    if title is not None:
        fig.canvas.set_window_title(title)

    return C_tiled
def plot_2dfourier_coeffs(signal, num_comp=5, norm=Normalize(), vmin=np.NaN, vmax=np.NaN):
    """
    Plots 2D Fourier coefficients of an hexagonal grid
    """
    ran = np.arange(-num_comp, num_comp + 2) - 0.5
    X, Y = np.meshgrid(ran, ran)
    zero_idx = (len(signal) - 1) // 2  # integer index of the zero-frequency component
    signal_slice = signal[zero_idx - num_comp:zero_idx + num_comp + 1,
                          zero_idx - num_comp:zero_idx + num_comp + 1]
    sig_min, sig_max = minmax(signal_slice, dec=0)
    if vmin is np.NaN:
        vmin = sig_min
    if vmax is np.NaN:
        vmax = sig_max
    custom_axes()
    pl.pcolormesh(X, Y, signal_slice, cmap='gist_yarg', norm=norm,
                  rasterized=True, vmin=vmin, vmax=vmax)
    pl.xlim([-num_comp - 0.5, num_comp + 0.5])
    pl.ylim([-num_comp - 0.5, num_comp + 0.5])
def the_plot():
    x, y = linspace(0, 2*pi, 100), linspace(-2, 2, 100)
    X, Y = make_grid(x, y)
    Z = ff(X, Y)
    pl.clf()
    pl.pcolormesh(X, Y, Z)
    pl.colorbar()
def plot_map_sampes(data, random=False, num_samples=16, map_idxs=None, plot_colorbar=False):
    num_maps = data.shape[0]
    nx = int(np.sqrt(data.shape[1]))
    if map_idxs is None:
        if random is True:
            map_idxs = np.random.randint(0, num_maps, num_samples)
        else:
            map_idxs = np.arange(num_samples)
    else:
        num_samples = len(map_idxs)

    nsx = int(np.ceil(np.sqrt(num_samples)))
    nsy = int(np.floor(np.sqrt(num_samples)))

    pl.figure(figsize=(8, 8))
    for idx, map_idx in enumerate(map_idxs):
        pl.subplot(nsx, nsy, idx + 1, aspect='equal')
        noframe()
        pl.pcolormesh(data[map_idx, :].reshape(nx, nx).T)
        if plot_colorbar:
            colorbar()
def i2():
    # interp2d -- do it on a structured grid, but use the calling
    # convention for an unstructured grid.
    xx = linspace(0, 2*pi, 15)
    yy = linspace(-2, 2, 14)
    X, Y = make_grid(xx, yy)
    Z = ff(X, Y)
    # The output shapes are
    # fint: ()   ()   => (1,)
    # fint: (n,) ()   => (n,)
    # fint: ()   (m,) => (1,m)
    # fint: (n,) (m,) => (n,m)
    #
    # Linear interpolation is all messed up.  Can't get sensible results.
    # Cubic seems extremely sensitive to the exact number of data points.
    # Doesn't respect bounds_error
    # Doesn't respect fill_value
    # Doesn't respect copy
    fint = scipy.interpolate.interp2d(X, Y, Z, kind='quintic')
    xfine = linspace(-2, 2*pi+2, 99)
    yfine = linspace(-4, 4, 101)
    XF, YF = make_grid(xfine, yfine)
    # NB -- TRANSPOSE HERE!
    ZF = fint(xfine, yfine).transpose()
    pl.clf()
    pl.pcolormesh(XF, YF, ZF)
    pl.colorbar()
def plot_occupancy(self):
    import pylab as pl
    from plotlib import custom_axes

    p_hist, x, y = np.histogram2d(self.pos[self.pidx_vect, 0],
                                  self.pos[self.pidx_vect, 1],
                                  range=[[-self.L / 2, self.L / 2],
                                         [-self.L / 2, self.L / 2]],
                                  bins=50)
    pl.figure()

    # plot the results
    pl.subplot(111, aspect='equal')
    custom_axes()
    pl.xlim(-self.L / 2, self.L / 2)
    pl.ylim(-self.L / 2, self.L / 2)
    pl.pcolormesh(x, y, p_hist)
    pl.colorbar()
    pl.xlabel('X bin')
    pl.ylabel('Y bin')
    pl.title('Visits')

    pl.figure()
    pl.hist(self.pidx_vect, color='k', bins=100)

    pl.figure()
    pl.plot(self.pidx_vect, '-k')
def i6():
    # Rbf: this works great except that somewhat weird stuff happens
    # when you're off the grid.
    r, dr = 10, 4
    dth = dr/(1.0*r)
    rr = linspace(r, r+dr, 25)
    th = linspace(-0.5*dth, 0.5*dth, 26)
    R, TH = make_grid(rr, th)
    X, Y = R*cos(TH), R*sin(TH)
    Z = ff(X, Y)

    xlist, ylist, zlist = X.ravel(), Y.ravel(), Z.ravel()
    # function, epsilon, smooth, norm,
    fint = scipy.interpolate.Rbf(xlist, ylist, zlist)

    xfine = linspace(-10+r, r+dr+10, 99)
    yfine = linspace(-10-0.5*dr, 10+0.5*dr, 101)
    XF, YF = make_grid(xfine, yfine)
    ZF = fint(XF, YF)
    pl.clf()
    pl.pcolormesh(XF, YF, ZF)
    pl.colorbar()
    return ZF
def show_data_on_mesh_after_classify(n_classes, n_items_in_class, k):
    # Create a mesh of nodes that covers all the train cases
    def generate_test_mesh(input_data):
        x_min = min([input_data[i][0][0] for i in range(len(input_data))]) - 1.0
        x_max = max([input_data[i][0][0] for i in range(len(input_data))]) + 1.0
        y_min = min([input_data[i][0][1] for i in range(len(input_data))]) - 1.0
        y_max = max([input_data[i][0][1] for i in range(len(input_data))]) + 1.0
        h = 0.05
        test_x, test_y = np.meshgrid(np.arange(x_min, x_max, h),
                                     np.arange(y_min, y_max, h))
        return [test_x, test_y]

    train_data = generate_data(n_items_in_class, n_classes)
    test_mesh = generate_test_mesh(train_data)
    test_mesh_labels = knn_classify(
        train_data, zip(test_mesh[0].ravel(), test_mesh[1].ravel()), k, n_classes)
    class_colormap = ListedColormap(['#a84747', '#26ff00', '#5881bf'])
    test_colormap = ListedColormap(['#ffae00', '#00ffc3', '#c9f2ca'])
    pl.pcolormesh(test_mesh[0],
                  test_mesh[1],
                  np.asarray(test_mesh_labels).reshape(test_mesh[0].shape),
                  cmap=test_colormap)
    pl.scatter([train_data[i][0][0] for i in range(len(train_data))],
               [train_data[i][0][1] for i in range(len(train_data))],
               c=[train_data[i][1] for i in range(len(train_data))],
               cmap=class_colormap)
    pl.show()
def __call__(self, n):
    if len(self.f.shape) == 3:
        # f = f[x,v,t], 2 dim in phase space
        ft = self.f[n, :, :]
        pylab.pcolormesh(self.X, self.V, ft.T, cmap='jet')
        pylab.colorbar()
        pylab.clim(0, 0.38)  # for Landau test case
        pylab.grid()
        pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax])
        pylab.xlabel('$x$', fontsize=18)
        pylab.ylabel('$v$', fontsize=18)
        pylab.title('$N_x$ = %d, $N_v$ = %d, $t$ = %2.1f'
                    % (self.x.N, self.v.N, self.it*self.t.width))
        pylab.savefig(self.path + self.filename)
        pylab.clf()
        return None
    if len(self.f.shape) == 2:
        # f = f[x], 1 dim in phase space
        ft = self.f[n, :]
        pylab.plot(self.x.gridvalues, ft, 'ob')
        pylab.grid()
        pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax])
        pylab.xlabel('$x$', fontsize=18)
        pylab.ylabel('$f(x)$', fontsize=18)
        pylab.savefig(self.path + self.filename)
        return None
def plotFrequencyMap(self, llcrnrlon=-119.2, llcrnrlat=23.15, urcrnrlon=-65.68, urcrnrlat=48.7):
    import pylab
    pylab.figure(figsize=(10, 6))
    pylab.subplots_adjust(0, 0, 1, 1)
    bmap = Basemap(projection="lcc",
                   llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
                   urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
                   resolution='l', lat_0=38.5, lat_1=38.5, lon_0=-97.0)
    counts, x, y = self.countMDPoints(bmap)
    bmap.drawcoastlines()
    bmap.drawcountries(1.0)
    bmap.drawstates(0.5)
    pylab.pcolormesh(x, y, counts)
    pylab.colorbar(orientation='horizontal', format='%d', extend='max',
                   fraction=.06, aspect=65, shrink=.6, pad=0)
def make(conditioned, aligned, grafica, time, psd):
    t, f, p = conditioned.whiten(4, 4).qtransform(.001, logfsteps=100,
                                                  qrange=(8, 8), frange=(150, 2048))
    pylab.figure(figsize=[15, 5])
    pylab.title('Peak time qtransform')
    pylab.pcolormesh(t, f, p**0.5, vmin=1, vmax=7)
    pylab.yscale('log')
    pylab.xlabel('Time (s)')
    pylab.ylabel('Frequency (Hz)')
    pylab.xlim(time - 0.05, time + 0.35)
    pylab.show()

    pylab.figure(figsize=[15, 5])
    pylab.title('Power Spectral Density')
    pylab.loglog(psd.sample_frequencies, np.sqrt(psd))
    pylab.ylabel('$Strain^2 / Hz$')
    pylab.xlabel('Frequency (Hz)')
    pylab.grid()
    pylab.xlim(150, 4096)
    pylab.show()
    return
def plotLattice(self):
    # select only the interesting parts
    M = self.maxradius
    grph = self.L - M, self.L + M
    # and plot
    axis = arange(-M, M + 1)
    plot = self.lattice[grph[0]:grph[1], grph[0]:grph[1]]
    pcolormesh(axis, axis, plot)
    axes().set_aspect('equal', 'datalim')
    show()

    util.init_logger(verbose=True)
    with Plotter(verbose=True, dryrun=False) as p:
        xmid = (p.xmin + p.xmax) / 2.0
        ymid = (p.ymin + p.ymax) / 2.0
        xsize = p.xmax - p.xmin
        ysize = p.ymax - p.ymin
        norm_axis = axis / float(max(axis))
        scaled_axis = norm_axis * 0.3 * ysize
        xaxis = scaled_axis + xmid
        yaxis = scaled_axis + ymid
        radius = (scaled_axis[1] - scaled_axis[0]) / 2.0
        for i in range(plot.shape[0]):
            for j in range(plot.shape[1]):
                if plot[i, j] > 0:
                    p.write_circle([xaxis[i], yaxis[j]], radius)
def plotter(p, view=None):
    import pylab
    if len(p) == 1:
        x = p[0]
        r = np.linspace(range[0], range[1], 400)
        pylab.plot(x + r, [fn(v) for v in x + r])
        pylab.xlabel(args[0])
        pylab.ylabel("-log P(%s)" % args[0])
    else:
        r = np.linspace(range[0], range[1], 20)
        x, y = p[args[0]], p[args[1]]
        data = np.empty((len(r), len(r)), 'd')
        for j, xj in enumerate(x + r):
            for k, yk in enumerate(y + r):
                p[args[0]], p[args[1]] = xj, yk
                data[j, k] = fn(p)
        pylab.pcolormesh(x + r, y + r, data)
        pylab.plot(x, y, 'o', hold=True, markersize=6,
                   markerfacecolor='red', markeredgecolor='black',
                   markeredgewidth=1, alpha=0.7)
        pylab.xlabel(args[0])
        pylab.ylabel(args[1])
def plot(self, logz=False, norm=None, zrange=None, **kwargs):
    from matplotlib import colors
    import pylab as plt

    z = np.ma.masked_where(self.z == 0, self.z)

    if norm is None:
        pass
    elif norm == 'x':
        for x in range(z.shape[0]):
            z[x, :] = z[x, :] / z[x, :].sum()
    elif norm == 'y':
        for y in range(z.shape[1]):
            z[:, y] = z[:, y] / z[:, y].sum()
    else:
        raise ValueError("Unknown value for norm: {}".format(norm))

    if zrange is None:
        zrange = (z.min(), z.max())

    if logz:
        norm = colors.LogNorm(vmin=zrange[0], vmax=zrange[1])
    else:
        norm = colors.Normalize(vmin=zrange[0], vmax=zrange[1])

    plt.pcolormesh(self.xedges, self.yedges, z.T, norm=norm)
    plt.colorbar()
    plt.xlim(self.xedges[0], self.xedges[-1])
    plt.ylim(self.yedges[0], self.yedges[-1])
    if self.logx:
        plt.xscale('log')
    if self.logy:
        plt.yscale('log')
def draw(figure):
    global resources, time, theWorld, cmap, files
    PL.cla()
    cmap.set_under()
    PL.pcolormesh(theWorld.foraging_resources, cmap=cmap, vmin=0, vmax=W.max_resource)
    PL.axis('scaled')
    PL.hold(True)
    xyp = zip(*[theWorld.hh_locations[hh] for hh in theWorld.households])
    xy = [list(t) for t in xyp]
    if len(xy) > 0:
        x = [i + 0.5 for i in xy[0]]
        y = [i + 0.5 for i in xy[1]]
        lineage = [hh.lineage for hh in theWorld.households]
        hh_size = [20 * hh.size() for hh in theWorld.households]
        PL.scatter(y, x, c=lineage, s=hh_size, vmin=0, vmax=W.starting_agents,
                   cmap=plt.get_cmap('hsv'))
        message = r't = {0} Pop.: {1} HHs: {2} max HHs: {3}'
        PL.title(message.format(time, theWorld.population,
                                len(theWorld.households), max(lineage)))
    PL.hold(False)
    figure.tight_layout()
    if MOVIES:
        fname = dirName + ('\\_temp%05d.png' % time)
        figure.savefig(fname)
        files.append(fname)
    drawPlots()
def hess_plot(targ_ra, targ_dec, data, iso, g_radius, nbhd):
    """Hess plot"""
    filter = filters.star_filter(survey, data)

    plt.title('Hess')

    c1 = SkyCoord(targ_ra, targ_dec, unit='deg')

    r_near = 2. * g_radius  # annulus begins at 2*g_radius away from centroid
    r_far = np.sqrt(5.) * g_radius  # annulus has same area as inner area

    #inner = (c1.separation(SkyCoord(data[basis_1], data[basis_2], unit='deg')).deg < g_radius)
    #outer = (c1.separation(SkyCoord(data[basis_1], data[basis_2], unit='deg')).deg > r_near) & (c1.separation(SkyCoord(data[basis_1], data[basis_2], unit='deg')).deg < r_far)
    angsep = ugali.utils.projector.angsep(targ_ra, targ_dec, data[basis_1], data[basis_2])
    inner = (angsep < g_radius)
    outer = ((angsep > r_near) & (angsep < r_far))

    xbins = np.arange(-0.5, 1.1, 0.1)
    ybins = np.arange(16., mag_max + 0.5, 0.5)

    foreground = np.histogram2d(data[mag_dered_1][inner & filter] - data[mag_dered_2][inner & filter],
                                data[mag_dered_1][inner & filter], bins=[xbins, ybins])
    background = np.histogram2d(data[mag_dered_1][outer & filter] - data[mag_dered_2][outer & filter],
                                data[mag_dered_1][outer & filter], bins=[xbins, ybins])

    fg = foreground[0].T
    bg = background[0].T

    fg_abs = np.absolute(fg)
    bg_abs = np.absolute(bg)

    mask_abs = fg_abs + bg_abs
    mask_abs[mask_abs == 0.] = np.nan  # mask significant zeroes

    signal = fg - bg
    signal = np.ma.array(signal, mask=np.isnan(mask_abs))  # mask nan

    cmap = matplotlib.cm.viridis
    cmap.set_bad('w', 1.)
    plt.pcolormesh(xbins, ybins, signal, cmap=cmap)

    plt.colorbar()

    ugali.utils.plotting.drawIsochrone(iso, lw=2, c='k', zorder=10, label='Isochrone')

    plt.axis([-0.5, 1.0, 16, mag_max])
    plt.gca().invert_yaxis()
    plt.gca().set_aspect(1. / 4.)
    plt.xlabel('{} - {} (mag)'.format(band_1.lower(), band_2.lower()))
    plt.ylabel('{} (mag)'.format(band_1.lower()))
def plot_results_with_hyperplane(clf, clf_name, df, plt_nmbr, debug=False):
    x_min, x_max = df.x.min() - .5, df.x.max() + .5
    y_min, y_max = df.y.min() - .5, df.y.max() + .5

    # step between points, i.e. [0, 0.02, 0.04, ...]
    step = 0.02
    # to plot the boundary, we're going to create a matrix of every possible point
    # then label each point as a wolf or cow using our classifiers
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    # this gets our predictions back into a matrix
    Z = Z.reshape(xx.shape)

    # create subplot (we're going to have more than 1 plot on a given image)
    pl.subplot(2, 2, plt_nmbr)
    # plot the boundaries
    pl.pcolormesh(xx, yy, Z, cmap=pl.cm.Paired)

    # plot the wolves and cows
    for animal in df.animal.unique():
        pl.scatter(df[df.animal == animal].x,
                   df[df.animal == animal].y,
                   marker=animal,
                   label="cows" if animal == "x" else "wolves",
                   color='black')
    pl.title(clf_name)
    pl.legend(loc='best')
def plot_iris_classification(classifier=None, **kwargs):
    if classifier is None:
        classifier = neighbors.KNeighborsClassifier

    iris = datasets.load_iris()
    X = iris.data[:, :2]  # we only take the first two features. We could
                          # avoid this ugly slicing by using a two-dim dataset
    y = iris.target

    knn = classifier(**kwargs)
    knn.fit(X, y)

    x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
    y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
                         np.linspace(y_min, y_max, 100))
    Z = knn.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure()
    pl.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
    pl.xlabel('sepal length (cm)')
    pl.ylabel('sepal width (cm)')
    pl.axis('tight')
def plot_prediction_grid(xx, yy, prediction_grid, filename):
    """ Plot KNN predictions for every point on the grid."""
    from matplotlib.colors import ListedColormap
    background_colormap = ListedColormap(
        ["hotpink", "lightskyblue", "yellowgreen"])
    observation_colormap = ListedColormap(["red", "blue", "green"])
    plt.figure(figsize=(15, 10))
    plt.pcolormesh(xx, yy, prediction_grid,
                   cmap=background_colormap, alpha=0.5, rasterized=True)
    plt.scatter(predictors[:, 0], predictors[:, 1],
                c=outcomes, cmap=observation_colormap, s=100)
    plt.xlabel('Variable 1')
    plt.ylabel('Variable 2')
    plt.xticks(())
    plt.yticks(())
    plt.xlim(np.min(xx), np.max(xx))
    plt.ylim(np.min(yy), np.max(yy))
    plt.savefig(filename)
def showDataOnMesh(nClasses, nItemsInClass, k):
    # Generate a mesh of nodes that covers all train cases
    def generateTestMesh(trainData):
        x_min = min([trainData[i][0][0] for i in range(len(trainData))]) - 1.0
        x_max = max([trainData[i][0][0] for i in range(len(trainData))]) + 1.0
        y_min = min([trainData[i][0][1] for i in range(len(trainData))]) - 1.0
        y_max = max([trainData[i][0][1] for i in range(len(trainData))]) + 1.0
        h = 0.05
        testX, testY = np.meshgrid(np.arange(x_min, x_max, h),
                                   np.arange(y_min, y_max, h))
        return [testX, testY]

    trainData = generateData(nItemsInClass, nClasses)
    testMesh = generateTestMesh(trainData)
    testMeshLabels = classifyKNN(trainData, zip(testMesh[0].ravel(), testMesh[1].ravel()), k, nClasses)
    classColormap = ListedColormap(['#FF0000', '#00FF00', '#FFFFFF'])
    testColormap = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAAA'])
    pl.pcolormesh(testMesh[0],
                  testMesh[1],
                  np.asarray(testMeshLabels).reshape(testMesh[0].shape),
                  cmap=testColormap)
    pl.scatter([trainData[i][0][0] for i in range(len(trainData))],
               [trainData[i][0][1] for i in range(len(trainData))],
               c=[trainData[i][1] for i in range(len(trainData))],
               cmap=classColormap)
    pl.show()
def plotLL(fname='out4.npy'):
    plt.figure()
    h = np.linspace(0, 1, 21)
    g = np.linspace(0, 1, 21)
    m = np.linspace(0, 2, 21)
    d = np.linspace(0, 2, 21)
    out = np.load(fname)
    print(np.nanmax(out), np.nanmin(out))
    rang = np.nanmax(out) - np.nanmin(out)
    maxloc = np.squeeze(np.array((np.nanmax(out) == out).nonzero()))
    H, G = np.meshgrid(h, g)
    print(maxloc)
    for mm in range(m.size // 2):
        for dd in range(d.size // 2):
            plt.subplot(10, 10, (9 - mm) * 10 + dd + 1)
            plt.pcolormesh(h, g, out[:, :, mm*2, dd*2].T,
                           vmax=np.nanmax(out), vmin=np.nanmax(out) - rang/4.)
            plt.gca().set_xticks([])
            plt.gca().set_yticks([])
            if mm == maxloc[2] // 2 and dd == maxloc[3] // 2:
                plt.plot(h[maxloc[0]], g[maxloc[1]], 'ow', ms=8)
            if dd == 0:
                print(mm, dd)
                plt.ylabel('%.1f' % m[mm*2])
            if mm == 0:
                plt.xlabel('%.1f' % d[dd*2])
    plt.title(fname[:6])
def plot_iris_nb():
    iris = datasets.load_iris()
    X = iris.data[:, :2]  # we only take the first two features. We could
                          # avoid this ugly slicing by using a two-dim dataset
    y = iris.target

    x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
    y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
                         np.linspace(y_min, y_max, 100))

    nb = naive_bayes.GaussianNB()
    nb.fit(X, y)
    Z = nb.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure()
    pl.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
    pl.xlabel('sepal length (cm)')
    pl.ylabel('sepal width (cm)')
    pl.axis('tight')
def outside_juristiction(self, data):
    data = copy.deepcopy(data)
    xynames = ['X', 'Y']
    data['Prediction'] = pandas.Series(self.clf.predict(data[xynames]))
    data = data[data.PdDistrictInt != data.Prediction]
    data = data.reset_index(drop=True)
    data['Prob'] = pandas.Series(
        [max(x) for x in self.clf.predict_proba(data[xynames])]
    )
    data = data[data.Prob > 0.95].reset_index(drop=True)

    fig = pl.figure()
    x_min, x_max = data.X.min() - 0.01, data.X.max() + 0.01
    y_min, y_max = data.Y.min() - 0.01, data.Y.max() + 0.01
    xx, yy = np.meshgrid(
        np.arange(x_min, x_max, self.step),
        np.arange(y_min, y_max, self.step)
    )
    Z = self.clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)

    pl.xlim(xx.min(), xx.max())
    pl.ylim(yy.min(), yy.max())
    pl.pcolormesh(xx, yy, Z, cmap='jet', alpha=1.0)
    pl.scatter(data.X, data.Y, c=data.PdDistrictInt, cmap='jet', alpha=1.0)
    pl.savefig('plots/knn_PD_2.png')
    pl.close(fig)
    return data
def plot_iris_knn():
    iris = datasets.load_iris()
    X = iris.data[:, :2]  # we only take the first two features. We could
                          # avoid this ugly slicing by using a two-dim dataset
    y = iris.target

    knn = neighbors.KNeighborsClassifier(n_neighbors=3)
    knn.fit(X, y)

    x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
    y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
                         np.linspace(y_min, y_max, 100))
    Z = knn.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure()
    pl.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
    pl.xlabel('sepal length (cm)')
    pl.ylabel('sepal width (cm)')
    pl.axis('tight')
def mainCheck(argv):
    '''
    Check pre-calculated representations
    '''
    #FIXME: refresh or simply delete
    raise SystemExit
    print('>> SPECIAL MODE: FEATURE CHECK')
    print(' ', f'check file {sys.argv[2]} for', f'{sys.argv[3]} samples')
    from collections import Counter
    from pprint import pprint
    cnnfeat = pickle.load(open(sys.argv[2], 'rb'))
    samples = np.concatenate([cnnfeat[k] for k in
                              random.choices(list(cnnfeat.keys()), k=int(sys.argv[3]))])
    #samples = np.concatenate([v for k, v in cnnfeat.items()])
    samples = np.clip(samples, a_min=0, a_max=None)
    ctr = Counter()
    ctr.update(np.argmax(samples, axis=1))
    pprint(ctr)
    print(featstat(samples))
    print('var axis 0', np.var(samples, axis=0).mean())
    print('var axis 1', np.var(samples, axis=1).mean())
    pylab.pcolormesh(samples, cmap='cool')
    pylab.colorbar()
    pylab.show()

    print('> TEST')
    data = CocoVRepDataset('./coco.all.res18')
    print(len(data), data[0])
    for cnnfeat, imageid in data:
        pass
    print('> test ok')
    exit()
def plotTiming(vortex_prob, vortex_times, times, map, grid_spacing, tornado_track,
               title, file_name, obs=None, centers=None, min_prob=0.1):
    nx, ny = vortex_prob.shape
    gs_x, gs_y = grid_spacing

    xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))

    time_color_map = matplotlib.cm.Accent
    time_color_map.set_under('#ffffff')

    vortex_times = np.where(vortex_prob >= min_prob, vortex_times, -1)

    track_xs, track_ys = map(*reversed(tornado_track))

    pylab.figure(figsize=(10, 8))
    pylab.axes((0.025, 0.025, 0.95, 0.925))

    pylab.pcolormesh(xs, ys, vortex_times, cmap=time_color_map,
                     vmin=times.min(), vmax=times.max())

    tick_labels = [(datetime(2009, 6, 5, 18, 0, 0) + timedelta(seconds=int(t))).strftime("%H%M")
                   for t in times]
    bar = pylab.colorbar()  # orientation='horizontal', aspect=40)
    bar.locator = FixedLocator(times)
    bar.formatter = FixedFormatter(tick_labels)
    bar.update_ticks()

    pylab.plot(track_xs, track_ys, 'mv-', lw=2.5, mfc='k', ms=8)

    drawPolitical(map, scale_len=(xs[-1, -1] - xs[0, 0]) / 10.)

    pylab.title(title)
    pylab.savefig(file_name)
    pylab.close()
def plotter(view=None, **kw):
    x, y = kw[args[0]], kw[args[1]]
    r = linspace(range[0], range[1], 200)
    X, Y = meshgrid(x+r, y+r)
    kw['x'], kw['y'] = X, Y
    pylab.pcolormesh(x+r, y+r, nllf(**kw))
    pylab.plot(x, y, 'o', hold=True, markersize=6,
               markerfacecolor='red', markeredgecolor='black',
               markeredgewidth=1, alpha=0.7)
def SVCTwoDValidationCurve(DataTable, Gammavec, Cvec, kernel_, n):
    acc_train, acc_val, trainMinusValidation = [], [], []
    for thisGamma in Gammavec:
        for thisC in Cvec:
            X, classifier = (DataTable[0:n-1, 1:].astype(float),
                             DataTable[0:n-1, 0].astype(int))
            X_val, classifier_val = (DataTable[n:, 1:].astype(float),
                                     DataTable[n:, 0].astype(int))
            if kernel_ == 'rbf':
                TrainedModel = SVC(C=thisC, kernel=kernel_, gamma=thisGamma).fit(X, classifier)
                #acc_train.append(TrainedModel.score(X, classifier))
                #acc_val.append(TrainedModel.score(X_val, classifier_val))
                trainMinusValidation.append(TrainedModel.score(X, classifier)
                                            - TrainedModel.score(X_val, classifier_val))

    X, Y = np.meshgrid(Cvec, Gammavec)
    #trainMinusValidation = np.array(acc_train) - np.array(acc_val)
    fig = plt.figure()
    #ax = plt.gca()
    #train = ax.scatter(Gammavec, acc_train, color='red')
    #crossval = ax.scatter(Gammavec, acc_val, color='blue')
    #ax.set_yscale('log')
    # reshape the flat score list onto the (gamma, C) grid before plotting
    plt.pcolormesh(X, Y, np.array(trainMinusValidation).reshape(len(Gammavec), len(Cvec)))
    plt.colorbar()  # need a colorbar to show the intensity scale
    plt.xlabel("Value of parameter C")
    plt.ylabel("Value of parameter gamma")
    #plt.legend([train, crossval], ["Training accuracy", "Validation accuracy"])
    fig.savefig('validation2DCurve.png')
def plot_knn_boundary():
    ## Training dataset preparation
    # use sklearn iris dataset
    iris_dataset = datasets.load_iris()
    # first two dimensions as the features
    # it's easy to plot boundary in 2D
    train_data = iris_dataset.data[:, :2]
    print("init:", train_data)
    # get labels
    labels = iris_dataset.target  # labels
    print("init2:", labels)

    ## Test dataset preparation
    h = 0.1
    x0_min = train_data[:, 0].min() - 0.5
    x0_max = train_data[:, 0].max() + 0.5
    x1_min = train_data[:, 1].min() - 0.5
    x1_max = train_data[:, 1].max() + 0.5
    x0_features, x1_features = np.meshgrid(np.arange(x0_min, x0_max, h),
                                           np.arange(x1_min, x1_max, h))
    # test dataset are samples from the whole regions of feature domains
    test_data = np.c_[x0_features.ravel(), x1_features.ravel()]

    ## KNN classification
    p_labels = []  # prediction labels
    for test_sample in test_data:
        # knn prediction
        p_label = knn_predict(train_data, labels, test_sample, n_neighbors=6)
        p_labels.append(p_label)
    # list to array
    p_labels = np.array(p_labels)
    p_labels = p_labels.reshape(x0_features.shape)

    ## Boundary plotting
    pl.figure(1)
    pl.set_cmap(pl.cm.Paired)
    pl.pcolormesh(x0_features, x1_features, p_labels)
    pl.scatter(train_data[:, 0], train_data[:, 1], c=labels)
    # x and y axis labels
    pl.xlabel('feature 0')
    pl.ylabel('feature 1')
    # set the x and y axis limits
    pl.xlim(x0_features.min(), x0_features.max())
    pl.ylim(x1_features.min(), x1_features.max())
    # set the x and y axis ticks
    pl.xticks(())
    pl.yticks(())
    pl.show()
def showDataOnClass(nClasses, nItemsInClass, k):
    def generateTestMesh(trainData):
        x = []
        y = []
        for i in range(len(trainData)):
            x.append(trainData[i][0][0])
            y.append(trainData[i][0][1])
        x_min = min(x)
        x_max = max(x) - min(x)
        y_min = min(y)
        y_max = max(y) - min(y)
        new_X, new_Y = np.meshgrid(np.arange(x_min, x_max),
                                   np.arange(y_min, y_max))
        return [new_X, new_Y]

    trainData = generateData(nItemsInClass, nClasses)
    testMesh = generateTestMesh(trainData)
    testMeshLabels = classifyKNN(trainData, zip(testMesh[0].ravel(), testMesh[1].ravel()), k, nClasses)
    classColormap = ListedColormap(['#FF0000', '#00FF00', '#FFFFFF'])
    testColormap = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAAA'])
    pl.pcolormesh(testMesh[0],
                  testMesh[1],
                  np.asarray(testMeshLabels).reshape(testMesh[0].shape),
                  cmap=testColormap)
    pl.scatter([trainData[i][0][0] for i in range(len(trainData))],
               [trainData[i][0][1] for i in range(len(trainData))],
               c=[trainData[i][1] for i in range(len(trainData))],
               cmap=classColormap)
    pl.show()
def play(self):
    """ Play Conway's Game of Life. """
    pylab.pcolormesh(self.old_grid)
    pylab.colorbar()
    pylab.savefig("generation0.png")

    t = 1
    write_frequency = 5
    while t <= self.T:
        print("At time level %d" % t)

        for i in range(self.N):
            for j in range(self.N):
                live = self.live_neighbours(i, j)
                if (self.old_grid[i][j] == 1 and live < 2):
                    self.new_grid[i][j] = 0
                elif (self.old_grid[i][j] == 1 and (live == 2 or live == 3)):
                    self.new_grid[i][j] = 1
                elif (self.old_grid[i][j] == 1 and live > 3):
                    self.new_grid[i][j] = 0
                elif (self.old_grid[i][j] == 0 and live == 3):
                    self.new_grid[i][j] = 1

        if (t % write_frequency == 0):
            pylab.pcolormesh(self.new_grid)
            pylab.savefig("generation%d.png" % t)

        self.old_grid = self.new_grid.copy()
        t += 1
def showDataOnMesh(nClasses, nItemsInClass, k):
    # Generate a mesh of nodes that covers all train cases
    def generateTestMesh(trainData):
        x_min = min([trainData[i][0][0] for i in range(len(trainData))]) - 1.0
        x_max = max([trainData[i][0][0] for i in range(len(trainData))]) + 1.0
        y_min = min([trainData[i][0][1] for i in range(len(trainData))]) - 1.0
        y_max = max([trainData[i][0][1] for i in range(len(trainData))]) + 1.0
        h = 0.05
        testX, testY = numpy.meshgrid(numpy.arange(x_min, x_max, h),
                                      numpy.arange(y_min, y_max, h))
        return [testX, testY]

    trainData = generateData(nItemsInClass, nClasses)
    testMesh = generateTestMesh(trainData)
    testMeshLabels = classifyKNN(trainData, zip(testMesh[0].ravel(), testMesh[1].ravel()), k, nClasses)
    classColormap = ListedColormap(['#FF0000', '#00FF00', '#FFFFFF'])
    testColormap = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAAA'])
    pylab.pcolormesh(testMesh[0],
                     testMesh[1],
                     numpy.asarray(testMeshLabels).reshape(testMesh[0].shape),
                     cmap=testColormap)
    pylab.scatter([trainData[i][0][0] for i in range(len(trainData))],
                  [trainData[i][0][1] for i in range(len(trainData))],
                  c=[trainData[i][1] for i in range(len(trainData))],
                  cmap=classColormap)
    pylab.show()
def plotGridPref(gridscore, clfName, obj, metric='roc_auc'):
    '''
    Plot Grid Performance
    '''
    # data_path = data_path+obj+'/'+clfName+'_opt.h5'
    # f = hp.File(data_path, 'r')
    # gridscore = f['grids_score'].value

    # Get numblocks
    CV = np.unique(gridscore["i_CV"])
    folds = np.unique(gridscore["i_fold"])
    numblocks = len(CV) * len(folds)

    paramNames = list(gridscore.dtype.fields.keys())
    paramNames.remove("mean_validation_score")
    paramNames.remove("std")
    paramNames.remove("i_CV")
    paramNames.remove("i_fold")

    score = gridscore["mean_validation_score"]
    std = gridscore["std"]
    newgridscore = gridscore[paramNames]
    num_params = len(paramNames)

    ### get index of hit ###
    hitindex = []
    n_iter = len(score) // numblocks
    for k in range(numblocks):
        hit0index = np.argmax(score[k*n_iter: (k+1)*n_iter])
        hitindex.append(k*n_iter + hit0index)

    for m in range(num_params-1):
        i = paramNames[m]
        x = newgridscore[i]
        for n in range(m+1, num_params):
            # for j in list(set(paramNames) - set([i])):
            j = paramNames[n]
            y = newgridscore[j]
            compound = [x, y]
            # Only plot heat map if dtype of all elements of x, y are int or float
            if all(np.issubdtype(t.dtype, np.floating) or np.issubdtype(t.dtype, np.integer)
                   for t in compound):
                gridsize = 50
                fig = pl.figure()
                points = np.vstack([x, y]).T

                ##### Construct MeshGrids ##########
                xnew = np.linspace(max(x), min(x), gridsize)
                ynew = np.linspace(max(y), min(y), gridsize)
                X, Y = np.meshgrid(xnew, ynew)

                ##### Interpolate Z on top of MeshGrids #######
                Z = griddata(points, score, (X, Y), method="cubic", tol=1e-2)
                z_min = min(score)
                z_max = max(score)
                pl.pcolormesh(X, Y, Z, cmap='RdBu', vmin=z_min, vmax=z_max)
                pl.axis([x.min(), x.max(), y.min(), y.max()])
                pl.xlabel(i, fontsize=30)
                pl.ylabel(j, fontsize=30)
                cb = pl.colorbar()
                cb.set_label(metric, fontsize=30)

                ##### Mark the "hit" points #######
                hitx = x[hitindex]
                hity = y[hitindex]
                pl.plot(hitx, hity, 'rx')

                # Save the plot
                save_path = plot_path + obj + '/' + clfName + '_' + metric + '_' + i + '_' + j + '.pdf'
                fig.savefig(save_path)
def visualProfile(m, X, y):
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    x_min, x_max = X[:, 0].min(), X[:, 0].max()
    y_min, y_max = X[:, 1].min(), X[:, 1].max()
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 1), np.arange(y_min, y_max, 1))
    Z = m.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure(1, figsize=(4, 3))
    pl.pcolormesh(xx, yy, Z, cmap=pl.cm.Paired)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=y, edgecolors='k', cmap=pl.cm.Paired)
    pl.xlabel('Assists')
    pl.ylabel('Points')

    pl.xlim(xx.min(), xx.max())
    pl.ylim(yy.min(), yy.max())
    pl.xticks(())
    pl.yticks(())

    pl.show()
def showDataOnMesh(k):
    # Generate a mesh of nodes that covers all train cases
    def generateTestMesh(trainData):
        border_offset = 0.5
        x_min = min([trainData[i][0][0] for i in range(len(trainData))]) - border_offset
        x_max = max([trainData[i][0][0] for i in range(len(trainData))]) + border_offset
        y_min = min([trainData[i][0][1] for i in range(len(trainData))]) - border_offset
        y_max = max([trainData[i][0][1] for i in range(len(trainData))]) + border_offset
        h = 0.1
        testX, testY = np.meshgrid(np.arange(x_min, x_max, h),
                                   np.arange(y_min, y_max, h))
        return [testX, testY]

    trainData = generateData()
    testMesh = generateTestMesh(trainData)
    testMeshLabels = classifyKNN(trainData, zip(testMesh[0].ravel(), testMesh[1].ravel()), k)
    classColormap = ListedColormap(['#FF9900', '#00FF00'])
    testColormap = ListedColormap(['#FFCCAA', '#AAFFAA'])
    pl.ion()
    pl.pcolormesh(testMesh[0],
                  testMesh[1],
                  np.asarray(testMeshLabels).reshape(testMesh[0].shape),
                  cmap=testColormap)
    pl.scatter([trainData[i][0][0] for i in range(len(trainData))],
               [trainData[i][0][1] for i in range(len(trainData))],
               c=[trainData[i][1] for i in range(len(trainData))],
               cmap=classColormap)
    pl.pause(0.05)
def interp2D(bfieldval, smoothval=501, colortype=yellowredblue, n=4096):
    parseData()
    data = getData()
    cmlabel = r'Normalized $\Delta$R$_D$ (k$\Omega$)'
    data = [x for (y, x) in sorted(zip(bfieldval, data), key=lambda pair: pair[0])]
    bval = sorted(bfieldval)

    # clip all sweeps to the gate-voltage range they have in common
    xmin = -np.inf  # largest left endpoint seen so far
    xmax = 0
    for i in data:
        if i[0][0] > xmin:
            xmin = i[0][0]
        if i[0][-1] < xmax:
            xmax = i[0][-1]
    xmin += 0.5
    xmax -= 0.5

    xv = np.linspace(xmin, xmax, n)
    intv = list()
    if smoothval != 0:
        for i in data:
            dataclip = clip(i, xmin, xmax)
            #dataclip[1] = SG.savitzky_golay(dataclip[1],1751,3)
            tck = interpolate.splrep(dataclip[0], dataclip[1])
            y = interpolate.splev(xv, tck)
            y = SG.savitzky_golay(y, smoothval, 3)
            intv.append(normalize(y))
    else:
        for i in data:
            dataclip = clip(i, xmin, xmax)
            tck = interpolate.splrep(dataclip[0], dataclip[1])
            y = interpolate.splev(xv, tck)
            intv.append(normalize(y))

    xb = np.linspace(bval[0], bval[-1], n)
    b = list()
    for i in range(len(intv[0])):
        hold = list()
        for j in range(len(intv)):
            hold.append(intv[j][i])
        b.append(hold)

    intb = list()
    for i in b:
        tck = interpolate.splrep(bval, i)
        y = interpolate.splev(xb, tck)
        intb.append(y)

    Z = np.array(intb)
    fig = pylab.figure('ColorMap')
    X, Y = pylab.meshgrid(xb, xv)
    pylab.pcolormesh(X, Y, Z, vmin=0, vmax=1, cmap=colortype)
    pylab.ylim(xmin, xmax)
    pylab.xlim(bval[0], bval[-1])
    pylab.ylabel('Gate Voltage (mV)')
    pylab.xlabel('B Field (T)')
    cbar = pylab.colorbar()
    cbar.set_label(cmlabel)
    plt.ticker.ScalarFormatter(useOffset=None)
    pylab.show()
    gc.collect()
    return X, Y, Z
def multiplot(self, jd1=730120.0, djd=60, dt=20):
    if not hasattr(self, 'disci'):
        self.generate_regdiscs()
        self.x = self.disci
        self.y = self.discj
    if not hasattr(self, 'lon'):
        self.ijll()

    figpref.presentation()
    pl.close(1)
    pl.figure(1, (10, 10))

    conmat = self[jd1-730120.0:jd1-730120.0+60, dt:dt+10]
    x, y = self.gcm.mp(self.lon, self.lat)
    self.gcm.mp.merid = []
    self.gcm.mp.paral = []

    pl.subplots_adjust(wspace=0, hspace=0, top=0.95)

    pl.subplot(2, 2, 1)
    pl.pcolormesh(miv(conmat), cmap=cm.hot)
    pl.clim(0, 250)
    pl.plot([0, 800], [0, 800], 'g', lw=2)
    pl.gca().set_aspect(1)
    pl.setp(pl.gca(), yticklabels=[])
    pl.setp(pl.gca(), xticklabels=[])
    pl.colorbar(aspect=40, orientation='horizontal',
                pad=0, shrink=.8, fraction=0.05, ticks=[0, 50, 100, 150, 200])

    pl.subplot(2, 2, 2)
    colorvec = (np.nansum(conmat, axis=1) - np.nansum(conmat, axis=0))[1:]
    self.gcm.mp.scatter(x, y, 10, 'w', edgecolor='k')
    self.gcm.mp.scatter(x, y, 10, colorvec)
    self.gcm.mp.nice()
    pl.clim(0, 10000)

    pl.subplot(2, 2, 3)
    colorvec = np.nansum(conmat, axis=1)[1:]
    self.gcm.mp.scatter(x, y, 10, 'w', edgecolor='k')
    self.gcm.mp.scatter(x, y, 10, colorvec)
    self.gcm.mp.nice()
    pl.clim(0, 10000)

    pl.subplot(2, 2, 4)
    colorvec = np.nansum(conmat, axis=0)[1:]
    self.gcm.mp.scatter(x, y, 10, 'w', edgecolor='k')
    self.gcm.mp.scatter(x, y, 10, colorvec)
    self.gcm.mp.nice()
    pl.clim(0, 10000)

    mycolor.freecbar([0.2, .06, 0.6, 0.020], [2000, 4000, 6000, 8000])
    pl.suptitle("Trajectories seeded from %s to %s, Duration: %i-%i days" %
                (pl.num2date(jd1).strftime("%Y-%m-%d"),
                 pl.num2date(jd1+djd).strftime("%Y-%m-%d"),
                 dt, dt+10))
    pl.savefig('multplot_%i_%03i.png' % (jd1, dt), transparent=True)
def plot_xy_n(self):
    P.figure()
    cmap = P.get_cmap('jet')
    cmap.set_under('white')
    P.pcolormesh(self.x, self.y, self.xy_n.T, vmin=1, cmap=cmap)
    P.xlabel('XFOCAL [mm]')
    P.ylabel('YFOCAL [mm]')
    P.colorbar(extend='min')
def plot_grid(data, title):
    g[0] += 1
    pylab.subplot(nplots, mplots, g[0])
    pylab.pcolormesh(Time, XLengths, data)
    pylab.colorbar()
    pylab.title(title)
    pylab.xlabel("Time (hours)")
    pylab.ylabel("Offset (km)")
def plot_data(lda, X, y, y_pred, fig_index):
    splot = pl.subplot(2, 2, fig_index)
    if fig_index == 1:
        pl.title('Linear Discriminant Analysis')
        pl.ylabel('Data with fixed covariance')
    elif fig_index == 2:
        pl.title('Quadratic Discriminant Analysis')
    elif fig_index == 3:
        pl.ylabel('Data with varying covariances')

    tp = (y == y_pred)  # True Positive
    tp0, tp1, tp2 = tp[y == 0], tp[y == 1], tp[y == 2]
    X0, X1, X2 = X[y == 0], X[y == 1], X[y == 2]
    X0_tp, X0_fp = X0[tp0], X0[~tp0]
    X1_tp, X1_fp = X1[tp1], X1[~tp1]
    X2_tp, X2_fp = X2[tp2], X2[~tp2]
    xmin, xmax = X[:, 0].min(), X[:, 0].max()
    ymin, ymax = X[:, 1].min(), X[:, 1].max()

    # class 0: dots
    pl.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color='red')
    pl.plot(X0_fp[:, 0], X0_fp[:, 1], '.', color='#990000')  # dark red

    # class 1: dots
    pl.plot(X1_tp[:, 0], X1_tp[:, 1], 'o', color='blue')
    pl.plot(X1_fp[:, 0], X1_fp[:, 1], '.', color='#000099')  # dark blue

    # class 2: dots
    pl.plot(X2_tp[:, 0], X2_tp[:, 1], 'o', color='green')
    pl.plot(X2_fp[:, 0], X2_fp[:, 1], '.', color='#009900')  # dark green

    # class 0 and 1 : areas
    nx, ny = 200, 100
    x_min, x_max = pl.xlim()
    y_min, y_max = pl.ylim()
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),
                         np.linspace(y_min, y_max, ny))
    Z = lda.predict_proba(np.c_[xx.ravel(), yy.ravel()])
    Z = Z[:, 1].reshape(xx.shape)
    pl.pcolormesh(xx, yy, Z, cmap='red_blue_classes',
                  norm=colors.Normalize(0., 1.))
    pl.contour(xx, yy, Z, [0.5], linewidths=2., colors='k')

    # means
    pl.plot(lda.means_[0][0], lda.means_[0][1], 'o', color='black', markersize=10)
    pl.plot(lda.means_[1][0], lda.means_[1][1], 'o', color='black', markersize=10)

    return splot
def graphs_parameters_svm(filename):
    # reading the features and target variables from the file
    features, target = read_data(filename)

    # normalize the data by subtracting the mean and dividing by the standard deviation
    scaler = StandardScaler()
    features = scaler.fit_transform(features)

    # setting the ranges of gamma and C
    C_2d_range = [1, 1e2, 1e4, 1e5]
    gamma_2d_range = [1e-1, 1, 1e1, 1e2]

    # classifiers will contain the list of all the models for the various ranges of C and gamma
    classifiers = []
    for C in C_2d_range:
        for gamma in gamma_2d_range:
            clf = SVC(kernel='rbf', C=C, gamma=gamma)
            clf.fit(features, target)
            classifiers.append((C, gamma, clf))

    target = [int(y) for y in target]
    pl.figure(figsize=(12, 10))

    # construct a mesh
    h = .02
    x = np.array(features, dtype=float)
    y = np.array(target, dtype=int)
    x_min, x_max = x[:, 0].min() - 1, x[:, 0].max() + 1
    y_min, y_max = x[:, 1].min() - 1, x[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

    # Plotting support vectors
    for (k, (C, gamma, clf)) in enumerate(classifiers):
        Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
        Z = Z.reshape(xx.shape)
        pl.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
        pl.title("gamma 10^%d, C 10^%d" % (np.log10(gamma), np.log10(C)), size='medium')
        pl.contourf(xx, yy, Z, cmap=pl.cm.Paired)
        pl.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1],
                   c=y[clf.support_], cmap=pl.cm.Paired)
        pl.xticks(())
        pl.yticks(())
        pl.axis('tight')
    pl.show()

    pl.figure(figsize=(12, 10))
    # plotting the decision boundary
    for (k, (C, gamma, clf)) in enumerate(classifiers):
        # evaluate the decision function on a grid
        Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
        Z = Z.reshape(xx.shape)

        # visualize the decision function for these parameters
        pl.subplot(len(C_2d_range), len(gamma_2d_range), k + 1)
        pl.title("gamma 10^%d, C 10^%d" % (np.log10(gamma), np.log10(C)), size='medium')

        # visualize the parameters' effect on the decision function
        pl.pcolormesh(xx, yy, -Z, cmap=pl.cm.jet)
        pl.scatter(features[:, 0], features[:, 1], c=target, cmap=pl.cm.jet)
        pl.xticks(())
        pl.yticks(())
        pl.axis('tight')
    pl.show()

    optimal_model = grid_search(clf, x, y)
    print("the optimal parameters are (C gamma): (", optimal_model.C, optimal_model.gamma, ")")
def check_angle(self):
    '''
    Check angle computation
    '''
    plt.pcolormesh(self.lon(), self.lat(), self.angle())
    plt.axis('image')
    plt.colorbar()
    plt.show()
    return
def movie(self, fld, k=0, jd1=733342, jd2=733342+10):
    miv = np.ma.masked_invalid
    for n, jd in enumerate(np.arange(jd1, jd2, 0.25)):
        self.load(fld, jd)
        pl.clf()
        pl.pcolormesh(miv(self.__dict__[fld][0, k, :, :]))
        pl.clim(-0.1, 0.1)
        pl.savefig('%s_%05i.png' % (fld, n), dpi=150)