Example #1
def plot(D,title):
    im=plt.imshow(D,interpolation='nearest',cmap='Reds')
    plt.gca().xaxis.tick_top()
    x=np.arange(D.index.shape[0])
    plt.colorbar(im)
    plt.gca().tick_params(axis='both', which='major', labelsize=10)
    plt.title(title,y=1.03)
def benchmark(clf_class, params, name):
    print("parameters:", params)
    t0 = time()
    clf = clf_class(**params).fit(X_train, y_train)
    print("done in %fs" % (time() - t0))

    if hasattr(clf, 'coef_'):
        print("Percentage of non zeros coef: %f"
              % (np.mean(clf.coef_ != 0) * 100))
    print("Predicting the outcomes of the testing set")
    t0 = time()
    pred = clf.predict(X_test)
    print("done in %fs" % (time() - t0))

    print("Classification report on test set for classifier:")
    print(clf)
    print()
    print(classification_report(y_test, pred,
                                target_names=news_test.target_names))

    cm = confusion_matrix(y_test, pred)
    print("Confusion matrix:")
    print(cm)

    # Show confusion matrix
    pl.matshow(cm)
    pl.title('Confusion matrix of the %s classifier' % name)
    pl.colorbar()
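A minimal usage sketch for benchmark, assuming the usual 20-newsgroups vectorized splits (X_train, y_train, X_test, y_test, news_test) from the surrounding script already exist; the classifier choices below are purely illustrative:

from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import SGDClassifier

# hypothetical calls; any estimator class accepting **params works here
benchmark(MultinomialNB, {'alpha': 0.01}, 'MultinomialNB')
benchmark(SGDClassifier, {'alpha': 0.0001, 'penalty': 'l2'}, 'SGD')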
Example #3
def display_image_from_array(nparray, colory='binary', roi=None):
    """
    Produce a display of the nparray 2D matrix
    @param nparray : image to display
    @type nparray : numpy 2darray
    @param colory : color mapping of the image (see http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps)
    @type colory : string
    @param roi : region of interest, either corner points ((x1,y1),(x2,y2)) or an image-centered (width,height)
    @type roi : tuple
    """
    # Set the region of interest to display:
    #   (0,0) is set at the lower left corner of the image
    if roi is None:
        roi = ((0, 0), nparray.shape)
        nparraydsp = nparray
        print(roi)
    elif type(roi[0]) == tuple and type(roi[1]) == tuple:
        # Two-point definition of the domain: roi = integer indices of points ((x1,y1),(x2,y2))
        print(roi)
        nparraydsp = nparray[roi[0][0]:roi[1][0], roi[0][1]:roi[1][1]]
    elif type(roi[0]) == int and type(roi[1]) == int:
        # Image-centered domain: roi = integers (width, height)
        nparraydsp = nparray[nparray.shape[0] // 2 - roi[0] // 2:nparray.shape[0] // 2 + roi[0] // 2,
                             nparray.shape[1] // 2 - roi[1] // 2:nparray.shape[1] // 2 + roi[1] // 2]
    fig = pylab.figure()
    # Display the array with the requested colormap and no pixel smoothing interpolation
    pylab.imshow(nparraydsp, cmap=colory, interpolation='nearest')  # ,origin='lower')
    pylab.colorbar()
    pylab.axis('off')
Example #4
 def connection_field_plot_continuous(self,index,afferent=True,density=30):
     weights =  self.proj.getWeights(format='array')
     x = []
     y = []
     w = []
     
     if afferent:
            weights = weights[:,index].ravel()
            p = self.proj.pre
     else:
            weights = weights[index,:].ravel()
            p = self.proj.post
     
     for (ww,i) in zip(weights,numpy.arange(0,len(weights),1)):
               x.append(p.positions[0][i])
               y.append(p.positions[1][i])
               w.append(ww)
         
     bx = min([min(p.positions[0]),min(p.positions[0])])  
     by = max([max(p.positions[1]),max(p.positions[1])])  
     xi = numpy.linspace(min(p.positions[0]),max(p.positions[0]),100)
     yi = numpy.linspace(min(p.positions[1]),max(p.positions[1]),100)
     zi = griddata(x,y,w,xi,yi)
     pylab.figure()
     pylab.imshow(zi)
     pylab.title('Connection field from %s to %s of neuron %d' % (self.source.name,self.target.name,index))
     pylab.colorbar()
def getOptCandGamma(cv_train, cv_label):
    print "Finding optimal C and gamma for SVM with RBF Kernel"
    C_range = 10.0 ** np.arange(-2, 9)
    gamma_range = 10.0 ** np.arange(-5, 4)
    param_grid = dict(gamma=gamma_range, C=C_range)
    cv = StratifiedKFold(y=cv_label, n_folds=40)

    # Use the svm.SVC() as the cost function to evaluate parameter choices
    # NOTE: Perhaps we should run computations in parallel if needed. Does it
    # do that already within the class?
    grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv)
    grid.fit(cv_train, cv_label)

    score_dict = grid.grid_scores_
    scores = [x[1] for x in score_dict]
    scores = np.array(scores).reshape(len(C_range), len(gamma_range))
    pl.figure(figsize=(8,6))
    pl.subplots_adjust(left=0.05, right=0.95, bottom=0.15, top=0.95)
    pl.imshow(scores, interpolation='nearest', cmap=pl.cm.spectral)
    pl.xlabel('gamma')
    pl.ylabel('C')
    pl.colorbar()
    pl.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
    pl.yticks(np.arange(len(C_range)), C_range)
    pl.show()

    print "The best classifier is: ", grid.best_estimator_
Example #6
def makeContourPlot(scores, average, HEIGHT, WIDTH, outputId, maskId, plt_title, outputdir, barcodeId=-1, vmaxVal=100):
    pylab.bone()
    #majorFormatter = FormatStrFormatter('%.f %%')
    #ax = pylab.gca()
    #ax.xaxis.set_major_formatter(majorFormatter)
    
    pylab.figure()
    ax = pylab.gca()
    ax.set_xlabel(str(WIDTH) + ' wells')
    ax.set_ylabel(str(HEIGHT) + ' wells')
    ax.autoscale_view()
    pylab.jet()
    
    pylab.imshow(scores,vmin=0, vmax=vmaxVal, origin='lower')
    pylab.vmin = 0.0
    pylab.vmax = 100.0
    ticksVal = getTicksForMaxVal(vmaxVal)
    pylab.colorbar(format='%.0f %%',ticks=ticksVal)
    print "'%s'" % average
    if(barcodeId!=-1):
        if(barcodeId==0): maskId = "No Barcode Match,"
        else:             maskId = "Barcode Id %d," % barcodeId
    if plt_title != '': maskId = '%s\n%s' % (plt_title,maskId)
    print "Checkpoint A"
    pylab.title('%s Loading Density (Avg ~ %0.f%%)' % (maskId, average))
    pylab.axis('scaled')
    print "Checkpoint B"
    pngFn = outputdir+'/'+outputId+'_density_contour.png'
    print "Try save to", pngFn;
    pylab.savefig(pngFn, bbox_inches='tight')
    print "Plot saved to", pngFn;
Example #7
    def displayResults(self, res, cm=pylab.cm.gray, title='Specify a title'):
        if self.display:
            self.count = self.count + 1
            pylab.figure(self.count)
            pylab.imshow(res, cm, interpolation='nearest')
            pylab.colorbar()
            pylab.title(title)
def correlation_matrix(data, size=8.0):
    """ Calculates and shows the correlation matrix of the pandas data frame
        'data' as a heat map.
        Only the correlations between numerical variables are calculated!
    """
    # calculate the correlation matrix
    corr = data.corr()
    #print corr
    lc = len(corr.columns)
    # set some settings for plottin'
    pl.pcolor(corr, vmin = -1, vmax = 1, edgecolor = "black")
    pl.colorbar()
    pl.xlim([-5,lc])
    pl.ylim([0,lc+5])
    pl.axis('off')
    # annotate the rows and columns with their corresponding variables
    ax = pl.gca()            
    for i in range(0,lc):
        ax.annotate(corr.columns[i], (-0.5, i+0.5), \
            size='large', horizontalalignment='right', verticalalignment='center')
        ax.annotate(corr.columns[i], (i+0.5, lc+0.5),\
            size='large', rotation='vertical',\
            horizontalalignment='center', verticalalignment='right')
    # change the size of the image
    fig = pl.figure(num=1)    
    fig.set_size_inches(size+(size/4), size)     
    
    pl.show()
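A quick way to exercise correlation_matrix, assuming pl is the same pylab/matplotlib alias used above; the random DataFrame is illustrative only:

import numpy as np
import pandas as pd

rng = np.random.RandomState(0)
df = pd.DataFrame(rng.randn(200, 4), columns=list('abcd'))
df['e'] = 0.5 * df['a'] + 0.1 * rng.randn(200)   # deliberately correlated with 'a'
correlation_matrix(df, size=6.0)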
def window_fn_matrix(Q,N,num_remov=None,save_tag=None,lms=None):
    Q = n.matrix(Q); N = n.matrix(N)
    Ninv = uf.pseudo_inverse(N,num_remov=None) # XXX want to remove dynamically
    #print Ninv 
    info = n.dot(Q.H,n.dot(Ninv,Q))
    M = uf.pseudo_inverse(info,num_remov=num_remov)
    W = n.dot(M,info)

    if save_tag is not None:
        foo = W[0,:]
        foo = n.real(n.array(foo))
        foo.shape = (foo.shape[1],)
        print(foo.shape)
        p.scatter(lms[:,0],foo,c=lms[:,1],cmap=mpl.cm.PiYG,s=50)
        p.xlabel('l (color is m)')
        p.ylabel('W_0,lm')
        p.title('First Row of Window Function Matrix')
        p.colorbar()
        p.savefig('{0}/{1}_W.pdf'.format(fig_loc,save_tag))
        p.clf()

        print('W ', W.shape)
        p.imshow(n.real(W))
        p.title('Window Function Matrix')
        p.colorbar()
        p.savefig('{0}/{1}_W_im.pdf'.format(fig_loc,save_tag))
        p.clf()


    return W
Example #10
    def __call__(self, n):
        if len(self.f.shape) == 3:
            # f = f[x,v,t], 2 dim in phase space
            ft = self.f[n,:,:]
            pylab.pcolormesh(self.X, self.V, ft.T, cmap = 'jet')
            pylab.colorbar()
            pylab.clim(0,0.38) # for Landau test case
            pylab.grid()
            pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax])
            pylab.xlabel('$x$', fontsize = 18)
            pylab.ylabel('$v$', fontsize = 18)
            pylab.title('$N_x$ = %d, $N_v$ = %d, $t$ = %2.1f' % (self.x.N, self.v.N, self.it*self.t.width))
            pylab.savefig(self.path + self.filename)
            pylab.clf()
            return None

        if len(self.f.shape) == 2:
            # f = f[x], 1 dim in phase space
            ft = self.f[n,:]
            pylab.plot(self.x.gridvalues,ft,'ob')
            pylab.grid()
            pylab.axis([self.xmin, self.xmax, self.ymin, self.ymax])
            pylab.xlabel('$x$', fontsize = 18)
            pylab.ylabel('$f(x)$', fontsize = 18)
            pylab.savefig(self.path + self.filename)
            return None
Example #11
def psfplots():
	tpsf = wise.get_psf_model(1, pixpsf=True)
	
	psfp = tpsf.getPointSourcePatch(0, 0)
	psf = psfp.patch
	
	psf /= psf.sum()
	
	plt.clf()
	plt.imshow(np.log10(np.maximum(1e-5, psf)), interpolation='nearest', origin='lower')
	plt.colorbar()
	ps.savefig()
	
	h,w = psf.shape
	cx,cy = w/2, h/2
	
	X,Y = np.meshgrid(np.arange(w), np.arange(h))
	R = np.sqrt((X - cx)**2 + (Y - cy)**2)
	plt.clf()
	plt.semilogy(R.ravel(), psf.ravel(), 'b.')
	plt.xlabel('Radius (pixels)')
	plt.ylabel('PSF value')
	plt.ylim(1e-8, 1.)
	ps.savefig()
	
	plt.clf()
	plt.loglog(R.ravel(), psf.ravel(), 'b.')
	plt.xlabel('Radius (pixels)')
	plt.ylabel('PSF value')
	plt.ylim(1e-8, 1.)
	ps.savefig()
	
	print('PSF norm:', np.sqrt(np.sum(np.maximum(0, psf)**2)))
	print('PSF max:', psf.max())
 def savepng(pre, img, title=None, **kwargs):
     fn = '%s-%s.png' % (pre, idstr)
     print('Saving', fn)
     plt.clf()
     plt.imshow(img, **kwargs)
     ax = plt.axis()
     if debug:
         print(len(xplotx), len(allobjx))
         for i,(objx,objy,objc) in enumerate(zip(allobjx,allobjy,allobjc)):
             plt.plot(objx,objy,'-',c=objc)
             tempx = []
             tempx.append(xplotx[i])
             tempx.append(objx[0])
             tempy = []
             tempy.append(xploty[i])
             tempy.append(objy[0])
             plt.plot(tempx,tempy,'-',c='purple')
         plt.plot(pointx,pointy,'y.')
         plt.plot(xplotx,xploty,'xg')
     plt.axis(ax)
     if title is not None:
         plt.title(title)
     plt.colorbar()
     plt.gray()
     plt.savefig(fn)
Example #13
def plot_ch():
    for job in jobs_orig:
        print "plane of", job.path
        pylab.clf()
        x_center = int((job.position(0)[0] + job.position(1)[0])/2)
        x_final = 50 + x_center
        #plane = np.concatenate((job.plane(y=50)[:, x_final:], 
        #                        job.plane(y=50)[:, :x_final]), axis=1)
        plane = job.plane(y=50)
        myplane = plane[plane < 0.0]
        p0 = myplane.min()
        p12 = np.median(myplane)
        p14 = np.median(myplane[myplane<p12])
        p34 = np.median(myplane[myplane>p12])
        p1 = myplane.max()
        contour_values = (p0, p14, p12, p34, p1)
        pylab.title(r'$u_x=%.4f,\  D_{-}=%.4f,\  D_{+}=%.4f,\ ch=%i$ ' %
                    (job.u_x, job.D_minus, job.D_plus, job.ch_objects))
        car = pylab.imshow(plane, vmin=-0.001, vmax=0.0, 
                           interpolation='nearest')
        pylab.contour(plane, contour_values, linestyles='dashed', 
                                             colors='white')
        pylab.grid(True)
        pylab.colorbar(car)
        #imgfilename = 'plane_r20-y50-u_x%.4fD%.4fch%03i.png' % \
        #              (job.u_x, job.D_minus, job.ch_objects)
        imgfilename = 'plane_%s.png' % job.job_id
        pylab.savefig(imgfilename)
Example #14
def pressx(ifile, varkey, options, before = '', after = ''):
    import pylab as pl
    from matplotlib.colors import Normalize, LogNorm
    outpath = getattr(options, 'outpath', '.')
    vert = getpresbnds(ifile)
    var = ifile.variables[varkey]
    dims = [(k, l) for l, k in zip(var[:].shape, var.dimensions) if l > 1]
    if len(dims) > 2:
        raise ValueError('Press-x can have 2 non-unity dimensions; got %d - %s' % (len(dims), str(dims)))
    if options.logscale:
        norm = LogNorm()
    else:
        norm = Normalize()
    exec(before)
    ax = pl.gca()
    print(varkey, end = '')
    vals = var[:].squeeze()
    x = np.arange(vals.shape[1])
    patches = ax.pcolor(x, vert, vals, norm = norm)
    #ax.set_xlabel(X.units.strip())
    #ax.set_ylabel(Y.units.strip())
    pl.colorbar(patches)
    ax.set_ylim(vert.max(), vert.min())
    ax.set_xlim(x.min(), x.max())
    fmt = 'png'
    figpath = os.path.join(outpath + '_PRESX_' + varkey + '.' + fmt)
    exec(after)
    pl.savefig(figpath)
    print('Saved fig', figpath)
    return figpath
def plot_worker(jobq,mask,pid,lineshape,range):
    '''
    args[0] = array file name
    args[1] = output figure name
    args[2] = flag: if True, read a binary float32 array, otherwise load a text array
    if a mask is given, cells where mask==0 are masked
    '''

    if lineshape:
        lines = shapefile.load_shape_list(lineshape)
    else:
        lines = None
    while True:
        #--get some args from the queue
        args = jobq.get()
        #--check if this is a sentinel
        if args is None:
            break
        #--load
        if args[2]:
            arr = np.fromfile(args[0],dtype=np.float32)
            arr.resize(bro.nrow,bro.ncol)
        else:
            arr = np.loadtxt(args[0])
        
        if mask is not None:
            arr = np.ma.masked_where(mask==0,arr)        
        #print args[0],arr.min(),arr.max(),arr.mean()
        #--generic plotting
        fig = pylab.figure()
        ax = pylab.subplot(1,1,1,aspect='equal')
        
        if range:
            vmax = range[1]
            vmin = range[0]
        else:
            vmax = arr.max()
            vmin = arr.min()

        #p = ax.imshow(arr,interpolation='none')        
        p = ax.pcolor(bro.X,bro.Y,np.flipud(arr),vmax=vmax,vmin=vmin)
        pylab.colorbar(p)
        if lines:
            for line in lines:
                ax.plot(line[0,:],line[1,:],'k-',lw=1.0)
                #break
        ax.set_xticklabels([])
        ax.set_yticklabels([])
        ax.set_xlim(bro.plt_x)
        ax.set_ylim(bro.plt_y)
        ax.set_title(args[0])
        fmt = args[1].split('.')[-1]
        pylab.savefig(args[1],dpi=300,format=fmt)
        pylab.close(fig)
        #--mark this task as done
        jobq.task_done()
        print('plot worker', pid, ' finished', args[0])

    #--mark the sentinel as done
    jobq.task_done()
    return
def plot_gc_distribution(pp, data):
    names = data.keys()

    # Plot the 2D histogram of coverage vs gc
    for name in names:
        x = [ i * 100 for i in data[name][GC_DISTRIBUTION_NAME]['gc_samples'] ]
        y = data[name][GC_DISTRIBUTION_NAME]['cov_samples']
        

        # Use the median to determine the range to show and round
        # to nearest 100 to avoid aliasing artefacts 
        m = np.median(y)
        y_limit = math.ceil( 2*m / 100) * 100
        hist,xedges,yedges = np.histogram2d(x,y, bins=[20, 50], range=[ [0, 100.0], [0, y_limit] ])

        # draw the plot
        extent = [xedges[0], xedges[-1], yedges[0], yedges[-1] ]
        pl.imshow(hist.T,extent=extent,interpolation='nearest',origin='lower', aspect='auto')

        pl.colorbar()
        pl.title(name + ' GC Bias')
        pl.xlabel("GC %")
        pl.ylabel("k-mer coverage")
        pl.savefig(pp, format='pdf')
        pl.close()
Example #17
    def plot_plasma(self):

        P.tricontourf(self.rzt[:, 0], self.rzt[:, 1],
                      self.tris, self.beta, 1001, zorder=0)
        cticks = P.linspace(0.0, 0.2, 5)
        P.colorbar(ticks=cticks, format='%.2f')
        P.jet()
Example #18
    def dovis(self):
        """
        Do runtime visualization. 
        """

        pylab.clf()

        phi = self.cc_data.get_var("phi")

        myg = self.cc_data.grid

        pylab.imshow(numpy.transpose(phi[myg.ilo:myg.ihi+1,
                                         myg.jlo:myg.jhi+1]), 
                     interpolation="nearest", origin="lower",
                     extent=[myg.xmin, myg.xmax, myg.ymin, myg.ymax])

        pylab.xlabel("x")
        pylab.ylabel("y")
        pylab.title("phi")

        pylab.colorbar()
        
        pylab.figtext(0.05,0.0125, "t = %10.5f" % self.cc_data.t)

        pylab.draw()
def field_map_ndar(ndar_field,t,ar_coorx,ar_coory,X,image_out,variable):

    ar_field=ndar_field[t,:]
    max_val=int(np.max(ndar_field))
    if variable==4:
        max_val=100.
    xmin=min(ar_coorx);xmax=max(ar_coorx)
    ymin=min(ar_coory);ymax=max(ar_coory)
    step=X
    nx=int((xmax-xmin)/step)+1
    ny=int((ymax-ymin)/step)+1

    ar_indx=np.array((ar_coorx-xmin)/step,int)
    ar_indy=np.array((ar_coory-ymin)/step,int)

    ar_map=np.ones((ny,nx))*-99.9
    ar_map[ar_indy,ar_indx]=ar_field

    ar_map2 = M.masked_where(ar_map <0, ar_map)
    ut.check_file_exist(image_out)

    pl.clf()
    pl.axes(facecolor='gray')
    pl.imshow(ar_map2, cmap=pl.cm.RdBu,
              interpolation='nearest', origin='lower', vmax=max_val, vmin=0)
    pl.title('time step= '+ut.string(t,len(str(t))))
    pl.colorbar()
    pl.savefig(image_out)
Example #20
def VisualizeAlm(alm,figno=1,max_l=None):
    """ Visualize a healpy a_lm vector """
    lmax = hp.Alm.getlmax(alm.size)
    l,m = hp.Alm.getlm(lmax)
    mag = np.zeros([lmax+1,lmax+1])
    phs = np.zeros([lmax+1,lmax+1])
    mag[m,l] = np.abs(alm)
    phs[m,l] = np.angle(alm)
    cl = hp.alm2cl(alm)
    # Decide the range of l to plot
    if max_l is not None:
        max_l = (max_l if (max_l <= lmax) else lmax)
    else:
        max_l = lmax
    print(max_l)
    plt.figure(figno)
    plt.clf()
    plt.subplot(211)
    plt.imshow(mag[0:max_l,0:max_l],interpolation='nearest',origin='lower')
    plt.colorbar()
    plt.subplot(212)
    plt.imshow(phs[0:max_l,0:max_l],interpolation='nearest',origin='lower')
    plt.colorbar()
    # plt.subplot(313)
    #plt.semilogy(cl[0:max_l])
    return {'mag':mag,'phs':phs,'cl':cl}
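A minimal usage sketch for VisualizeAlm, assuming healpy and numpy are imported as hp and np as in the function itself; the flat input spectrum and lmax are arbitrary illustration values:

import numpy as np
import healpy as hp

lmax = 32
cl_in = np.ones(lmax + 1)           # flat spectrum, illustration only
alm = hp.synalm(cl_in, lmax=lmax)   # random a_lm realisation
out = VisualizeAlm(alm, figno=1, max_l=20)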
Example #21
def plotphicont(X,Y,Z):
    from pylab import subplot, show, contourf, contour, colorbar, title
    spl = subplot(111)
    cpl = contour(X,Y,Z,100)
    colorbar(cpl)
    spl.set_aspect('equal','box')
    title(r'Field potential $\varphi$')
def plot_box_data(field,redshift):
    nf1 = get_box_data(field,redshift)
    chosenIndex=200
    nf1 = nf1.reshape((400,400,400))[chosenIndex,:,:]
    plt.imshow(nf1)
    plt.colorbar()
    plt.show()
Example #23
def draw_heat_graph(getgraph, opts):
    # from pyevolve_graph script

    stage_points = getgraph()

    fg = pl.figure()
    ax = fg.add_subplot(111)

    pl.imshow(
        stage_points, aspect="auto", interpolation="gaussian",
        cmap=matplotlib.cm.__dict__["jet"])
    pl.title("Population scores along the generations")

    def labelfmt(x, pos=0):
        # there is surely a better way to do that
        return (float(x) == int(x)) and '%d' % (x) or ''
    ax.xaxis.set_major_formatter(pl.FuncFormatter(labelfmt))

    pl.xlabel('Generations -->')
    pl.ylabel('Sorted Population Results')
    pl.grid(True)
    pl.colorbar()

    if opts.outfile:
        fg.savefig(opts.outfile)

    if opts.show:
        pl.show()
Example #24
 def plothistory(self):
     a=self.a
     b=self.b
     plt.figure(figsize=(12,6))
     I=np.concatenate([a.T,np.array(np.nansum(a[:,:3],1),ndmin=2),
         np.array(np.nansum(a[:,3:6],1),ndmin=2),np.array(np.nansum(a[:,6:],1),ndmin=2)],axis=0)
     plt.plot(range(b.size),b,'rx',ms=8,mew=2)
     plt.plot([10.5,10.5],[-1,I.shape[1]],'r',lw=2)
     plt.imshow(I,interpolation='nearest',cmap='winter')
     plt.colorbar()
     ax=plt.gca()
     ax.set_yticks(range(I.shape[0]))
     ax.set_yticklabels(['']*a.shape[0]+['color','rel len','abs len'])
     c1=plt.Circle((-1.5,0),radius=0.4,color='blue',clip_on=False)
     c2=plt.Circle((-1.5,1),radius=0.4,color='white',clip_on=False)
     c3=plt.Circle((-1.5,2),radius=0.4,color='yellow',clip_on=False)
     ax.add_patch(c1);ax.add_patch(c2);ax.add_patch(c3);
     c1=plt.Rectangle((-2,3),1,0.2,color='white',clip_on=False)
     c2=plt.Rectangle((-2.5,4),1.5,0.2,color='white',clip_on=False)
     c3=plt.Rectangle((-3,5),2,0.2,color='white',clip_on=False)
     ax.add_patch(c1);ax.add_patch(c2);ax.add_patch(c3);
     c1=plt.Rectangle((-2,6),1,0.2,color='gray',clip_on=False)
     c2=plt.Rectangle((-2.5,7),1.5,0.2,color='gray',clip_on=False)
     c3=plt.Rectangle((-3,8),2,0.2,color='gray',clip_on=False)
     c4=plt.Rectangle((-3.5,9),2.5,0.2,color='gray',clip_on=False)
     ax.add_patch(c1);ax.add_patch(c2);ax.add_patch(c3);ax.add_patch(c4);
     print(I[-3,-1])
Example #25
def plot_samples_distance(dataset, sortbyattr=None):
    """Plot the euclidean distances between all samples of a dataset.

    Parameters
    ----------
    dataset : Dataset
      Providing the samples.
    sortbyattr : None or str
      If None, the samples distances will be in the same order as their
      appearance in the dataset. Alternatively, the name of a samples
      attribute can be given, which wil then be used to sort/group the
      samples, e.g. to investigate the similarity samples by label or by
      chunks.
    """
    if sortbyattr is not None:
        slicer = []
        for attr in dataset.sa[sortbyattr].unique:
            slicer += \
                get_samples_by_attr(dataset, sortbyattr, attr).tolist()
        samples = dataset.samples[slicer]
    else:
        samples = dataset.samples

    ed = np.sqrt(squared_euclidean_distance(samples))

    pl.imshow(ed, interpolation='nearest')
    pl.colorbar()
def plot_dendrogram_and_matrix(linkage, matrix, color_threshold=None):
    # Compute and plot dendrogram.
    fig = pylab.figure(figsize=(20,20))
    axdendro = fig.add_axes([0.09,0.1,0.2,0.8])
    dendrogram = sch.dendrogram(linkage, color_threshold=color_threshold, orientation='right')
    axdendro.set_xticks([])
    axdendro.set_yticks([])

    # Plot distance matrix.
    axmatrix = fig.add_axes([0.3,0.1,0.6,0.8])
    index = dendrogram['leaves']
    D = matrix[:]
    D = D[index,:]
    D = D[:,index]
    im = axmatrix.matshow(D, aspect='auto', origin='lower')
    axmatrix.set_xticks([])
    axmatrix.set_yticks([])

    # Plot colorbar.
    axcolor = fig.add_axes([0.91,0.1,0.02,0.8])
    pylab.colorbar(im, cax=axcolor)

    # Display and save figure.
    fig.show()
    input()

    return dendrogram
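A sketch of driving plot_dendrogram_and_matrix from a set of points, assuming scipy.cluster.hierarchy is imported as sch as the function requires; the random point cloud is illustrative only:

import numpy as np
import scipy.cluster.hierarchy as sch
from scipy.spatial.distance import pdist, squareform

rng = np.random.RandomState(1)
pts = rng.rand(20, 3)
condensed = pdist(pts)                        # condensed pairwise distances
Z = sch.linkage(condensed, method='average')  # hierarchical clustering
dendro = plot_dendrogram_and_matrix(Z, squareform(condensed))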
def plot_mtx(mtx=None, title=None, newfig=False, cbar=True, **kwargs):
    """
    ::

        static method for plotting a matrix as a time-frequency distribution (audio features)
    """
    if mtx is None or not isinstance(mtx, np.ndarray):
        raise ValueError('First argument, mtx, must be an array')
    if newfig: P.figure()
    dbscale = kwargs.pop('dbscale', False) 
    bels = kwargs.pop('bels',False)
    norm = kwargs.pop('norm',False)
    normalize = kwargs.pop('normalize',False)
    origin=kwargs.pop('origin','lower')
    aspect=kwargs.pop('aspect','auto')
    interpolation=kwargs.pop('interpolation','nearest')
    cmap=kwargs.pop('cmap',P.cm.gray_r)
    clip=-100.
    X = scale_mtx(mtx, normalize=normalize, dbscale=dbscale, norm=norm, bels=bels)
    i_min, i_max = np.where(X.mean(1))[0][[0,-1]]
    X = X[i_min:i_max+1].copy()
    if dbscale or bels:
        if bels: clip/=10.
        P.imshow(P.clip(X,clip,0),origin=origin, aspect=aspect, interpolation=interpolation, cmap=cmap, **kwargs)
    else:
        P.imshow(X,origin=origin, aspect=aspect, interpolation=interpolation, cmap=cmap, **kwargs)
    if title:
        P.title(title,fontsize=16)
    if cbar:
        P.colorbar()
    P.yticks(np.arange(0,i_max+1-i_min,3),pc_labels[i_min:i_max+1:3],fontsize=14)
    P.xlabel('Tactus', fontsize=14)
    P.ylabel('MIDI Pitch', fontsize=14)
    P.grid()
def plot_C_gamma_grid_search(grid, C_range, gamma_range, score):
    '''
    Plots the scores computed on a grid. 
    
    Arguments: 
        grid - the grid search object created using GridSearchCV()
        C_range - the C parameter range 
        gamma_range - the gamma parameter range 
        score - the scoring function  
        
    
    '''

    # grid_scores_ contains parameter settings and scores
    # We extract just the scores
    scores = [x[1] for x in grid.grid_scores_]
    scores = np.array(scores).reshape(len(C_range), len(gamma_range))
    
    # draw heatmap of accuracy as a function of gamma and C
    pl.figure(figsize=(8, 6))
    pl.subplots_adjust(left=0.05, right=0.95, bottom=0.15, top=0.95)
    pl.imshow(scores, interpolation='nearest', cmap=pl.cm.spectral)
    pl.title("Grid search on C and gamma for best %s" % score)
    pl.xlabel('gamma')
    pl.ylabel('C')
    pl.colorbar()
    pl.xticks(np.arange(len(gamma_range)), gamma_range, rotation=45)
    pl.yticks(np.arange(len(C_range)), C_range)
    
    pl.show()
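A usage sketch for plot_C_gamma_grid_search; note that grid_scores_ only exists in older scikit-learn releases (it was later replaced by cv_results_), so this assumes such a version is installed:

import numpy as np
from sklearn import svm, datasets
from sklearn.grid_search import GridSearchCV   # old module path, matching grid_scores_

iris = datasets.load_iris()
C_range = 10.0 ** np.arange(-2, 3)
gamma_range = 10.0 ** np.arange(-4, 1)
grid = GridSearchCV(svm.SVC(), param_grid=dict(C=C_range, gamma=gamma_range), cv=3)
grid.fit(iris.data, iris.target)
plot_C_gamma_grid_search(grid, C_range, gamma_range, score='accuracy')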
def plot_vel_vs_h3(maps, out_suffix=''):
    xaxis = (np.arange(len(maps.velocity[0])) - ppxf_m31.bhpos_pix[0]) * 0.05
    yaxis = (np.arange(len(maps.velocity)) - ppxf_m31.bhpos_pix[1]) * 0.05
    yy, xx = np.meshgrid(xaxis, yaxis)
    radius = np.hypot(xx, yy)
    good = np.where((np.abs(yy) < 0.5) & (np.abs(xx) < 1.0))
        
    plt.scatter(maps.velocity[good], maps.h3[good], c=maps.sigma[good], s=5,
                    marker='o', vmin=0, vmax=450)
    plt.xlim(-700, 0)
    plt.ylim(-0.5, 0.5)
    plt.colorbar(label='Sigma (km/s)')
    plt.axhline(linestyle='--', color='grey')
    plt.xlabel('Velocity (km/s)')
    plt.ylabel('h3')
    plt.savefig(plot_dir + 'vel_vs_h3' + out_suffix + '.png')

    plt.clf()
    plt.scatter(maps.sigma[good], maps.h3[good], c=maps.velocity[good], s=5,
                    marker='o', vmin=-700, vmax=0)
    plt.xlim(0, 450)
    plt.ylim(-0.5, 0.5)
    plt.colorbar(label='Velocity (km/s)')
    plt.axhline(linestyle='--', color='grey')
    plt.xlabel('Sigma (km/s)')
    plt.ylabel('h3')
    plt.savefig(plot_dir + 'sig_vs_h3' + out_suffix + '.png')

    return
def main():
    base_path = "/caps2/tsupinie/1kmf-control/"
    temp = goshen_1km_temporal(start=14400, end=14400)
    grid = goshen_1km_grid()
    n_ens_members = 40

    np.seterr(all='ignore')

    ens = loadEnsemble(base_path, [ 11 ], temp.getTimes(), ([ 'pt', 'p' ], computeDensity))
    ens = ens[0, 0]

    zs = decompressVariable(nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf').variables['zp'])
    xs, ys = grid.getXY()
    xs = xs[np.newaxis, ...].repeat(zs.shape[0], axis=0)
    ys = ys[np.newaxis, ...].repeat(zs.shape[0], axis=0)

    eff_buoy = effectiveBuoyancy(ens, (zs, ys, xs), plane={'z':10})
    print(eff_buoy)

    pylab.figure()
    pylab.contourf(xs[0], ys[0], eff_buoy[0], cmap=matplotlib.cm.get_cmap('RdBu_r'))
    pylab.colorbar()

    grid.drawPolitical()

    pylab.suptitle("Effective Buoyancy")
    pylab.savefig("eff_buoy.png")
    pylab.close()
    return
Example #31
def plot_coef_corr_seismic(offsets, trace_synt_ar_int, seismicData, T,
                           tmp_time_max, data_KK, int_time1, int_time2,
                           data_short, sgyname):
    """
    Draws the correlation coefficient between real and synthetic seismic data
    
    Parameters
    ----------
    Input:
        offsets - array of offsets (m)
        trace_synt_ar_int - synthetic data interpolate on seismic grid
        seismicData - seismic data class
        T - time axis for seismic
        tmp_time_max - picking of max amplitude near explored horizon
        data_KK - array with correlation coefficients
        int_time1 - upper time near explored horizon
        int_time2 - lower time near explored horizon
        data_short - interval real data
        sgy_name - name of current segy

    Output:
        subplot figure with synthetic and real data, color shows the correlation coefficient between them
    """
    # TODO: refine the boundary values
    #T_plot = np.arange(int_time1, int_time2, seismicData.dt)
    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(35, 12))

    plt.sca(axs[0])

    for ii in range(trace_synt_ar_int.shape[0]):
        plt.plot(
            trace_synt_ar_int[ii, :] / np.max(trace_synt_ar_int[ii, :]).T +
            ii + 1,
            T,
            'k',
            alpha=1,
            zorder=1,
            lw=1)
        x = trace_synt_ar_int[ii, :] / np.max(
            trace_synt_ar_int[ii, :]).T + ii + 1
        T_tmp_plot = np.arange(int_time1, int_time2, seismicData.dt / 4)
        y = T_tmp_plot
        x2 = np.interp(T_tmp_plot, T, x)
        plt.fill_betweenx(y, ii + 1, x2, where=(x2 > ii + 1), color='k')
        #plt.plot(ii+1,pick[ii],'c.',alpha=1,zorder=1,lw=1,markersize = 10)
    x1 = np.arange(1, ii + 1)
    plt.xticks(
        x1,
        (np.round(offsets / 1000, decimals=1)),
        fontsize=10,
    )
    plt.gca().invert_yaxis()
    plt.xlabel('Offset,km')
    plt.ylabel('T, ms')
    plt.title('Synthetic')
    plt.plot(tmp_time_max, '.r', markersize=25)
    plt.imshow(
        np.zeros_like(data_KK[:, :len(offsets)]),
        alpha=1,
        extent=[1, len(offsets) + 1, int_time2, int_time1],
        aspect='auto',
        cmap='Pastel1_r',
        vmin=0,
        vmax=0,
    )

    plt.sca(axs[1])

    for ii in range(data_short.shape[0]):
        plt.plot(data_short[ii, :] / np.max(data_short[ii, :]).T + ii + 1,
                 T,
                 'k',
                 alpha=1,
                 zorder=1,
                 lw=1)
        x = data_short[ii, :] / np.max(data_short[ii, :]).T + ii + 1
        T_tmp_plot = np.arange(int_time1, int_time2, seismicData.dt / 4)
        y = T_tmp_plot
        x2 = np.interp(T_tmp_plot, T, x)
        plt.fill_betweenx(y, ii + 1, x2, where=(x2 > ii + 1), color='k')
        #plt.plot(ii+1,pick[ii],'c.',alpha=1,zorder=1,lw=1,markersize = 10)

    x1 = np.arange(1, ii + 1)
    plt.xticks(
        x1,
        (np.round(offsets[:-2] / 1000, decimals=1)),
        fontsize=10,
    )
    plt.gca().invert_yaxis()
    plt.xlabel('Offset,km')
    plt.ylabel('T, ms')
    plt.title(sgyname)

    im = plt.imshow(
        data_KK[:, :len(offsets) + 1],
        alpha=1,
        extent=[1, len(offsets) + 1, int_time2, int_time1],
        aspect='auto',
        cmap='rainbow',
        vmin=0,
        vmax=1,
    )
    divider = make_axes_locatable(axs[1])
    cax = divider.append_axes("right", size="5%", pad=0.05)
    cbar = plt.colorbar(im, cax=cax)
    cbar.set_label('Coef. correlation', rotation=270)
    return
    def run(self):
        """
		2007-03-29
		2007-04-03
		2007-05-01
			--db_connect()
			--FilterStrainSNPMatrix_instance.read_data()
			if self.comparison_only:
				--FilterStrainSNPMatrix_instance.read_data()
			else:
				--get_SNPpos2index()
				--create_SNP_matrix_2010()
					--get_align_length_from_fname()
						--get_positions_to_be_checked_ls()
					--get_align_matrix_from_fname()
						--get_positions_to_be_checked_ls()
				--get_mapping_info_regarding_strain_acc()
				--shuffle_data_matrix_according_to_strain_acc_ls()
				--FilterStrainSNPMatrix_instance.write_data_matrix()
			
			--extract_sub_data_matrix()
			if self.sub_justin_output_fname:
				--FilterStrainSNPMatrix_instance.write_data_matrix()
			--compare_two_SNP_matrix()
			--outputDiffType()
			
		"""
        from FilterStrainSNPMatrix import FilterStrainSNPMatrix
        FilterStrainSNPMatrix_instance = FilterStrainSNPMatrix()
        header, src_strain_acc_list, category_list, data_matrix = FilterStrainSNPMatrix_instance.read_data(
            self.input_fname)
        if self.comparison_only:
            header, strain_acc_ls, abbr_name_ls_sorted, SNP_matrix_2010_sorted = FilterStrainSNPMatrix_instance.read_data(
                self.output_fname)
            SNP_matrix_2010_sorted = Numeric.array(SNP_matrix_2010_sorted)
        else:
            (conn, curs) = db_connect(self.hostname, self.dbname, self.schema)
            #extract data from alignment
            snp_acc_ls = header[2:]
            SNPpos2index = self.get_SNPpos2index(curs, snp_acc_ls,
                                                 self.snp_locus_table)
            abbr_name_ls, SNP_matrix_2010 = self.create_SNP_matrix_2010(
                SNPpos2index, self.data_dir_2010)
            strain_acc_ls, strain_acc2abbr_name, strain_acc2index = self.get_mapping_info_regarding_strain_acc(
                curs, self.strain_info_table, self.strain_info_2010_table,
                abbr_name_ls)
            SNP_matrix_2010_sorted = self.shuffle_data_matrix_according_to_strain_acc_ls(
                SNP_matrix_2010, strain_acc_ls, strain_acc2index)
            abbr_name_ls_sorted = []
            for strain_acc in strain_acc_ls:
                abbr_name_ls_sorted.append(strain_acc2abbr_name[strain_acc])
            FilterStrainSNPMatrix_instance.write_data_matrix(
                SNP_matrix_2010_sorted, self.output_fname, header,
                strain_acc_ls, abbr_name_ls_sorted)

        #comparison
        data_matrix = Numeric.array(data_matrix)
        sub_data_matrix = self.extract_sub_data_matrix(src_strain_acc_list,
                                                       data_matrix,
                                                       strain_acc_ls)
        if self.sub_justin_output_fname:
            FilterStrainSNPMatrix_instance.write_data_matrix(
                sub_data_matrix, self.sub_justin_output_fname, header,
                strain_acc_ls, abbr_name_ls_sorted)
        diff_matrix, diff_tag_dict, diff_tag2counter = self.compare_two_SNP_matrix(
            SNP_matrix_2010_sorted, sub_data_matrix)
        if self.diff_output_fname:
            self.outputDiffType(diff_matrix, SNP_matrix_2010_sorted,
                                sub_data_matrix, diff_tag_dict,
                                self.diff_type_to_be_outputted,
                                abbr_name_ls_sorted, header[2:],
                                self.diff_output_fname)

        summary_result_ls = []
        for tag, counter in diff_tag2counter.items():
            summary_result_ls.append('%s(%s):%s' %
                                     (tag, diff_tag_dict[tag], counter))
            print('\t%s(%s)\t%s' % (tag, diff_tag_dict[tag], counter))
        import pylab
        pylab.clf()
        diff_matrix_reverse = list(diff_matrix)
        diff_matrix_reverse.reverse()
        diff_matrix_reverse = Numeric.array(diff_matrix_reverse)
        pylab.imshow(diff_matrix_reverse, interpolation='nearest')
        pylab.title(' '.join(summary_result_ls))
        pylab.colorbar()
        pylab.show()

        #2007-11-01 do something as CmpAccession2Ecotype.py
        from CmpAccession2Ecotype import CmpAccession2Ecotype
        CmpAccession2Ecotype_ins = CmpAccession2Ecotype()
        nt_number2diff_matrix_index = CmpAccession2Ecotype_ins.get_nt_number2diff_matrix_index(
            nt2number)
        dc_placeholder = dict(
            zip(range(sub_data_matrix.shape[0]),
                range(sub_data_matrix.shape[1])))
        diff_matrix_ls = CmpAccession2Ecotype_ins.cmp_two_matricies(
            SNP_matrix_2010_sorted, sub_data_matrix,
            nt_number2diff_matrix_index, dc_placeholder, dc_placeholder,
            dc_placeholder)
        print(diff_matrix_ls)
Example #33
            for ch in range(eor1.shape[0]):
                eor1[ch] = n.convolve(eor1[ch], fringe_filter, mode='same')
        else: # this one is the exact one
            ij = a.miriad.bl2ij(bls_master[0])
            #beam_w_fr = capo.frf_conv.get_beam_w_fr(aa, bl)
            #t, firs, frbins,frspace = capo.frf_conv.get_fringe_rate_kernels(beam_w_fr, inttime, FRF_WIDTH)
            frp, bins = fringe.aa_to_fr_profile(aa, ij, 100)
            timebins, firs = fringe.frp_to_firs(frp, bins, aa.get_freqs(), fq0=aa.get_freqs()[100])
            for cnt,ch in enumerate(chans):
                eor1[cnt] = n.convolve(eor1[cnt], firs[ch], mode='same')
        #eor2 = eor.values()[0] * INJECT_SIG
        eor = eor1 * INJECT_SIG
        for k in days:
            for bl in x[k]: x[k][bl] += eor
        if False and PLOT:
            p.subplot(211); capo.arp.waterfall(eor1, mode='real'); p.colorbar()
            p.subplot(212); capo.arp.waterfall(eor2, mode='real'); p.colorbar(); p.show()

    #Q = {} # Create the Q's that extract power spectrum modes
    #for i in xrange(nchan):
    #    Q[i] = get_Q(i, nchan)
    Q = [get_Q(i,nchan) for i in range(nchan)]

    # Compute baseline auto-covariances and apply inverse to data
    I,_I,_Ix = {},{},{}
    C,_C,_Cx = {},{},{}
    for k in days:
        I[k],_I[k],_Ix[k] = {},{},{}
        C[k],_C[k],_Cx[k] = {},{},{}
        for bl in x[k]:
            C[k][bl] = cov(x[k][bl])
figdir1 = rootdir+'/fig/'
if not(os.path.isdir(figdir1)):
  os.mkdir(figdir1)
figdir=figdir1+'/'+config+'/'
if not(os.path.isdir(figdir)):
  os.mkdir(figdir)

if DISPLAYX:
  figx1 = pl.figure(2)
  #norm = colors.LogNorm(vmin=1, vmax=Ampx.max())
  norm = colors.Normalize(vmin=Ampx.min(),vmax=Ampx.max())
  pl.scatter(x0,y0,c=Ampx,cmap='jet',s=10,norm=norm)
  #pl.title('Amplitude Ex [$\mu$V/m]')
  pl.xlabel(xlbl)
  pl.ylabel(ylbl)
  cbar = pl.colorbar()
  cbar.set_label('Amplitude Vx [$\mu$V]')
  #pl.gca().set_aspect(aspect=4) #'equal'
  #pl.axis('equal')
  if lowcut==0 and highcut==0:
    figname = figdir+'/'+showerID+'_ampl_Vx_lin.png'
  else:
    figname = figdir+'/'+showerID+'_ampl_Vx_'+str(lowcut)+"-"+str(highcut)+"MHz_lin.png"
  pl.savefig(figname,dpi=500)
  #pl.show()
  #raw_input()
  #pl.close(figx)

  try:
    figx2 = pl.figure(3)
    norm = colors.LogNorm(vmin=1, vmax=Ampx.max())
Example #35
def RunAnimation(arg):
    import os, sys, time
    import subprocess
    import psutil
    import pylab as pl
    from IPython import display
    import matplotlib.gridspec as gridspec
    import seaborn as sns
    import pandas as pd
    import numpy as np

    print("RunAnimation")
    sys.stdout.flush()

    deviceCount = arg
    # Need this only for animation of GPU usage to be consistent with
    #from py3nvml.py3nvml import *
    import py3nvml
    maxNGPUS = int(subprocess.check_output("nvidia-smi -L | wc -l",
                                           shell=True))
    print("\nNumber of GPUS:", maxNGPUS)

    py3nvml.py3nvml.nvmlInit()
    total_deviceCount = py3nvml.py3nvml.nvmlDeviceGetCount()
    if deviceCount == -1:
        deviceCount = total_deviceCount
    #for i in range(deviceCount):
    #    handle = nvmlDeviceGetHandleByIndex(i)
    #    print("Device {}: {}".format(i, nvmlDeviceGetName(handle)))
    #print ("Driver Version:", nvmlSystemGetDriverVersion())
    print("Animation deviceCount=%d" % (deviceCount))

    file = os.getcwd() + "/error.txt"
    print("opening %s" % (file))
    fig = pl.figure(figsize=(9, 9))
    pl.rcParams['xtick.labelsize'] = 14
    pl.rcParams['ytick.labelsize'] = 14
    gs = gridspec.GridSpec(3, 2, wspace=0.3, hspace=0.4)
    ax1 = pl.subplot(gs[0, -2])
    ax2 = pl.subplot(gs[0, 1])
    ax3 = pl.subplot(gs[1:, :])
    fig.suptitle('H2O.ai Machine Learning $-$ Generalized Linear Modeling',
                 size=18)

    pl.gcf().subplots_adjust(bottom=0.2)

    #cb = False
    from matplotlib.colors import ListedColormap
    cm = ListedColormap(sns.color_palette("RdYlGn", 10).as_hex())
    cc = ax3.scatter([0.001, 0.001], [0, 0], c=[0, 1], cmap=cm)
    cb = pl.colorbar(cc, ax=ax3)
    os.system("mkdir -p images")
    i = 0
    while (True):
        #try:
        #print("In try i=%d" % i)
        #sys.stdout.flush()

        #cpu
        snapshot = psutil.cpu_percent(percpu=True)
        cpu_labels = range(1, len(snapshot) + 1)
        plot_cpu_perf(ax1, cpu_labels, snapshot)

        #gpu
        gpu_snapshot = []
        gpu_labels = list(range(1, deviceCount + 1))
        import py3nvml
        for j in range(deviceCount):
            handle = py3nvml.py3nvml.nvmlDeviceGetHandleByIndex(j)
            util = py3nvml.py3nvml.nvmlDeviceGetUtilizationRates(handle)
            gpu_snapshot.append(util.gpu)
        gpu_snapshot = gpu_snapshot
        plot_gpu_perf(ax2, gpu_labels, gpu_snapshot)

        res = pd.read_csv(file,
                          sep="\s+",
                          header=None,
                          names=[
                              'time', 'pass', 'fold', 'a', 'i', 'alpha',
                              'lambda', 'trainrmse', 'ivalidrmse', 'validrmse'
                          ])

        res['rel_acc'] = ((42665 - res['validrmse']) / (42665 - 31000))
        res['alpha_prime'] = res['alpha'] + res['fold'].apply(
            lambda x: new_alpha(x))

        best = res.loc[res['rel_acc'] == np.max(res['rel_acc']), :]
        plot_glm_results(ax3, res, best.tail(1), cb)
        # flag for colorbar to avoid redrawing
        #cb = True

        # Add footnotes
        footnote_text = "*U.S. Census dataset (predict Income): 45k rows, 10k cols\nParameters: 5-fold cross-validation, " + r'$\alpha = \{\frac{i}{7},i=0\ldots7\}$' + ", "\
'full $\lambda$-' + "search"
        #pl.figtext(.05, -.04, footnote_text, fontsize = 14,)
        pl.annotate(footnote_text, (0, 0), (-30, -50),
                    fontsize=12,
                    xycoords='axes fraction',
                    textcoords='offset points',
                    va='top')

        #update the graphics
        display.display(pl.gcf())
        display.clear_output(wait=True)
        time.sleep(0.01)

        #save the images
        saveimage = 0
        if saveimage:
            file_name = './images/glm_run_%04d.png' % (i, )
            pl.savefig(file_name, dpi=200)
        i = i + 1
grid_x = np.arange(n_grid[0] + 1) * grid_size + pos_min[0]
grid_y = np.arange(n_grid[1] + 1) * grid_size + pos_min[1]

#f = plt.figure(figsize=(12, 5))
f = plt.figure(figsize=(6, 5))
plt.scatter(x[w],
            y[w],
            c=catalog['vel'][w],
            cmap='seismic',
            s=2,
            vmin=-1000,
            vmax=1000)
f.axes[0].set_aspect('equal')
plt.xlabel(r'x [$h^{-1}$ Mpc]')
plt.ylabel(r'y [$h^{-1}$ Mpc]')
cbar = plt.colorbar()
cbar.set_label('Velocity [km/s]', rotation=270)

if add_grid:
    for g in grid_x:
        plt.axvline(g, color='k', ls='--', lw=1, alpha=0.5)
    for g in grid_y:
        if g >= -grid_size:
            plt.axhline(g, color='k', ls='--', lw=1, alpha=0.5)

    wg = (yg > -grid_size / 2) & (zg > -grid_size / 2) & (zg < grid_size / 2)
    plt.autoscale(False)
    plt.scatter(xg[wg],
                yg[wg],
                c=vg[wg],
                s=1400,
Example #37
# Training the SOM
from minisom import MiniSom

# STEP 2 CREATE A GRID COMPOSED OF NODES EACH ONE HAVING A WEIGHT VECTOR OF N_FEATURE ELEMENTS
som = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)

# STEP 3 RANDOMLY INITIALIZE THE VALUES OF THE WEIGHT VECTORS TO SMALL NUMBERS CLOSER TO 0 (BUT NOT 0)
som.random_weights_init(X)
som.train_random(data=X, num_iteration=100)

# Visualizing the results
from pylab import bone, pcolor, colorbar, plot, show
bone()
pcolor(som.distance_map().T)
colorbar()
markers = ['o', 's']
colors = ['r', 'g']
for i, x in enumerate(X):
    w = som.winner(x)
    plot(w[0] + 0.5,
         w[1] + 0.5,
         markers[y[i]],
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',
         markersize=10,
         markeredgewidth=2)
show()

# Finding the frauds
mappings = som.win_map(X)
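The fraud lookup that usually follows win_map concatenates the observations mapped to the most distant (whitest) nodes; the node coordinates below are placeholders that would normally be read off the distance map plotted above:

import numpy as np

suspect_nodes = [(8, 1), (6, 8)]   # hypothetical coordinates chosen by eye
frauds = np.concatenate([mappings[n] for n in suspect_nodes if len(mappings[n])], axis=0)
# if X was scaled beforehand, apply the scaler's inverse_transform to recover raw values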
Example #38
    print(Ts.shape)
    for i in range(Ts.shape[0]):
        if n.random.uniform() > .5:
            Ts[i] *= -1
            Ns[i] *= -1

print(Ts.shape)
print(Ns.shape)
#print times[300], times[500]
#print ' '.join(['%d_%d' % a.miriad.bl2ij(bl) for bl in bls])
#sys.stdout.flush()
if PLOT:
    #capo.arp.waterfall(cov(Ts), mode='log', drng=2); p.show()
    p.subplot(141)
    capo.arp.waterfall(Ts, mode='log', mx=1, drng=2)
    p.colorbar(shrink=.5)
    p.title('Vis in K. bls X ints.', fontsize=8)
    p.subplot(142)
    capo.arp.waterfall(Ns, mode='log')  #, mx=1, drng=2); p.colorbar(shrink=.5)
    p.title('FRF eor_model.', fontsize=8)
    p.subplot(143)
    capo.arp.waterfall(Ws)
    p.colorbar(shrink=0.5)
    p.title('Weights in samples bls x ints', fontsize=8)
    p.subplot(144)
    capo.arp.waterfall(cov(Ts), mode='log', drng=3)
    p.colorbar(shrink=.5)
    print(cov(Ts).shape)
    p.title('cov(Ts)', fontsize=8)
    p.show()
    p.subplot(121)
Example #39
def search(tile):

    if os.path.exists('rogue-%s-02.png' %
                      tile) and not os.path.exists('rogue-%s-03.png' % tile):
        print('Skipping', tile)
        return

    fn = os.path.join(tile[:3], tile, 'unwise-%s-w2-%%s-m.fits' % tile)

    try:
        II = [fitsio.read(os.path.join('e%i' % e, fn % 'img')) for e in [1, 2]]
        PP = [fitsio.read(os.path.join('e%i' % e, fn % 'std')) for e in [1, 2]]
        wcs = Tan(os.path.join('e%i' % 1, fn % 'img'))
    except:
        import traceback
        print()
        print('Failed to read data for tile', tile)
        traceback.print_exc()
        print()
        return
    H, W = II[0].shape

    ps = PlotSequence('rogue-%s' % tile)

    aa = dict(interpolation='nearest', origin='lower')
    ima = dict(interpolation='nearest', origin='lower', vmin=-100, vmax=500)

    plt.clf()
    plt.imshow(II[0], **ima)
    plt.title('Epoch 1')
    ps.savefig()
    plt.clf()
    plt.imshow(II[1], **ima)
    plt.title('Epoch 2')
    ps.savefig()

    # X = gaussian_filter(np.abs((II[0] - II[1]) / np.hypot(PP[0], PP[1])), 1.0)
    # plt.clf()
    # plt.imshow(X, interpolation='nearest', origin='lower')
    # plt.title('Blurred abs difference / per-pixel-std')
    # ps.savefig()

    # Y = (II[0] - II[1]) / reduce(np.hypot, [PP[0], PP[1], np.hypot(100,II[0]), np.hypot(100,II[1]) ])
    Y = (II[0] - II[1]) / reduce(np.hypot, [PP[0], PP[1]])
    X = gaussian_filter(np.abs(Y), 1.0)

    xthresh = 3.

    print('Value at rogue:', X[1452, 1596])

    print('pp at rogue:', [pp[1452, 1596] for pp in PP])

    plt.clf()
    plt.imshow(X, interpolation='nearest', origin='lower')
    plt.title('X')
    ps.savefig()

    # plt.clf()
    # plt.hist(np.minimum(100, PP[0].ravel()), 100, range=(0,100),
    #          histtype='step', color='r')
    # plt.hist(np.minimum(100, PP[1].ravel()), 100, range=(0,100),
    #          histtype='step', color='b')
    # plt.title('Per-pixel std')
    # ps.savefig()

    #Y = ((II[0] - II[1]) / np.hypot(PP[0], PP[1]))
    #Y = gaussian_filter(
    #    (II[0] - II[1]) / np.hypot(100, np.hypot(II[0], II[1]))
    #    , 1.0)

    #I = np.argsort(-X.ravel())
    #yy,xx = np.unravel_index(I[:25], X.shape)
    #print 'xx', xx
    #print 'yy', yy

    hot = (X > xthresh)
    peak = find_peaks(hot, X)
    dilate = 2
    hot = binary_dilation(hot, structure=np.ones((3, 3)), iterations=dilate)
    blobs, nblobs = label(hot, np.ones((3, 3), int))
    blobslices = find_objects(blobs)
    # Find maximum pixel within each blob.
    BX, BY = [], []
    BV = []
    for b, slc in enumerate(blobslices):
        sy, sx = slc
        y0, y1 = sy.start, sy.stop
        x0, x1 = sx.start, sx.stop
        bl = blobs[slc]
        i = np.argmax((bl == (b + 1)) * X[slc])
        iy, ix = np.unravel_index(i, bl.shape)
        by = iy + y0
        bx = ix + x0
        BX.append(bx)
        BY.append(by)
        BV.append(X[by, bx])
    BX = np.array(BX)
    BY = np.array(BY)
    BV = np.array(BV)
    I = np.argsort(-BV)
    xx, yy = BX[I], BY[I]

    keep = []
    S = 15
    for i, (x, y) in enumerate(zip(xx, yy)):
        #print x,y
        if x < S or y < S or x + S >= W or y + S >= H:
            continue

        slc = slice(y - S, y + S + 1), slice(x - S, x + S + 1)
        slc2 = slice(y - 3, y + 3 + 1), slice(x - 3, x + 3 + 1)

        mx = np.max((II[0][slc] + II[1][slc]) / 2.)
        #print 'Max within slice:', mx
        #if mx > 5e3:
        if mx > 2e3:
            continue

        mx2 = np.max((II[0][slc2] + II[1][slc2]) / 2.)
        print('Flux near object:', mx2)
        if mx2 < 250:
            continue

        #miny = np.min(Y[slc2])
        #maxy = np.max(Y[slc2])
        keep.append(i)

    keep = np.array(keep)
    if len(keep) == 0:
        print('No objects passed cuts')
        return
    xx = xx[keep]
    yy = yy[keep]

    plt.clf()
    plt.imshow(X, interpolation='nearest', origin='lower', cmap='gray')
    plt.title('X')
    ax = plt.axis()
    plt.plot(xx, yy, 'r+')
    plt.plot(1596, 1452, 'o', mec=(0, 1, 0), mfc='none')
    plt.axis(ax)
    ps.savefig()

    ylo, yhi = [], []
    for i in range(min(len(xx), 100)):
        x, y = xx[i], yy[i]
        slc2 = slice(y - 3, y + 3 + 1), slice(x - 3, x + 3 + 1)
        ylo.append(np.min(Y[slc2]))
        yhi.append(np.max(Y[slc2]))
    plt.clf()
    plt.plot(ylo, yhi, 'r.')
    plt.axis('scaled')
    ps.savefig()

    for i, (x, y) in enumerate(list(zip(xx, yy))[:50]):
        print(x, y)
        rows, cols = 2, 3
        ra, dec = wcs.pixelxy2radec(x + 1, y + 1)

        slc = slice(y - S, y + S + 1), slice(x - S, x + S + 1)
        slc2 = slice(y - 3, y + 3 + 1), slice(x - 3, x + 3 + 1)

        mx = max(np.max(II[0][slc]), np.max(II[1][slc]))
        print('Max within slice:', mx)
        miny = np.min(Y[slc2])
        maxy = np.max(Y[slc2])

        plt.clf()

        plt.subplot(rows, cols, 1)
        plt.imshow(II[0][slc], **ima)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('epoch 1')

        plt.subplot(rows, cols, 2)
        plt.imshow(II[1][slc], **ima)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('epoch 2')

        plt.subplot(rows, cols, 3)
        plt.imshow(PP[0][slc], **aa)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('std 1')

        plt.subplot(rows, cols, 6)
        plt.imshow(PP[1][slc], **aa)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('std 2')

        plt.subplot(rows, cols, 4)
        plt.imshow(X[slc], **aa)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('X')

        plt.subplot(rows, cols, 5)
        plt.imshow(Y[slc], **aa)
        plt.xticks([])
        plt.yticks([])
        plt.colorbar()
        plt.title('Y')

        #plt.suptitle('Tile %s, Flux: %4.0f, Range: %.2g %.2g' % (tile,mx,miny,maxy))
        plt.suptitle('Tile %s, RA,Dec (%.4f, %.4f)' % (tile, ra, dec))

        ps.savefig()
Example #40
    #ax.imshow(grid, extent=extent)
    ax.pcolormesh(np.log10(densities), temperatures, grid)
    ax.set_title('$%s$ o-H$_2$CO %s GHz' % (label, freq))
    ax.set_xlabel('log Density')
    ax.set_ylabel('Temperature')

pl.figure(2)
pl.clf()
ax = pl.subplot(2, 1, 1)
#ax.imshow(grid, extent=extent)
cax = ax.pcolormesh(np.log10(densities),
                    temperatures,
                    taugrid_140 / taugrid_150,
                    vmax=1.3,
                    vmin=0.8)
pl.colorbar(cax)
ax.set_title('$\\tau$ o-H$_2$CO 140/150 GHz')
ax.set_xlabel('log Density')
ax.set_ylabel('Temperature')

ax = pl.subplot(2, 1, 2)
#ax.imshow(grid, extent=extent)
cax = ax.pcolormesh(np.log10(densities),
                    temperatures,
                    texgrid_140 / texgrid_150,
                    vmax=1.3,
                    vmin=0.8)
pl.colorbar(cax)
ax.set_title('$T_{ex}$ o-H$_2$CO 140/150 GHz')
ax.set_xlabel('log Density')
ax.set_ylabel('Temperature')
Example #41
def plot_composite_matrix(D,
                          labeltext,
                          show_labels=True,
                          show_indices=True,
                          vmax=1.0,
                          vmin=0.0,
                          force=False):
    """Build a composite plot showing dendrogram + distance matrix/heatmap.

    Returns a matplotlib figure."""
    if D.max() > 1.0 or D.min() < 0.0:
        error(
            'This matrix doesn\'t look like a distance matrix - min value {}, max value {}',
            D.min(), D.max())
        if not force:
            raise ValueError("not a distance matrix")
        else:
            notify('force is set; scaling to [0, 1]')
            D -= D.min()
            D /= D.max()

    if show_labels:
        show_indices = True

    fig = pylab.figure(figsize=(11, 8))
    ax1 = fig.add_axes([0.09, 0.1, 0.2, 0.6])

    # plot dendrogram
    Y = sch.linkage(D, method='single')  # centroid

    dendrolabels = labeltext
    if not show_labels:
        dendrolabels = [str(i) for i in range(len(labeltext))]

    Z1 = sch.dendrogram(Y,
                        orientation='left',
                        labels=dendrolabels,
                        no_labels=not show_indices)
    ax1.set_xticks([])

    xstart = 0.45
    width = 0.45
    if not show_labels:
        xstart = 0.315
    scale_xstart = xstart + width + 0.01

    # plot matrix
    axmatrix = fig.add_axes([xstart, 0.1, width, 0.6])

    # (this reorders D by the clustering in Z1)
    idx1 = Z1['leaves']
    D = D[idx1, :]
    D = D[:, idx1]

    # show matrix
    im = axmatrix.matshow(D,
                          aspect='auto',
                          origin='lower',
                          cmap=pylab.cm.YlGnBu,
                          vmin=vmin,
                          vmax=vmax)
    axmatrix.set_xticks([])
    axmatrix.set_yticks([])

    # Plot colorbar.
    axcolor = fig.add_axes([scale_xstart, 0.1, 0.02, 0.6])
    pylab.colorbar(im, cax=axcolor)

    return fig
Example #42
 def scatterplot(self, xvars, yvars, colorvars, colormap=None):
     cm = pl.cm.get_cmap(colormap)
     sc = self.fig.axes[self.axnum].scatter(xvars, yvars, c=colorvars, cmap=cm)
     pl.colorbar(sc)
Example #43
#test_netcdf: create a netcdf file with bogus data
#Paul Kushner
#See netCDF4 documentation at http://unidata.github.io/netcdf4-python/
import netCDF4
import numpy
import pylab
pylab.ion() 
file=netCDF4.Dataset('temperature_time_record.nc')
xval = file.variables['x']
yval = file.variables['y']
tval = file.variables['t']
data = file.variables['temperature']
print('shape xval, yval, data', numpy.shape(xval), numpy.shape(yval), numpy.shape(data))
pylab.figure(1)
pylab.subplot(2,1,1)
pylab.plot(xval,data[0,2,:],'r')
pylab.plot(xval,data[1,2,:],'b')
pylab.legend(('time %3.1f s'%tval[0],'time %3.1f'%tval[1]))
pylab.title('temperature for y=%3.1f %s'%(yval[2],yval.units))
pylab.xlabel('x(%s)'%xval.units)
pylab.ylabel('t(%s)'%tval.units)
pylab.subplot(2,1,2)
pylab.contourf(xval,yval,data[2,:,:])
pylab.title('Temperature(%s) at time %3.1f %s'%(data.units,tval[2],tval.units))
pylab.xlabel('x(%s)'%xval.units)
pylab.ylabel('y(%s)'%yval.units)
pylab.colorbar()
pylab.show()
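The header comment mentions creating a netCDF file with bogus data, but the snippet only reads one; a writer sketch that would produce a temperature_time_record.nc compatible with the reads above (all dimension sizes and values are arbitrary):

import numpy
import netCDF4

nc = netCDF4.Dataset('temperature_time_record.nc', 'w')
nc.createDimension('x', 20); nc.createDimension('y', 10); nc.createDimension('t', 3)
xv = nc.createVariable('x', 'f4', ('x',)); xv.units = 'm'
yv = nc.createVariable('y', 'f4', ('y',)); yv.units = 'm'
tv = nc.createVariable('t', 'f4', ('t',)); tv.units = 's'
temp = nc.createVariable('temperature', 'f4', ('t', 'y', 'x')); temp.units = 'K'
xv[:] = numpy.linspace(0.0, 10.0, 20)
yv[:] = numpy.linspace(0.0, 5.0, 10)
tv[:] = numpy.arange(3, dtype='f4')
temp[:] = numpy.random.rand(3, 10, 20)
nc.close()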
Example #44
def single_value_visualization(simulation_name,master_results_dir,query,value_names=None,filename=None,resolution=None,treat_nan_as_zero=False,ranges={},cols=4):
    """
    Visualizes all single values (or those whose names match ones in `value_names` argument)
    present in the datastores of parameter search over a fixed set of parameters. 
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                    The directory where the parameter search results are stored.
    query : ParamFilterQuery
          ParamFilterQuery filter query instance that will be applied to each datastore before records are retrieved.
    
    value_names : list(str)
                  List of value names to visualize.  
    filename : str
              The file name into which to save the resulting figure. If None, the figure is just displayed.
    resolution : int
               If not None, data will be plotted on an interpolated grid of size (resolution,...,resolution)
    ranges : dict
           A dictionary with value names as keys, and tuples of (min,max) ranges as values indicating what range of values should be displayed.
           
    cols : int
         The number of columns in which to show plots, default is 4.
               
    """
    (parameters,datastores,n) = load_fixed_parameter_set_parameter_search(simulation_name,master_results_dir,filter=ParamFilterQuery(ParameterSet({'ads_unique' : False, 'rec_unique' : False, 'params' : ParameterSet({'identifier' : 'SingleValue'})})))
    # print(parameters)

    # Lets first filter out parameters that do not vary
    todelete=[];
    for i in range(0,len(parameters)):
        vals = set([v[0][i] for v in datastores])
        # print(vals)
        if len(vals) == 1:
            todelete.append(i)
            # print(todelete)
    for k in range(0,len(datastores)):
        datastores[k] = ([i for j, i in enumerate(datastores[k][0]) if j not in todelete],datastores[k][1])
    parameters = [i for j, i in enumerate(parameters) if j not in todelete]
    # print(parameters)

    # Now apply the user-supplied filter query to each datastore
    datastores = [(a,query.query(b)) for a,b in datastores]
    
    sorted_parameter_indexes = list(zip(*sorted(enumerate(parameters), key=lambda x: x[1])))[0]
    
    # if value_names is None, collect the value names present across all datastores
    if value_names is None:
        value_names = set([])
        for d in datastores:
            value_names.update(set([ads.value_name for ads in param_filter_query(d[1],identifier='SingleValue').get_analysis_result()]))
        value_names = sorted(value_names)
    # print("Value names:")
    # print(value_names)
    
    # Let's make sure that the value_names uniquely identify a SingleValue ADS in each DataStore and
    # that they exist in each DataStore.
    for (param_values,datastore) in datastores: 
        for v in value_names:
            if len(param_filter_query(datastore,identifier='SingleValue',value_name=v).get_analysis_result()) > 1:
                param_filter_query(datastore,identifier='SingleValue',value_name=v).print_content(full_ADS=True)
            #assert len(param_filter_query(datastore,identifier='SingleValue',value_name=v).get_analysis_result()) == 1, "Error, %d ADS with value_name %s found for parameter combination: %s" % (len(param_filter_query(datastore,identifier='SingleValue',value_name=v).get_analysis_result()),v, str([str(a) + ':' + str(b) for (a,b) in zip(parameters,param_values)]))
    
    rows = math.ceil(1.0*len(value_names)/cols)
    
    pylab.figure(figsize=(12*cols, 6*rows), dpi=300, facecolor='w', edgecolor='k')
                
    # print(rows)
    # print(cols)
    # print("Plotting")
    res = {}
    for i,value_name in enumerate(value_names): 
        pylab.subplot(rows,cols,i+1)
        if len(parameters) == 1:
            x = []
            y = []
            for (param_values,datastore) in datastores:
                adss = param_filter_query(datastore,identifier='SingleValue',value_name=value_name).get_analysis_result()
                if len(adss) > 0:
                    x.append(param_values[0])
                    y.append(float(adss[0].value))
            pylab.plot(x,y)
            pylab.plot(x,y,marker='o')
            pylab.xlabel(parameters[sorted_parameter_indexes[0]])
            pylab.ylabel(value_name)
               
        elif len(parameters) == 2:
            # print('*****************************')
            # print(i)
            # print(len(datastores))
            x = []
            y = []
            z = []
            for (param_values,datastore) in datastores:
                adss = param_filter_query(datastore,identifier='SingleValue',value_name=value_name).get_analysis_result()
                if len(adss)>0:
                    x.append(param_values[sorted_parameter_indexes[0]])
                    y.append(param_values[sorted_parameter_indexes[1]])
                    z.append(float(adss[0].value))
            if treat_nan_as_zero:
                z = numpy.nan_to_num(z)
               
            if value_name in ranges:
                vmin,vmax = ranges[value_name]
            else:
                # print(value_name)
                # print(z)
                # print(min(z))
                # print(max(z))
                vmin = min(z)
                vmax = max(z)

            if resolution != None:
                xi = numpy.linspace(numpy.min(x),numpy.max(x),resolution)
                yi = numpy.linspace(numpy.min(y),numpy.max(y),resolution)
                gr = griddata((x,y),z,(xi[None, :], yi[:, None]),method='cubic')
                pylab.imshow(gr,interpolation='none',vmin=vmin,vmax=vmax,aspect='auto',cmap=cm.gray,origin='lower',extent=[numpy.min(x),numpy.max(x),numpy.min(y),numpy.max(y)])
            else:
                pylab.scatter(x,y,marker='o',s=300,c=z,cmap=cm.jet,vmin=vmin,vmax=vmax)
                pylab.xlim(min(x)-0.1*(max(x)-min(x)),max(x)+0.1*(max(x)-min(x)))
                pylab.ylim(min(y)-0.1*(max(y)-min(y)),max(y)+0.1*(max(y)-min(y)))
                pylab.colorbar()

                res[value_name]=((parameters[sorted_parameter_indexes[0]],parameters[sorted_parameter_indexes[1]]),x,y,z)

                f = open(value_name + '.pickle', 'wb')
                pickle.dump((value_name,parameters[sorted_parameter_indexes[0]],parameters[sorted_parameter_indexes[1]],x,y,z),f)
                f.close()
               
            pylab.xlabel(parameters[sorted_parameter_indexes[0]])
            pylab.ylabel(parameters[sorted_parameter_indexes[1]])
        else:
            raise ValueError("Currently cannot handle more than 2D data")
        pylab.title(value_name)    

        import scipy.io
        f = open('ps_res.pickle', 'wb')
        pickle.dump(res, f)
        f.close()
        scipy.io.savemat('ps_res.mat', res)

    if filename != None:
       pylab.savefig(master_results_dir+'/'+filename, bbox_inches='tight')
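
# A hedged usage sketch for single_value_visualization(): the simulation name, results
# directory and value name below are hypothetical placeholders, and the empty 'params'
# ParameterSet simply lets every record through the filter query.
query = ParamFilterQuery(ParameterSet({'ads_unique': False, 'rec_unique': False,
                                       'params': ParameterSet({})}))
single_value_visualization('MySimulation', './parameter_search_results', query,
                           value_names=['Mean(Firing rate)'],
                           filename='overview.png', resolution=20, cols=4)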
    a[4][4] = data_sheet.cell_value(i + 1, 14)

    #norm1 = plt.colors.Normalize(vmin=0,vmax=maximum/4)
    pylab.figure()
    cmap = pylab.cm.jet
    im = pylab.imshow(a, cmap=cmap, interpolation='nearest')
    pylab.title(str(data_sheet.cell_value(i + 1, 0)))
    for ind1 in range(5):
        for ind2 in range(5):
            pylab.text(ind1,
                       ind2,
                       int(b[ind1][ind2]),
                       va='center',
                       ha='center')
    pylab.axis('off')
    pylab.colorbar(im)
    pylab.savefig("heatmaps_jet/" + str(data_sheet.cell_value(i + 1, 0)) +
                  ".png")
    pylab.show()

    pylab.figure()
    cmap = pylab.cm.PiYG
    im = pylab.imshow(a, cmap=cmap, interpolation='nearest')
    pylab.title(str(data_sheet.cell_value(i + 1, 0)))
    for ind1 in range(5):
        for ind2 in range(5):
            pylab.text(ind1,
                       ind2,
                       int(b[ind1][ind2]),
                       va='center',
                       ha='center')
Exemple #46
0
plotInterval = N // 10

for t in range(N):
    i = np.random.randn(1,cfg.layerTestSizes[layer],cfg.layerTestSizes[layer])
    cv = float(np.squeeze(n.run(i,channel,layer)))
    # av += cv
    # r = cv / av
    ab += i[:] * cv #/ N #ab * (1-r) + i * r
    cvs.append(float(cv))
    if (not t % testInterval):
        testVals.append(float(np.squeeze(n.run(ab[:],channel,layer))))
    if t and (not (t % plotInterval)):
        pl.figure()
        pl.imshow(ab[0])
        pl.title("t = %i" % t)
        pl.colorbar()
        pl.savefig('%s/stim_%i.png' % (outDir, t))
        pl.close()
        
        pl.figure()
        pl.plot(testVals)
        pl.savefig('%s/progress_%i.png' % (outDir, t))
        pl.close()
        # pl.show()
        
        pl.imsave('%s/optimal_%i.png' % (outDir, t), ab[0], cmap=pl.cm.gray)

pl.figure()
pl.imshow(ab[0])
pl.title("t = %i" % t)
pl.colorbar()
Exemple #47
0
    def plot(self,valueRange = None,\
             show = True,\
             saveFig = None,\
             colorMapName = 'jet',\
             colBarLabel = None,\
             colBarOrient ='vertical',\
             colBarShrink = 1.0,\
             useImagePlot = False,\
             **kwd_args):
        """
        @brief Plots a liteMap using astLib.astPlots.ImagePlot.

        The axes can be marked in either sexagesimal or decimal celestial coordinates.
        If RATickSteps or decTickSteps are set to "auto", the appropriate axis scales will
        be determined automatically from the size of the image array and associated WCS.
        The tick step sizes can be overridden.
        If the coordinate axes are in sexagesimal format, the tick step is specified as a
        dictionary in the format {'deg', 'unit'}. If the coordinate axes are in
        decimal format, the tick step size is specified simply in RA, dec decimal degrees.
        
        
        @param valueRange A tuple e.g. [-300,300] specifying the limits of the colorscale
        @param show Show the plot instead of saving
        @param saveFig save to a file
        @param colorMapName name of pylab.cm colorMap
        @param colBarLabel add label to the colorBar
        @param colBarOrient orientation of the colorbar (can be 'vertical' (default) or 'horizontal')
        @param colBarShrink shrink the colorbar by this factor
        @param kwd_args all keywords accepted by astLib.astPlots.ImagePlot:
        @type axesLabels: string
        @param axesLabels: either "sexagesimal" (for H:M:S, D:M:S), "decimal" (for decimal degrees)
        or None (for no coordinate axes labels)
        @type axesFontFamily: string
        @param axesFontFamily: matplotlib fontfamily, e.g. 'serif', 'sans-serif' etc.
        @type axesFontSize: float
        @param axesFontSize: font size of axes labels and titles (in points)
        @param RATickSteps See docstring above
        @param decTickSteps  See docstring above
        """
        if valueRange != None:
            vmin = valueRange[0]
            vmax = valueRange[1]
        else:
            vmin = self.data.min()
            vmax = self.data.max()

        # Convert name to a matplotlib.cm.Colormap instance
        try:
            cmap = pylab.cm.__dict__[colorMapName]
        except KeyError:
            cmap = pylab.cm.hsv

        if not (useImagePlot):
            pylab.imshow(self.data,origin="down",vmin=vmin,vmax=vmax,\
                         extent=[self.x0,self.x1,self.y0,self.y1],\
                         aspect=1./(np.cos(0.5*np.pi/180.*(self.y0+self.y1))),\
                         cmap=cmap)
        else:
            astLib.astPlots.ImagePlot(self.data,self.wcs,colorMapName=colorMapName,\
                                      cutLevels=[vmin,vmax],colorBar=False,**kwd_args)

        cb = pylab.colorbar(orientation=colBarOrient, shrink=colBarShrink)
        if colBarLabel != None:
            cb.set_label(colBarLabel)

        #pylab.xlabel('Ra (degrees)')
        #pylab.ylabel('Dec (degrees)')
        #pylab.title(title)
        if saveFig != None:
            pylab.savefig(saveFig)
        if show:
            pylab.show()
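
# A hedged usage sketch for the plot() method above -- `lm` stands for any liteMap-like
# object exposing this method; the value range, colormap, label and file name are arbitrary
# illustration values, not taken from the original code.
lm.plot(valueRange=[-300, 300],
        colorMapName='jet',
        colBarLabel='uK',
        colBarOrient='horizontal',
        show=False,
        saveFig='map.png')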
Exemple #48
0
    def plot_it(self):
        d = self.data[:, :, 180]
        d[d < 0.2] = 0
        im = plt.imshow(d, cmap='hot')
        plt.colorbar(im, orientation='horizontal')
        plt.show()
Exemple #49
0
        use_bias=False)(zero_pad1)
    batchnorm1=tkl.BatchNormalization()(conv)
    leaky_relu=tkl.LeakyReLU()(batchnorm1)
    zero_pad2=tkl.ZeroPadding2D()(leaky_relu)
    last=tkl.Conv2D(
        1,4,strides=1,kernel_initializer=initializer)(zero_pad2)
    return tf.keras.Model(inputs=[inp,tar],outputs=last)

discriminator=Discriminator()
tku.plot_model(discriminator,show_shapes=True,dpi=48)

disc_output_img=discriminator(
    [input_img[tf.newaxis,...],gen_output_img],training=False)
pl.imshow(disc_output_img[0,...,-1],
          vmin=-20,vmax=20,cmap='jet')
pl.colorbar(); pl.tight_layout();

def discriminator_loss(disc_real_output,disc_generated_output):
    real_loss=loss_object(
        tf.ones_like(disc_real_output),disc_real_output)
    generated_loss=loss_object(
        tf.zeros_like(disc_generated_output),disc_generated_output)
    total_disc_loss=real_loss+generated_loss
    return total_disc_loss
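
# Only the discriminator loss is shown in this excerpt. For completeness, below is a sketch
# of the usual pix2pix-style generator loss; it assumes `loss_object` is a
# BinaryCrossentropy(from_logits=True) instance and that an L1 reconstruction term weighted
# by LAMBDA is wanted -- neither detail appears in the excerpt itself.
LAMBDA = 100  # assumed weight for the L1 term

def generator_loss(disc_generated_output, gen_output, target):
    gan_loss = loss_object(
        tf.ones_like(disc_generated_output), disc_generated_output)
    l1_loss = tf.reduce_mean(tf.abs(target - gen_output))
    total_gen_loss = gan_loss + LAMBDA * l1_loss
    return total_gen_loss, gan_loss, l1_loss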

# Commented out IPython magic to ensure Python compatibility.
# %ch1 Model Optimizers, Callbacks & Visualizations

generator_optimizer=tf.keras.optimizers.Adam(2e-4,beta_1=.5)
discriminator_optimizer=tf.keras.optimizers.Adam(2e-4,beta_1=.5)
Exemple #50
0
    def plot(self, labels=False, show=True):
        '''
        This function plots 2D and 3D models
        :param labels:
        :param show: If True, the plots are displayed at the end of this call. If False, plt.show() should be called outside this function
        :return:
        '''
        if self.k == 3:
            import mayavi.mlab as mlab

            predictFig = mlab.figure(figure='predict')
            errorFig = mlab.figure(figure='error')
            if self.testfunction:
                truthFig = mlab.figure(figure='test')
            dx = 1
            pts = 25j
            X, Y, Z = np.mgrid[0:dx:pts, 0:dx:pts, 0:dx:pts]
            scalars = np.zeros(X.shape)
            errscalars = np.zeros(X.shape)
            for i in range(X.shape[0]):
                for j in range(X.shape[1]):
                    for k1 in range(X.shape[2]):
                        errscalars[i][j][k1] = self.predicterr_normalized(
                            [X[i][j][k1], Y[i][j][k1], Z[i][j][k1]])
                        scalars[i][j][k1] = self.predict_normalized(
                            [X[i][j][k1], Y[i][j][k1], Z[i][j][k1]])

            if self.testfunction:
                tfscalars = np.zeros(X.shape)
                for i in range(X.shape[0]):
                    for j in range(X.shape[1]):
                        for k1 in range(X.shape[2]):
                            tfplot = tfscalars[i][j][k1] = self.testfunction(
                                [X[i][j][k1], Y[i][j][k1], Z[i][j][k1]])
                plot = mlab.contour3d(tfscalars,
                                      contours=15,
                                      transparent=True,
                                      figure=truthFig)
                plot.compute_normals = False

            # obj = mlab.contour3d(scalars, contours=10, transparent=True)
            plot = mlab.contour3d(scalars,
                                  contours=15,
                                  transparent=True,
                                  figure=predictFig)
            plot.compute_normals = False
            errplt = mlab.contour3d(errscalars,
                                    contours=15,
                                    transparent=True,
                                    figure=errorFig)
            errplt.compute_normals = False
            if show:
                mlab.show()

        if self.k == 2:
            fig = pylab.figure(figsize=(8, 6))
            samplePoints = zip(*self.X)
            # Create a set of data to plot
            plotgrid = 61
            x = np.linspace(self.normRange[0][0],
                            self.normRange[0][1],
                            num=plotgrid)
            y = np.linspace(self.normRange[1][0],
                            self.normRange[1][1],
                            num=plotgrid)

            x = np.linspace(0, 1, num=plotgrid)
            y = np.linspace(0, 1, num=plotgrid)
            X, Y = np.meshgrid(x, y)

            # Predict based on the optimized results

            zs = np.array([
                self.predict([x, y]) for x, y in zip(np.ravel(X), np.ravel(Y))
            ])
            Z = zs.reshape(X.shape)
            # Z = (Z*(self.ynormRange[1]-self.ynormRange[0]))+self.ynormRange[0]

            #Calculate errors
            zse = np.array([
                self.predict_var([x, y])
                for x, y in zip(np.ravel(X), np.ravel(Y))
            ])
            Ze = zse.reshape(X.shape)

            spx = (self.X[:, 0] * (self.normRange[0][1] - self.normRange[0][0])
                   ) + self.normRange[0][0]
            spy = (self.X[:, 1] * (self.normRange[1][1] - self.normRange[1][0])
                   ) + self.normRange[1][0]
            contour_levels = 25

            ax = fig.add_subplot(222)
            CS = pylab.contourf(X, Y, Ze, contour_levels)
            pylab.colorbar()
            pylab.plot(spx, spy, 'ow')

            ax = fig.add_subplot(221)
            if self.testfunction:
                # Setup the truth function
                zt = self.testfunction(np.array(list(zip(np.ravel(X), np.ravel(Y)))))
                ZT = zt.reshape(X.shape)
                CS = pylab.contour(X,
                                   Y,
                                   ZT,
                                   contour_levels,
                                   colors='k',
                                   zorder=2)

            # contour_levels = np.linspace(min(zt), max(zt),50)
            if self.testfunction:
                contour_levels = CS.levels
                delta = np.abs(contour_levels[0] - contour_levels[1])
                contour_levels = np.insert(contour_levels, 0,
                                           contour_levels[0] - delta)
                contour_levels = np.append(contour_levels,
                                           contour_levels[-1] + delta)

            CS = plt.contourf(X, Y, Z, contour_levels, zorder=1)
            pylab.plot(spx, spy, 'ow', zorder=3)
            pylab.colorbar()

            ax = fig.add_subplot(212, projection='3d')
            # fig = plt.gcf()
            #ax = fig.gca(projection='3d')
            ax.plot_surface(X, Y, Z, rstride=3, cstride=3, alpha=0.4)
            if self.testfunction:
                ax.plot_wireframe(X, Y, ZT, rstride=3, cstride=3)
            if show:
                pylab.show()
Exemple #51
0
def plotField(field, target_fields=None, completed_fields=None, options_basemap={}, **kwargs):
    """
    Plot a specific target field.

    Parameters:
    -----------
    field            : The specific field of interest.
    target_fields    : The fields that will be observed
    completed_fields : The fields that have been observed
    options_basemap  : Keyword arguments to the basemap constructor
    kwargs           : Keyword arguments to the matplotlib.scatter function

    Returns:
    --------
    basemap : The basemap object
    """
    if isinstance(field,np.core.records.record):
        tmp = FieldArray(1)
        tmp[0] = field
        field = tmp
    band = field[0]['FILTER']
    cmap = matplotlib.cm.get_cmap(CMAPS[band])
    defaults = dict(marker='H',s=100,edgecolor='',vmin=-1,vmax=4,cmap=cmap)
    #defaults = dict(edgecolor='none', s=50, vmin=0, vmax=4, cmap='summer_r')
    #defaults = dict(edgecolor='none', s=50, vmin=0, vmax=4, cmap='gray_r')
    setdefaults(kwargs,defaults)

    msg="%s: id=%10s, "%(datestring(field['DATE'][0],0),field['ID'][0])
    msg +="ra=%(RA)-6.2f, dec=%(DEC)-6.2f, secz=%(AIRMASS)-4.2f"%field[0]
    logging.info(msg)

    defaults = dict(date=field['DATE'][0], name='ortho')
    options_basemap = dict(options_basemap)
    setdefaults(options_basemap,defaults)
    fig, basemap = makePlot(**options_basemap)
    plt.subplots_adjust(left=0.03,right=0.97,bottom=0.03,top=0.97)

    # Plot target fields
    if target_fields is not None and len(target_fields):
        sel = target_fields['FILTER']==band
        x,y = basemap.proj(target_fields['RA'], target_fields['DEC'])
        kw = dict(kwargs,c='w',edgecolor='0.6',s=0.8*kwargs['s'])
        basemap.scatter(x[sel], y[sel], **kw)
        kw = dict(kwargs,c='w',edgecolor='0.8',s=0.8*kwargs['s'])
        basemap.scatter(x[~sel], y[~sel], **kw)

    # Plot completed fields
    if completed_fields is not None and len(completed_fields):
        sel = completed_fields['FILTER']==band
        x,y = basemap.proj(completed_fields['RA'],completed_fields['DEC'])
        kw = dict(kwargs)
        basemap.scatter(x[~sel], y[~sel], c='0.6', **kw)
        basemap.scatter(x[sel], y[sel], c=completed_fields['TILING'][sel], **kw)

    # Try to draw the colorbar
    try:
        if len(fig.axes) == 2:
            # Draw colorbar in existing axis
            colorbar = plt.colorbar(cax=fig.axes[-1])
        else:
            colorbar = plt.colorbar()
        colorbar.set_label('Tiling (%s-band)'%band)
    except TypeError:
        pass
    plt.sca(fig.axes[0])

    # Show the selected field
    x,y = basemap.proj(field['RA'], field['DEC'])
    kw = dict(kwargs,edgecolor='k')
    basemap.scatter(x,y,c=COLORS[band],**kw)

    return basemap
Exemple #52
0
scaler = MinMaxScaler(feature_range=(0,1))
x = scaler.fit_transform(x) #should we even transform the ID?


#10x10 grid output with 15 dim input and sigma of 1
som = MiniSom(x=10,y=10, input_len=15,sigma=1.0, learning_rate=0.2)
som.random_weights_init(x)
som.train_random(x, num_iteration=200)


from pylab import bone, pcolor, colorbar, plot, show
bone()

#color the map by mean inter-neuron distance (MID) on top of the bone background
pcolor(som.distance_map().T) #distance_map returns the matrix of mean inter-neuron distances (transposed)
colorbar() #legend: color intensity; dark = low MID, light = high MID (outliers, potential frauds)

#mark whether the customers mapped to each node were approved or not
markers = ['o', 's'] #circles and squares: 'o' for non-approval, 's' for approval
colors = ['r', 'b']

for i,j in enumerate(x):
    #get the winning node for customer i
    winning_node = som.winner(j)
    plot(winning_node[0]+0.5, winning_node[1]+0.5, markers[y[i]],
         markeredgecolor=colors[y[i]], markerfacecolor='None', markersize=10, markeredgewidth=2)
    #plot(winning_node[1]+0.5)
    #plot(markers[y[i]]) #if the customer was approved or not, based on y vector of our dataset
    #plot(markeredgecolor = colors[y[i]], markerfacecolor = None, markersize = 10, markeredgewidth = 3)
show()
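
# The marker plot above only shows approval status per node; a common follow-up is to list
# the customers that fall on the high-MID ("light") cells. The sketch below relies on
# MiniSom's win_map() and on a pair of hypothetical suspicious coordinates (7, 2) and (4, 8)
# -- read the actual light cells off your own distance map before using it.
import numpy as np
mappings = som.win_map(x)  # dict: (i, j) winning node -> list of scaled input rows
suspects = mappings.get((7, 2), []) + mappings.get((4, 8), [])
if suspects:
    frauds = scaler.inverse_transform(np.array(suspects))  # back to the original feature scale
    print(frauds)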
def stage_psfplots(T=None,
                   sedsn=None,
                   coimgs=None,
                   cons=None,
                   detmaps=None,
                   detivs=None,
                   blobsrcs=None,
                   blobflux=None,
                   blobslices=None,
                   blobs=None,
                   tractor=None,
                   cat=None,
                   targetrd=None,
                   pixscale=None,
                   targetwcs=None,
                   W=None,
                   H=None,
                   brickid=None,
                   bands=None,
                   ps=None,
                   tims=None,
                   plots=False,
                   **kwargs):

    tim = tims[0]
    tim.psfex.fitSavedData(*tim.psfex.splinedata)
    spl = tim.psfex.splines[0]
    print('Spline:', spl)
    knots = spl.get_knots()
    print('knots:', knots)
    tx, ty = knots
    k = 3
    print('interior knots x:', tx[k + 1:-k - 1])
    print('additional knots x:', tx[:k + 1], 'and', tx[-k - 1:])
    print('interior knots y:', ty[k + 1:-k - 1])
    print('additional knots y:', ty[:k + 1], 'and', ty[-k - 1:])

    for itim, tim in enumerate(tims):
        psfex = tim.psfex
        psfex.fitSavedData(*psfex.splinedata)
        if plots:
            print()
            print('Tim', tim)
            print()
            pp, xx, yy = psfex.splinedata
            ny, nx, nparams = pp.shape
            assert (len(xx) == nx)
            assert (len(yy) == ny)
            psfnil = psfex.psfclass(*np.zeros(nparams))
            names = psfnil.getParamNames()
            xa = np.linspace(xx[0], xx[-1], 50)
            ya = np.linspace(yy[0], yy[-1], 100)
            #xa,ya = np.meshgrid(xa,ya)
            #xa = xa.ravel()
            #ya = ya.ravel()
            print('xa', xa)
            print('ya', ya)
            for i in range(nparams):
                plt.clf()
                plt.subplot(1, 2, 1)
                dimshow(pp[:, :, i])
                plt.title('grid fit')
                plt.colorbar()
                plt.subplot(1, 2, 2)
                sp = psfex.splines[i](xa, ya)
                sp = sp.T
                print('spline shape', sp.shape)
                assert (sp.shape == (len(ya), len(xa)))
                dimshow(sp, extent=[xx[0], xx[-1], yy[0], yy[-1]])
                plt.title('spline')
                plt.colorbar()
                plt.suptitle('tim %s: PSF param %s' % (tim.name, names[i]))
                ps.savefig()
Exemple #54
0
def main():
    """
    In this simple tutorial example, the main function does all the work: 
    it sets the parameter values, creates and initializes a grid, sets up 
    the state variables, runs the main loop, and cleans up.
    """
    
    # INITIALIZE
    
    # User-defined parameter values
    dem_name = 'ExampleDEM/west_bijou_gully.asc'
    outlet_row = 6
    outlet_column = 38
    next_to_outlet_row = 7
    next_to_outlet_column = 38
    n = 0.06              # roughness coefficient (Manning's n)
    h_init = 0.001        # initial thin layer of water (m)
    g = 9.8               # gravitational acceleration (m/s2)
    alpha = 0.2           # time-step factor (ND; from Bates et al., 2010)
    run_time = 2400       # duration of run, seconds
    rainfall_mmhr = 100   # rainfall rate, in mm/hr
    rain_duration = 15*60 # rainfall duration, in seconds
    
    # Derived parameters
    rainfall_rate = (rainfall_mmhr/1000.)/3600.  # rainfall in m/s
    ten_thirds = 10./3.   # pre-calculate 10/3 for speed
    elapsed_time = 0.0    # total time in simulation
    report_interval = 5.  # interval to report progress (seconds)
    next_report = time.time()+report_interval   # next time to report progress
    DATA_FILE = os.path.join(os.path.dirname(__file__), dem_name)
    
    # Create and initialize a raster model grid by reading a DEM
    print('Reading data from "'+str(DATA_FILE)+'"')
    (mg, z) = read_esri_ascii(DATA_FILE)
    print('DEM has ' + str(mg.number_of_node_rows) + ' rows, ' +
          str(mg.number_of_node_columns) + ' columns, and cell size ' + str(mg.dx) + ' m')
    
    # Modify the grid DEM to set all nodata nodes to inactive boundaries
    mg.set_nodata_nodes_to_closed(z, 0) # set nodata nodes to inactive bounds
    
    # Set the open boundary (outlet) cell. We want to remember the ID of the 
    # outlet node and the ID of the interior node adjacent to it. We'll make
    # the outlet node an open boundary.
    outlet_node = mg.grid_coords_to_node_id(outlet_row, outlet_column)
    node_next_to_outlet = mg.grid_coords_to_node_id(next_to_outlet_row, 
                                                    next_to_outlet_column)
    mg.set_fixed_value_boundaries(outlet_node)
    
    # Set up state variables
    h = mg.add_zeros('node', 'Water_depth') + h_init     # water depth (m)
    q = mg.create_active_link_array_zeros()       # unit discharge (m2/s)
    
    # Get a list of the core nodes
    core_nodes = mg.core_nodes
    
    # To track discharge at the outlet through time, we create initially empty
    # lists for time and outlet discharge.
    q_outlet = []
    t = []
    q_outlet.append(0.)
    t.append(0.)
    outlet_link = mg.get_active_link_connecting_node_pair(outlet_node, node_next_to_outlet)
        
    # Display a message
    print( 'Running ...' )
    start_time = time.time()
    
    # RUN
    
    # Main loop
    while elapsed_time < run_time:
        
        # Report progress
        if time.time()>=next_report:
            print('Time = '+str(elapsed_time)+' ('
                    +str(100.*elapsed_time/run_time)+'%)')
            next_report += report_interval
        
        # Calculate time-step size for this iteration (Bates et al., eq 14)
        dtmax = alpha*mg.dx/np.sqrt(g*np.amax(h))
        
        # Calculate the effective flow depth at active links. Bates et al. 2010
        # recommend using the difference between the highest water-surface
        # and the highest bed elevation between each pair of cells.
        zmax = mg.max_of_link_end_node_values(z)
        w = h+z   # water-surface height
        wmax = mg.max_of_link_end_node_values(w)
        hflow = wmax - zmax
        
        # Calculate water-surface slopes
        water_surface_slope = mg.calculate_gradients_at_active_links(w)
        
        # Calculate the unit discharges (Bates et al., eq 11)
        q = (q-g*hflow*dtmax*water_surface_slope)/ \
            (1.+g*hflow*dtmax*n*n*abs(q)/(hflow**ten_thirds))
        
        # Calculate water-flux divergence at nodes
        dqds = mg.calculate_flux_divergence_at_nodes(q)
        
        # Update rainfall rate
        if elapsed_time > rain_duration:
            rainfall_rate = 0.
        
        # Calculate rate of change of water depth
        dhdt = rainfall_rate-dqds
        
        # Second time-step limiter (experimental): make sure you don't allow
        # water-depth to go negative
        if np.amin(dhdt) < 0.:
            shallowing_locations = np.where(dhdt<0.)
            time_to_drain = -h[shallowing_locations]/dhdt[shallowing_locations]
            dtmax2 = alpha*np.amin(time_to_drain)
            dt = np.min([dtmax, dtmax2])
        else:
            dt = dtmax
        
        # Update the water-depth field
        h[core_nodes] = h[core_nodes] + dhdt[core_nodes]*dt
        h[outlet_node] = h[node_next_to_outlet]
        
        # Update current time
        elapsed_time += dt
        
        # Remember discharge and time
        t.append(elapsed_time)
        q_outlet.append(abs(q[outlet_link]))
        
        
    # FINALIZE
    
    # Set the elevations of the nodata cells to the minimum active cell
    # elevation (convenient for plotting)
    z[np.where(z<=0.)] = 9999            # temporarily change their elevs ...
    zmin = np.amin(z)                    # ... so we can find the minimum ...
    z[np.where(z==9999)] = zmin          # ... and assign them this value.
    
    # Get a 2D array version of the water depths and elevations

    # Clear previous plots
    pylab.figure(1)
    pylab.close()
    pylab.figure(2)
    pylab.close()
    
    # Plot discharge vs. time
    pylab.figure(1)
    pylab.plot(np.array(t), np.array(q_outlet)*mg.dx)
    pylab.xlabel('Time (s)')
    pylab.ylabel('Q (m3/s)')
    pylab.title('Outlet discharge')
    
    # Plot topography
    pylab.figure(2)
    pylab.subplot(121)
    imshow_grid(mg, z, allow_colorbar=False)
    pylab.xlabel(None)
    pylab.ylabel(None)
    im = pylab.set_cmap('RdBu')
    cb = pylab.colorbar(im)
    cb.set_label('Elevation (m)', fontsize=12)
    pylab.title('Topography')
    
    # Plot water depth
    pylab.subplot(122)
    imshow_grid(mg, h, allow_colorbar=False)
    im2 = pylab.set_cmap('RdBu')
    pylab.clim(0, 0.25)
    cb = pylab.colorbar(im2)
    cb.set_label('Water depth (m)', fontsize=12)
    pylab.title('Water depth')
    # Display the plots
    pylab.show()
    print('Done.')
    print('Total run time = '+str(time.time()-start_time)+' seconds.')
Exemple #55
0
            print("%0.3fs" % delta)
            omp[i_f, i_s] = delta

    results['time(LARS) / time(OMP)\n (w/ Gram)'] = (lars_gram / omp_gram)
    results['time(LARS) / time(OMP)\n (w/o Gram)'] = (lars / omp)
    return results


if __name__ == '__main__':
    samples_range = np.linspace(1000, 5000, 5).astype(int)
    features_range = np.linspace(1000, 5000, 5).astype(int)
    results = compute_bench(samples_range, features_range)
    max_time = max(np.max(t) for t in results.values())

    import pylab as pl
    fig = pl.figure('scikit-learn OMP vs. LARS benchmark results')
    for i, (label, timings) in enumerate(sorted(results.items())):
        ax = fig.add_subplot(1, 2, i + 1)
        vmax = max(1 - timings.min(), -1 + timings.max())
        pl.matshow(timings, fignum=False, vmin=1 - vmax, vmax=1 + vmax)
        ax.set_xticklabels([''] + [str(s) for s in samples_range])
        ax.set_yticklabels([''] + [str(f) for f in features_range])
        pl.xlabel('n_samples')
        pl.ylabel('n_features')
        pl.title(label)

    pl.subplots_adjust(0.1, 0.08, 0.96, 0.98, 0.4, 0.63)
    ax = pl.axes([0.1, 0.08, 0.8, 0.06])
    pl.colorbar(cax=ax, orientation='horizontal')
    pl.show()
Exemple #56
0
def plot_topo_file(topoplotdata):
    """
    Read in a topo or bathy file and produce a pcolor map.
    """

    import os
    import pylab
    from pyclaw.data import Data

    fname = topoplotdata.fname
    topotype = topoplotdata.topotype
    if topoplotdata.climits:
        # deprecated option
        cmin = topoplotdata.climits[0]
        cmax = topoplotdata.climits[1]
    else:
        cmin = topoplotdata.cmin
        cmax = topoplotdata.cmax
    figno = topoplotdata.figno
    addcolorbar = topoplotdata.addcolorbar
    addcontour = topoplotdata.addcontour
    contour_levels = topoplotdata.contour_levels
    xlimits = topoplotdata.xlimits
    ylimits = topoplotdata.ylimits
    coarsen = topoplotdata.coarsen
    imshow = topoplotdata.imshow
    gridedges_show = topoplotdata.gridedges_show
    neg_cmap = topoplotdata.neg_cmap
    pos_cmap = topoplotdata.pos_cmap
    print_fname = topoplotdata.print_fname

    if neg_cmap is None:
        neg_cmap = colormaps.make_colormap({
            cmin: [0.3, 0.2, 0.1],
            0: [0.95, 0.9, 0.7]
        })
    if pos_cmap is None:
        pos_cmap = colormaps.make_colormap({
            0: [.5, .7, 0],
            cmax: [.2, .5, .2]
        })

    if abs(topotype) == 1:

        X, Y, topo = topotools.topofile2griddata(fname, topotype)
        topo = pylab.flipud(topo)
        Y = pylab.flipud(Y)
        x = X[0, :]
        y = Y[:, 0]
        xllcorner = x[0]
        yllcorner = y[0]
        cellsize = x[1] - x[0]

    elif abs(topotype) == 3:

        file = open(fname, 'r')
        lines = file.readlines()
        ncols = int(lines[0].split()[0])
        nrows = int(lines[1].split()[0])
        xllcorner = float(lines[2].split()[0])
        yllcorner = float(lines[3].split()[0])
        cellsize = float(lines[4].split()[0])
        NODATA_value = int(lines[5].split()[0])

        print "Loading file ", fname
        print "   nrows = %i, ncols = %i" % (nrows, ncols)
        topo = pylab.loadtxt(fname, skiprows=6, dtype=float)
        print "   Done loading"

        if 0:
            topo = []
            for i in range(nrows):
                topo.append(pylab.array(lines[6 + i], ))
            print('+++ topo = ', topo)
            topo = pylab.array(topo)

        topo = pylab.flipud(topo)

        x = pylab.linspace(xllcorner, xllcorner + ncols * cellsize, ncols)
        y = pylab.linspace(yllcorner, yllcorner + nrows * cellsize, nrows)
        print "Shape of x, y, topo: ", x.shape, y.shape, topo.shape

    else:
        raise Exception("*** Only topotypes 1 and 3 supported so far")

    if coarsen > 1:
        topo = topo[slice(0, nrows, coarsen), slice(0, ncols, coarsen)]
        x = x[slice(0, ncols, coarsen)]
        y = y[slice(0, nrows, coarsen)]
        print "Shapes after coarsening: ", x.shape, y.shape, topo.shape

    if topotype < 0:
        topo = -topo

    if figno:
        pylab.figure(figno)

    if topoplotdata.imshow:
        color_norm = Normalize(cmin, cmax, clip=True)
        xylimits = (x[0], x[-1], y[0], y[-1])
        #pylab.imshow(pylab.flipud(topo.T), extent=xylimits, \
        pylab.imshow(pylab.flipud(topo), extent=xylimits, \
                cmap=cmap, interpolation='nearest', \
                norm=color_norm)
    else:
        neg_topo = ma.masked_where(topo > 0., topo)
        all_masked = (ma.count(neg_topo) == 0)
        if not all_masked:
            pylab.pcolormesh(x, y, neg_topo, cmap=neg_cmap)
            pylab.clim([cmin, 0])
            if addcolorbar:
                pylab.colorbar()

        pos_topo = ma.masked_where(topo < 0., topo)
        all_masked = (ma.count(pos_topo) == 0)
        if not all_masked:
            pylab.pcolormesh(x, y, pos_topo, cmap=pos_cmap)
            pylab.clim([0, cmax])
            if addcolorbar:
                pylab.colorbar()

    pylab.axis('scaled')

    if addcontour:
        pylab.contour(x, y, topo, levels=contour_levels, colors='k')

    if gridedges_show:
        pylab.plot([x[0], x[-1]], [y[0], y[0]], 'k')
        pylab.plot([x[0], x[-1]], [y[-1], y[-1]], 'k')
        pylab.plot([x[0], x[0]], [y[0], y[-1]], 'k')
        pylab.plot([x[-1], x[-1]], [y[0], y[-1]], 'k')

    if print_fname:
        fname2 = os.path.splitext(fname)[0]
        pylab.text(xllcorner + cellsize,
                   yllcorner + cellsize,
                   fname2,
                   color='m')

    topodata = Data()
    topodata.x = x
    topodata.y = y
    topodata.topo = topo

    return topodata
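
# A hedged usage sketch for plot_topo_file(): the attribute names below mirror exactly what
# the function reads from `topoplotdata`, but the file name, colour limits and other values
# are made-up placeholders.
from pyclaw.data import Data

topoplotdata = Data()
topoplotdata.fname = 'mybathy.tt3'   # hypothetical topotype-3 file
topoplotdata.topotype = 3
topoplotdata.climits = None          # use cmin/cmax instead of the deprecated option
topoplotdata.cmin = -200.
topoplotdata.cmax = 200.
topoplotdata.figno = 1
topoplotdata.addcolorbar = True
topoplotdata.addcontour = False
topoplotdata.contour_levels = None
topoplotdata.xlimits = None
topoplotdata.ylimits = None
topoplotdata.coarsen = 1
topoplotdata.imshow = False
topoplotdata.gridedges_show = True
topoplotdata.neg_cmap = None
topoplotdata.pos_cmap = None
topoplotdata.print_fname = True

topodata = plot_topo_file(topoplotdata)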
Exemple #57
0
import pylab as plt
import numpy as np

a = np.arange(-4, 4, 0.1)
x, y = np.meshgrid(a, a)
matrix = np.sin(x**2 + y**2)

plt.figure()

plt.imshow(matrix, cmap=plt.cm.plasma)  # gray, hot, magma, ..
plt.colorbar()

plt.title("Heatmap der Funktion $\sin{(x^2 + y^2)}$")

plt.savefig("heatmap.png")
plt.savefig("heatmap.svg")
Exemple #58
0
mags = []
for cnt, t in enumerate(times):
    print('Working on integration %i of %i' % (cnt + 1, obs_duration))
    for i in range(nants):
        #print i
        for j in range(nants):
            if i == j: continue  #don't bother with autocorrelations
            #if i > j: continue #don't double count
            aa.set_jultime(t)
            lst = aa.sidereal_time()
            obs_zen.compute(aa)
            u, v, w = aa.gen_uvw(i, j, src=obs_zen)
            _beam = beamgridder(sigma=sigma,
                                xcen=u / dish_size_in_lambda,
                                ycen=v / dish_size_in_lambda,
                                size=dim)
            #print sigma, u/dish_size_in_lambda,v/dish_size_in_lambda, dim
            uv += _beam

#uv[:,:dim/2] = 0
#uv[dim/2:,dim/2] = 0

n.save('uvcov.npy', uv)

print('there are %i minutes of integration in the uv plane' % n.sum(uv))

p.imshow(uv, interpolation='nearest')
p.colorbar()
p.show()
#In this example red corresponds to the strongest positive correlation; the strongest
#correlation is between the two variables petal width and petal length.

from numpy import corrcoef
corr = corrcoef(data.T)  # .T gives the transpose
print(corr)
#output:[[ 1.         -0.10936925  0.87175416  0.81795363]
#output: [-0.10936925  1.         -0.4205161  -0.35654409]
#output: [ 0.87175416 -0.4205161   1.          0.9627571 ]
#output: [ 0.81795363 -0.35654409  0.9627571   1.        ]]

from pylab import pcolor, colorbar, xticks, yticks, show
from numpy import arange
pcolor(corr)  #plot the correlation matrix; 4 attributes, so it is 4x4
colorbar()  #add the colorbar legend
#label the X and Y axes; ticks 1,2,3,4 correspond to the four attribute names below.
xticks(arange(1, 5),
       ['sepal length', 'sepal width', 'petal length', 'petal width'],
       rotation=-20)
yticks(arange(1, 5),
       ['sepal length', 'sepal width', 'petal length', 'petal width'],
       rotation=-45)
show()

###########################
#(6) Component analysis (dimensionality reduction)
#    One of the algorithms involved: PCA
###########################

from sklearn.decomposition import PCA
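
# The section header above announces PCA but the excerpt stops at the import. A minimal
# sketch of the usual next steps on the same 4-feature iris-style `data` array follows; the
# choice of 2 components and colouring by a `target` label vector are assumptions.
from pylab import scatter

pca = PCA(n_components=2)
reduced = pca.fit_transform(data)      # shape (n_samples, 2)
print(pca.explained_variance_ratio_)   # fraction of variance kept by each component

scatter(reduced[:, 0], reduced[:, 1], c=target)
colorbar()
show()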
Exemple #60
0
bg = nibabel.load(os.path.join('bg.nii.gz'))

pl.figure(figsize=(8, 8))
ax1 = pl.axes([0., 0., 1., 1.])
pl.imshow(bg.get_data()[:, :, 10].T,
          interpolation="nearest",
          cmap='gray',
          origin='lower')
pl.imshow(np.ma.masked_less(sbrain_ridge[:, :, 10].T, 1e-6),
          interpolation="nearest",
          cmap='hot',
          origin="lower")
plot_lines(contour[:, :, 10].T)
pl.axis('off')
ax2 = pl.axes([.08, .5, .05, .47])
cb = pl.colorbar(cax=ax2, ax=ax1)
cb.ax.yaxis.set_ticks_position('left')
cb.ax.yaxis.set_tick_params(labelcolor='white')
cb.ax.yaxis.set_tick_params(labelsize=20)
cb.set_ticks(np.arange(0., .8, .2))
pl.savefig(os.path.join('output', 'encoding_scores_ridge.pdf'))
pl.savefig(os.path.join('output', 'encoding_scores_ridge.png'))
pl.savefig(os.path.join('output', 'encoding_scores_ridge.eps'))
pl.clf()

sbrain_lasso = masking.unmask(np.array(scores_lasso).mean(0), dataset.mask)

pl.figure(figsize=(8, 8))
ax1 = pl.axes([0., 0., 1., 1.])
pl.imshow(bg.get_data()[:, :, 10].T,
          interpolation="nearest",