Example #1
def plot(y, function):
    """ Show an animation of Poincare plot.

    --- arguments ---
    y: A list of initial values
    function: function which is argument of Runge-Kutta solver
    """
    h = dt
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.grid()
    time_text = ax.text(0.05, 0.9, '', transform=ax.transAxes)
    plt.ion()

    for i in range(nmax + 1):
        for j in range(nstep):
            rk4 = RK.RK4(function)
            y = rk4.solve(y, j * h, h)
            # -pi <= theta <= pi
            while y[0] > pi:
                y[0] = y[0] - 2 * pi
            while y[0] < -pi:
                y[0] = y[0] + 2 * pi

        if ntransient <= i < nmax:          # <-- draw the poincare plots
            plt.scatter(y[0], y[1], s=2.0, marker='o', color='blue')
            time_text.set_text('n = %d' % i)
            plt.draw()

        if i == nmax:                       # <-- to stop the interactive mode
            plt.ioff()
            plt.scatter(y[0], y[1], s=2.0, marker='o', color='blue')
            time_text.set_text('n = %d' % i)
            plt.show()
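Example #1 relies on an external RK module whose RK4 class is built from the derivative function and advanced via solve(y, t, h). A minimal sketch of such a class, assuming the classic fourth-order Runge-Kutta scheme for a system y' = f(y, t) (an illustration only, not the original project's implementation):

class RK4:
    """Hypothetical stand-in for RK.RK4: one classic 4th-order Runge-Kutta step."""

    def __init__(self, f):
        self.f = f  # f(y, t) -> list of derivatives, as assumed by Example #1

    def solve(self, y, t, h):
        # advance the state vector y by a single step of size h
        f = self.f
        k1 = f(y, t)
        k2 = f([yi + 0.5 * h * ki for yi, ki in zip(y, k1)], t + 0.5 * h)
        k3 = f([yi + 0.5 * h * ki for yi, ki in zip(y, k2)], t + 0.5 * h)
        k4 = f([yi + h * ki for yi, ki in zip(y, k3)], t + h)
        return [yi + (h / 6.0) * (a + 2 * b + 2 * c + d)
                for yi, a, b, c, d in zip(y, k1, k2, k3, k4)]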
Example #2
   def __call__(self, **params):

       p = ParamOverrides(self, params)
       fig = plt.figure(figsize=(5, 5))

       # This one-liner works in Octave, but in matplotlib it
       # results in lines that are all connected across rows and columns,
       # so here we plot each line separately:
       #   plt.plot(x,y,"k-",transpose(x),transpose(y),"k-")
       # Here, the "k-" means plot in black using solid lines;
       # see matplotlib for more info.
       isint = plt.isinteractive()  # Temporarily make non-interactive for plotting
       plt.ioff()
       for r, c in zip(p.y[::p.skip], p.x[::p.skip]):
           plt.plot(c, r, "k-")
       for r, c in zip(np.transpose(p.y)[::p.skip],np.transpose(p.x)[::p.skip]):
           plt.plot(c, r, "k-")

       # Force the last line, to avoid leaving cells open
       if p.skip != 1:
           plt.plot(p.x[-1], p.y[-1], "k-")
           plt.plot(np.transpose(p.x)[-1], np.transpose(p.y)[-1], "k-")

       plt.xlabel('x')
       plt.ylabel('y')
       # Currently sets the input range arbitrarily; should presumably figure out
       # what the actual possible range is for this simulation (which would presumably
       # be the maximum size of any GeneratorSheet?).
       plt.axis(p.axis)

       if isint: plt.ion()
       self._generate_figure(p)
       return fig
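Many of the examples in this collection repeat the same idiom: remember whether interactive mode was on (plt.isinteractive()), switch it off with plt.ioff() while drawing, and restore it with plt.ion() afterwards. A small context manager can factor that pattern out; this is a sketch, not code from any of the projects above:

import contextlib
import matplotlib.pyplot as plt

@contextlib.contextmanager
def non_interactive():
    """Temporarily disable matplotlib's interactive mode, restoring it on exit."""
    was_interactive = plt.isinteractive()
    plt.ioff()
    try:
        yield
    finally:
        if was_interactive:
            plt.ion()

# usage: the body behaves like the isint/ioff/ion bookkeeping above
# with non_interactive():
#     plt.plot([0, 1], [1, 0], "k-")
#     plt.savefig("cells.png")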
Example #3
    def test1():
        x = [0.5]*3
        xbounds = [(-5, 5) for y in x]


        GA = GenAlg(fitcalc1, x, xbounds, popMult=100, bitsPerGene=9, mutation=(1./9.), crossover=0.65, crossN=2, direction='min', maxGens=60, hammingDist=False)
        results = GA.run()
        print "*** DONE ***"
        #print results
        plt.ioff()
        #generate pareto frontier numerically
        x1_ = np.arange(-5., 0., 0.05)
        x2_ = np.arange(-5., 0., 0.05)
        x3_ = np.arange(-5., 0., 0.05)

        pfn = []
        for x1 in x1_:
            for x2 in x2_:
                for x3 in x3_:
                    pfn.append(fitcalc1([x1,x2,x3]))

        pfn.sort(key=lambda x:x[0])
        
        plt.figure()
        for x in results:
            plt.scatter(x[1][0], x[1][1], 20, c='r')

        plt.scatter([x[0] for x in pfn], [x[1] for x in pfn], 1.0, c='b', alpha=0.1)
        plt.xlim([-20,-1])
        plt.ylim([-12, 2])
        plt.draw()
Example #4
def plot_average(filenames, save_plot=True, show_plot=False, dpi=100):

    ''' Plot signal averages from a list of averaged files. '''

    fname = get_files_from_list(filenames)

    # plot averages
    pl.ioff()  # switch off (interactive) plot visualisation
    factor = 1e15
    for fnavg in fname:
        name = fnavg[0:len(fnavg) - 4]
        basename = os.path.splitext(os.path.basename(name))[0]
        print fnavg
        # mne.read_evokeds provides a list or a single evoked based on condition.
        # here we assume only one evoked is returned (requires further handling)
        avg = mne.read_evokeds(fnavg)[0]
        ymin, ymax = avg.data.min(), avg.data.max()
        ymin *= factor * 1.1
        ymax *= factor * 1.1
        fig = pl.figure(basename, figsize=(10, 8), dpi=100)
        pl.clf()
        pl.ylim([ymin, ymax])
        pl.xlim([avg.times.min(), avg.times.max()])
        pl.plot(avg.times, avg.data.T * factor, color='black')
        pl.title(basename)

        # save figure
        fnfig = os.path.splitext(fnavg)[0] + '.png'
        pl.savefig(fnfig, dpi=dpi)

    pl.ion()  # switch on (interactive) plot visualisation
Example #5
    def matrix_plot(self, matrix, figure_name='matrix_plot.pdf'):
        import numpy
        from matplotlib import pylab
        def _blob(x,y,area,colour):
            hs = numpy.sqrt(area) / 2
            xcorners = numpy.array([x - hs, x + hs, x + hs, x - hs])
            ycorners = numpy.array([y - hs, y - hs, y + hs, y + hs])
            pylab.fill(xcorners, ycorners, colour, edgecolor=colour)
        reenable = False
        if pylab.isinteractive():
            pylab.ioff()
            reenable = True
        pylab.clf()
        
        maxWeight = 2**numpy.ceil(numpy.log(numpy.max(numpy.abs(matrix)))/numpy.log(2))
        height, width = matrix.shape
        pylab.fill(numpy.array([0,width,width,0]),numpy.array([0,0,height,height]),'white')
        pylab.axis('off')
        pylab.axis('equal')
        for x in xrange(width):
            for y in xrange(height):
                _x = x+1
                _y = y+1
                w = matrix[y,x]
                if w > 0:
                    _blob(_x - 0.5, height - _y + 0.5, 0.2,'#0099CC')
                elif w < 0:
                    _blob(_x - 0.5, height - _y + 0.5, 0.2,'#660000')

        if reenable:
            pylab.ion()
        pylab.savefig(figure_name) 
Example #6
def kmr_test_plot(data, k, end_thresh):
    from matplotlib.pylab import ion, figure, draw, ioff, show, plot, cla
    ion()
    fig = figure()
    ax = fig.add_subplot(111)
    ax.grid(True)

    # get k centroids
    kmr = kmeans.kmeans_runner(k, end_thresh)
    kmr.init_data(data)
    print kmr.centroids

    plot(data[:,0], data[:,1], 'o')

    i = 0
    while kmr.stop_flag is False:
        kmr.iterate()
        #print kmr.centroids, kmr.itr_count
        plot(kmr.centroids[:, 0], kmr.centroids[:, 1], 'sr')
        time.sleep(.2)
        draw()
        i += 1

    print "N Iterations: %d" % (i)
    plot(kmr.centroids[:, 0], kmr.centroids[:, 1], 'g^', linewidth=3)

    ioff()
    show()
    print kmr.itr_count, kmr.centroids
Example #7
def CalculateG(distances):
    
    #distances = distances[:len(distances)/2]

    x = []
    y = []

    for key, value in distances.items():
        x.append(value[0])
        y.append(value[1])
    
    #print(str(x))
    #print(str(y))
    
    fig = plt.figure()
    ax = fig.add_subplot(111)

    p = ax.plot(x, y, 'b')
    ax.set_xlabel('t')
    ax.set_ylabel('s')
    ax.set_title('Simple XY point plot')

    pylab.ioff()
    plt.show()

    def s(t, a):
        return 0.5 * a * t**2
    
    params = curve_fit(s, x, y)
    
    #print(str(params[0]))

    return params[0][0]
Example #8
	def print_plot(self, title):
		'''
		Outputs the current grid to a .png file
		'''
		plt.ioff()
		fig, axs = plt.subplots()

		#Default extrema values for x & y dimension
		max_x, min_x = 1, -1
		max_y, min_y = 1, -1

		for brick in self.plane.grid:
			#Draw the brick
			axs.add_patch(Rectangle((brick[0].x, brick[0].y), brick[0].n, brick[0].h))

			#Find extrema
			for pos in brick[0].pos:
				max_x = max([max_x, pos[0]])
				min_x = min([min_x, pos[0]])
				max_y = max([max_y, pos[1]])
				min_y = min([min_y, pos[1]])
		if len(self.plane.grid) > 1:
			plt.title('Chromosome - f=%.3f' %self.eval_func())
		else:
			plt.title('Chromosome - f=000')

		#Create buffer around edge of drawing in the graph
		axs.set_xlim(min_x - 2.5, max_x + 5.0)
		axs.set_ylim(min_y - 2.5, max_y + 5.0)

		fig.savefig(str(title) + '.png')
		plt.close(fig)
Example #9
    def __init__(self, folder, **kwargs):  
        
        if not os.path.isdir(os.path.join(folder, 'plots')):
            os.mkdir(os.path.join(folder, 'plots'))
        plt.ioff()
        self.metrics_fig = plt.figure('Metrics')
        self.ax2 = self.metrics_fig.add_subplot(111)

        self.p1, = self.ax2.plot([], [], 'ro-', label='TEST: Pixel accuracy')
        self.p5, = self.ax2.plot([], [], 'rv-', label='TRAIN: Pixel accuracy')
        
        self.p2, = self.ax2.plot([], [], 'bo-', label='TEST: Mean-Per-Class accuracy')
        self.p6, = self.ax2.plot([], [], 'bv-', label='TRAIN: Mean-Per-Class accuracy')

        self.p3, = self.ax2.plot([], [], 'go-', label='TEST: Mean-Per-Class IU')
        self.p7, = self.ax2.plot([], [], 'gv-', label='TRAIN: Mean-Per-Class IU')

        self.p4, = self.ax2.plot([], [], 'ko-', label='TEST: Freq. weigh. mean IU')
        self.p8, = self.ax2.plot([], [], 'kv-', label='TRAIN: Freq. weigh. mean IU')
        


        plt.xlabel('iterations')
        self.handles2, self.labels2 = self.ax2.get_legend_handles_labels()
        self.lgd2 = self.ax2.legend(self.handles2, self.labels2, loc='upper center', bbox_to_anchor=(0.5,-0.2))
        self.ax2.grid(True)    
        plt.draw()
Example #10
def report(): 
    from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
    assert_data()
    df = session['df']
    
    # For example, the user could submit 2013-10:2014-02 but we need to turn this
    #   into '2013-10':'2014-02'
    if 'idx' in session.keys() and len(session['idx'])>0:
        session['_filter']
        idx = session['idx']
        if idx.find(':') > -1:
            lidx, ridx = idx.split(':')
            df = df[lidx:ridx]
        else:
            df = df[idx]
            
    
    startDate = session['startDate']
    endDate = session['endDate']
    if startDate != '' and endDate != '':
        # Filter the data frame to only be a subset of full time range
        startDate = pandas.Timestamp(startDate)
        endDate = pandas.Timestamp(endDate)
        df = df[startDate:endDate]
        
    
    figures = []
    if 'tags' in session.keys() and len(session['tags'])>0:
        figures += GBA.density_cloud_by_tags(df, session['tags'], 
                                            silent=True)
                                            
    if 'pnodes' in session.keys() and len(session['pnodes'])>0:
        import matplotlib.pylab as plt
        plt.ioff()
        
        pnodes = session['pnodes']
        df = GBA.price_at_pnodes(df, pnodes)
        cols = ['COST',] + ['pnode_'+p for p in pnodes]
        figures.append(df[cols].plot().figure)        
        figures.append(df[cols].cumsum().plot().figure)
        
    session.drop('tags')
    s = '<h1>Figures</h1>'
    figures_rendered = []
    for n, fig in enumerate(figures):
        s+='<img src="plt/%d.png" /><br />' % n
        canvas=FigureCanvas(fig)
        png_output = StringIO()
        canvas.print_png(png_output)
        figures_rendered.append(png_output.getvalue())
    session['figures'] = figures_rendered
    s += '<p><a href="/dashboard">Back to dashboard</a></p><br /><br />'
    return s
Example #11
def plot_model(galaxy, sect = [x1, x2, y1, y2], directory = '/Volumes/VINCE/dwarfs/combined_VCC/figures/'):
    '''
    A wrapper to plot galfit results, bad pixel mask and other info
    
    INPUT
    'galaxy': A single row of the dataframe produced by the for loop below
    'sect'  : Image sector produced by sector(header). It is the same for all
              galaxies, so there is no need to call sector(header) every time.
    'directory': Where to save the results. The directory must already exist.
    '''
    plt.ioff()
    fig, axarr = plt.subplots(2,3)
    #fig.suptitle('{}'.format(f))

    hdu = fits.open(galaxy['MODEL'])
    image = ndimage.gaussian_filter(hdu[1].data, 1)
    axarr[0, 0].imshow(image, cmap='gray', norm=LogNorm(), vmin=1, vmax = 50)
    axarr[0, 0].set_title('Image')

    model = hdu[2].data
    axarr[0, 1].imshow(model, cmap='Blues', norm=LogNorm(), vmin=0.01, vmax = 6)
    axarr[0, 1].set_title('Model')

    residuals = ndimage.gaussian_filter(hdu[3].data, 1)
    axarr[1, 0].imshow(residuals, cmap='gray', norm=LogNorm(), vmin=1, vmax = 50)
    axarr[1, 0].set_title('Residuals')

    x1, x2, y1, y2 = sect[0], sect[1], sect[2], sect[3]
    themask = getdata(galaxy['MASK'])[x1:x2,y1:y2]
    axarr[1, 1].imshow(themask, cmap='gray', vmin=0, vmax = 1)
    axarr[1, 1].set_title('Mask')
    
    axarr[0, 2].text(0.2, 1.0,r'Galaxy: VCC{}'.format(galaxy['ID']), va="center", ha="left")
    axarr[0, 2].text(0.2, 0.9,r'mtot $=$ {}'.format(galaxy['mtot']), va="center", ha="left")
    axarr[0, 2].text(0.2, 0.8,r'Re $=$ {} pc'.format(galaxy['Re']), va="center", ha="left")
    axarr[0, 2].text(0.2, 0.7,r'n $=$ {}'.format(galaxy['n']), va="center", ha="left")
    axarr[0, 2].text(0.2, 0.6,r'PA $=$ {}'.format(galaxy['PA']), va="center", ha="left")
    axarr[0, 2].text(0.2, 0.5,r'chi2nu $=$ {}'.format(galaxy['chi2nu']), va="center", ha="left")
    
    axarr[0, 2].set_aspect('equal')
    axarr[0, 2].axis('off')

    axarr[1, 2].set_aspect('equal')
    axarr[1, 2].axis('off')

    fig.subplots_adjust(hspace=0., wspace = 0.)
    plt.setp([a.get_xticklabels() for a in axarr.flatten()], visible=False);
    plt.setp([a.get_yticklabels() for a in axarr.flatten()], visible=False);

    fig.savefig(directory + '{}.png'.format(f.split('/')[-1]), dpi= 200)
    plt.close(fig)
Example #12
 def __init__(self, folder, **kwargs):
 
     if not os.path.isdir(os.path.join(folder, 'plots')):
         os.mkdir(os.path.join(folder, 'plots'))
     plt.ioff()
     self.loss_fig = plt.figure('Loss')  
     self.ax1 = self.loss_fig.add_subplot(111)
     self.p0, = self.ax1.plot([], [], 'm-', label='Train loss')
     self.p1, = self.ax1.plot([], [], 'c-', label='Test loss')
     plt.xlabel('iterations')
     self.handles1, self.labels1 = self.ax1.get_legend_handles_labels()
     self.lgd1 = self.ax1.legend(self.handles1, self.labels1, loc='upper center', bbox_to_anchor=(0.5,-0.2))   
     self.ax1.grid(True)
     plt.draw()
Example #13
 def __init__(self,stats):
     
     statsA = stats.expOne        
     uni = stats.titOne
     rc('xtick', labelsize=12) 
     rc('ytick', labelsize=12) 
     fig1 = pylab.figure(figsize=(8,5), dpi=100)   
     self.plotCurve(fig1,statsA,len(statsA[0]),"Coverage" ,1)     
     pylab.ioff()
     #to use if needed
     #mytime = '%.2f' % time()
     mytime = ""
     fig1.savefig(os.environ['TEX']+uni+mytime+'.pdf')        
     # display plot if required
     pylab.show()         
Example #14
    def plot(self, func, interp=True, plotter='imshow'):
        import matplotlib as mpl
        from matplotlib import pylab as pl
        if interp:
            lpi = self.interpolator(func)
            z = lpi[self.yrange[0]:self.yrange[1]:complex(0, self.nrange),
                    self.xrange[0]:self.xrange[1]:complex(0, self.nrange)]
        else:
            y, x = np.mgrid[
                self.yrange[0]:self.yrange[1]:complex(0, self.nrange),
                self.xrange[0]:self.xrange[1]:complex(0, self.nrange)]
            z = func(x, y)

        z = np.where(np.isinf(z), 0.0, z)

        extent = (self.xrange[0], self.xrange[1],
            self.yrange[0], self.yrange[1])
        pl.ioff()
        pl.clf()
        pl.hot()  # Some like it hot
        if plotter == 'imshow':
            pl.imshow(np.nan_to_num(z), interpolation='nearest', extent=extent,
                      origin='lower')
        elif plotter == 'contour':
            Y, X = np.ogrid[
                self.yrange[0]:self.yrange[1]:complex(0, self.nrange),
                self.xrange[0]:self.xrange[1]:complex(0, self.nrange)]
            pl.contour(np.ravel(X), np.ravel(Y), z, 20)
        x = self.x
        y = self.y
        lc = mpl.collections.LineCollection(
            np.array([((x[i], y[i]), (x[j], y[j]))
                      for i, j in self.tri.edge_db]),
            colors=[(0, 0, 0, 0.2)])
        ax = pl.gca()
        ax.add_collection(lc)

        if interp:
            title = '%s Interpolant' % self.name
        else:
            title = 'Reference'
        if hasattr(func, 'title'):
            pl.title('%s: %s' % (func.title, title))
        else:
            pl.title(title)

        pl.show()
        pl.ion()
Example #15
 def __init__(self, folder, title='', **kwargs):  
     
     if not os.path.isdir(os.path.join(folder, 'plots')):
         os.mkdir(os.path.join(folder, 'plots'))
     plt.ioff()
     self.title = title    
     self.metrics_fig = plt.figure(self.title + 'Metrics')
     self.ax2 = self.metrics_fig.add_subplot(111)
     self.p1, = self.ax2.plot([], [], 'r-', label='Av Pixel accuracy')
     self.p2, = self.ax2.plot([], [], 'b-', label='Av Mean-Per-Class accuracy')
     self.p3, = self.ax2.plot([], [], 'g-', label='Av Mean-Per-Class IU')
     self.p4, = self.ax2.plot([], [], 'k-', label='Av Freq. weigh. mean IU')
     plt.xlabel('iterations')
     self.handles2, self.labels2 = self.ax2.get_legend_handles_labels()
     self.lgd2 = self.ax2.legend(self.handles2, self.labels2, loc='upper center', bbox_to_anchor=(0.5,-0.2))
     self.ax2.grid(True)    
     plt.draw()
Example #16
 def __init__(self, net, folder, **kwargs):
 
     if not os.path.isdir(os.path.join(folder, 'plots')):
         os.mkdir(os.path.join(folder, 'plots'))
     plt.ioff()
     self.weights_fig = plt.figure('Weights')  
     self.ax1 = self.weights_fig.add_subplot(111)
     self.plots = {}
     i = 0        
     for p in net.params:
         self.plots['p'+str(i)] = self.ax1.plot([], [], '-', label=p+', <|W|>')[0] # MIND: needs unpacking
         i += 1
     plt.xlabel('iterations')
     self.handles1, self.labels1 = self.ax1.get_legend_handles_labels()
     self.lgd1 = self.ax1.legend(self.handles1, self.labels1, loc='upper center', bbox_to_anchor=(0.5,-0.2))   
     self.ax1.grid(True)
     plt.draw()  
Example #17
def plot(magfile):

    rapert,flux = readaper(magfile)
    
    plt.ioff()  # turn off interactive mode so plots don't pop up
    plt.figure(figsize=(8,10)) #this is in inches
    
    
    plt.xlabel('radius')
    plt.ylabel('total flux')
    plt.plot(rapert,flux,'bx')
    plt.title(magfile,{'fontsize':10})

    plt.title("Total flux collected per aperture")
        
    outfile=magfile+".pdf"
    plt.savefig(outfile)
    print("saved output figure to %s" % outfile)
Example #18
def get_rp_as_imagebuf(features, width=493, height=352, dpi=72, cmap="jet"):

    features = features.reshape(24, 60, order="F")

    plt.ioff()
    fig = plt.figure(figsize=(int(width / dpi), int(height / dpi)), dpi=dpi)
    ax = fig.add_subplot(111)
    fig.suptitle("Rhythm Patterns")
    ax.imshow(features, origin="lower", aspect="auto", interpolation="nearest", cmap=cmap)
    ax.set_xlabel("Mod. Frequency Index")
    ax.set_ylabel("Frequency [Bark]")

    img_buffer = io.BytesIO()
    plt.savefig(img_buffer, format="png")
    img_buffer.seek(0)
    plt.close()
    plt.ion()
    return base64.b64encode(img_buffer.getvalue())
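get_rp_as_imagebuf returns the PNG as base64-encoded bytes, which a caller could embed in an HTML img tag or decode back to a file. A usage sketch (the input array here is random data of the expected 24x60 size, purely for illustration):

import base64
import numpy as np

features = np.random.rand(24 * 60)       # placeholder rhythm-pattern vector
b64_png = get_rp_as_imagebuf(features)
with open('rhythm_patterns.png', 'wb') as fh:
    fh.write(base64.b64decode(b64_png))  # decode back to a binary PNG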
Example #19
    def plot( self ):
        """
        """
        from matplotlib import pylab as pl
        from asap import selector
        from asap._asap import srctype as st
        pl.clf()

        # result as a scantable
        s = self.getresult()

        # ON scan
        sel = selector()
        sel.set_types( int(st.pson) )
        s.set_selection( sel )
        diron = numpy.array( s.get_directionval() ).transpose()
        diron[0] = rotate( diron[0] )
        s.set_selection()
        sel.reset()

        # OFF scan
        sel.set_types( int(st.psoff) )
        s.set_selection( sel )
        diroff = numpy.array( s.get_directionval() ).transpose()
        diroff[0] = rotate( diroff[0] )
        s.set_selection()
        sel.reset()
        del s
        del sel

        # plot
        pl.ioff()
        ax=pl.axes()
        ax.set_aspect(1.0)
        pl.plot( diron[0], diron[1], '.', color='blue', label='ON' )
        pl.plot( diroff[0], diroff[1], '.', color='green', label='OFF' )
        [xmin,xmax,ymin,ymax] = pl.axis()
        pl.axis([xmax,xmin,ymin,ymax])
        pl.legend(loc='best',prop={'size':'small'},numpoints=1)
        pl.xlabel( 'R.A. [rad]' )
        pl.ylabel( 'Declination [rad]' )
        pl.title( 'edgemarker result' )
        pl.ion()
        pl.draw()
Example #20
def plot_pro(out_name, x1array, y1array, x2array, y2array, ts_min, linefit, yrange):

    import matplotlib.pylab as plt
    from numpy import amin, arange,zeros, size
    
    # Turn interactive plotting off
    plt.ioff()
    fig=plt.figure()
    
    # set up the plot and do a scatterplot of x1array, y1array
    # do scatterplots of the other arrays if they are present
    plt.plot(x1array,y1array, 'o',markersize=1, alpha=0.5, markeredgecolor='black', color='white')
    if size(x2array) >0 and size(y2array) > 0: plt.plot(x2array, y2array,'^', markersize=6, color='red')
    plt.xlabel('NDVI')
    plt.ylabel('TS')
    plt.title('Triangle')
    plt.ylim(yrange)
    
    # plot Dry edge
    x_dummy=arange(20)/19.0
    # scale the line to go through the whole range of x
    x_min=amin(x1array)
    x_dummy = x_dummy*(1. - x_min) + x_min
    y = linefit[1] + x_dummy*linefit[0]
    plt.plot(x_dummy,y,'k-', lw=2)
    if linefit[0]>0:
        string='y='+str(linefit[1])+ '+' +str(linefit[0])+'*x'
    else:
        string='y='+str(linefit[1])+ str(linefit[0])+'*x'
    label_pos_x=0.2
    label_pos_y=yrange[0]+0.9*(yrange[1]-yrange[0])
    
    
    # plot ts_min
    x_dummy=[x_min, 1]
    if ts_min > 0:
        plt.plot(x_dummy, zeros(2)+ts_min,'k-', lw=2)
        string=string+'\n TS min = '+str(ts_min)
    
    plt.text(label_pos_x,label_pos_y,string, color='red')
    # Save the file
    plt.savefig(out_name)
    plt.close(fig)
Example #21
File: misc.py Project: aerler/GeoPy
def loadMPL(linewidth=None, mplrc=None, backend='QT4Agg', lion=False):
  import matplotlib as mpl
  mpl.use(backend) # enforce QT4
  import matplotlib.pylab as pyl
  # some custom defaults  
  if linewidth is not None:
    mpl.rc('lines', linewidth=linewidth)
    if linewidth == 1.5: mpl.rc('font', size=12)
    elif linewidth == .75: mpl.rc('font', size=8)
    else: mpl.rc('font', size=10)
  # apply rc-parameters from dictionary (override custom defaults)
  if (mplrc is not None) and isinstance(mplrc,dict):
    # loop over parameter groups
    for (key,value) in mplrc.iteritems():
      mpl.rc(key,**value)  # apply parameters
  # prevent figures from closing: don't run in interactive mode, or pyl.show() will not block
  if lion: pyl.ion()
  else: pyl.ioff()
  # return matplotlib instance with new parameters
  return mpl, pyl
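For reference, loadMPL might be called like this before any figures are created; the rc values below are illustrative only, not taken from the GeoPy project:

mpl, pyl = loadMPL(linewidth=0.75, mplrc={'figure': {'dpi': 150}}, lion=False)
fig = pyl.figure()          # picks up the customized defaults
pyl.plot([0, 1], [0, 1])
pyl.show()                  # blocks, since interactive mode is off (lion=False)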
Example #22
def plotallfuncs(allfuncs=allfuncs):
    from matplotlib import pylab as pl
    pl.ioff()
    nnt = NNTester(npoints=1000)
    lpt = LinearTester(npoints=1000)
    for func in allfuncs:
        print(func.title)
        nnt.plot(func, interp=False, plotter='imshow')
        pl.savefig('%s-ref-img.png' % func.func_name)
        nnt.plot(func, interp=True, plotter='imshow')
        pl.savefig('%s-nn-img.png' % func.func_name)
        lpt.plot(func, interp=True, plotter='imshow')
        pl.savefig('%s-lin-img.png' % func.func_name)
        nnt.plot(func, interp=False, plotter='contour')
        pl.savefig('%s-ref-con.png' % func.func_name)
        nnt.plot(func, interp=True, plotter='contour')
        pl.savefig('%s-nn-con.png' % func.func_name)
        lpt.plot(func, interp=True, plotter='contour')
        pl.savefig('%s-lin-con.png' % func.func_name)
    pl.ion()
Example #23
def report(): 
    from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
    assert_data()
    df = session['df']
    
    if 'idx' in session.keys() and len(session['idx'])>0:
        idx = session['idx']
        if idx.find(':') > -1:
            lidx, ridx = idx.split(':')
            df = df[lidx:ridx]
        else:
            df = df[idx]
    
    figures = []
    if 'tags' in session.keys() and len(session['tags'])>0:
        figures += GBA.density_cloud_by_tags(df, session['tags'], 
                                            silent=True)
                                            
    if 'pnodes' in session.keys() and len(session['pnodes'])>0:
        import matplotlib.pylab as plt
        plt.ioff()
        
        pnodes = session['pnodes']
        df = GBA.price_at_pnodes(df, pnodes)
        cols = ['COST',] + ['pnode_'+p for p in pnodes]
        figures.append(df[cols].plot().figure)        
        figures.append(df[cols].cumsum().plot().figure)
        
    session.drop('tags')
    s = '<h1>Figures</h1>'
    figures_rendered = []
    for n, fig in enumerate(figures):
        s+='<img src="plt/%d.png" /><br />' % n
        canvas=FigureCanvas(fig)
        png_output = StringIO()
        canvas.print_png(png_output)
        figures_rendered.append(png_output.getvalue())
    session['figures'] = figures_rendered
    s += '<p><a href="/dashboard">Back to dashboard</a></p><br /><br />'
    return s
Example #24
   def __call__(self, **params):

       p=ParamOverrides(self,params)
       name=p.plot_template.keys().pop(0)
       plot=make_template_plot(p.plot_template,
                               p.sheet.views.Maps, p.sheet.xdensity,p.sheet.bounds,
                               p.normalize,name=p.plot_template[name])
       fig = plt.figure(figsize=(5,5))
       if plot:
           bitmap=plot.bitmap
           isint=plt.isinteractive() # Temporarily make non-interactive for plotting
           plt.ioff()                                         # Turn interactive mode off

           plt.imshow(bitmap.image,origin='lower',interpolation='nearest')
           plt.axis('off')

           for (t,pref,sel,c) in p.overlay:
               v = plt.flipud(p.sheet.views.Maps[pref].view()[0])
               if (t=='contours'):
                   plt.contour(v,[sel,sel],colors=c,linewidths=2)

               if (t=='arrows'):
                   s = plt.flipud(p.sheet.views.Maps[sel].view()[0])
                   scale = int(np.ceil(np.log10(len(v))))
                   X = np.array([x for x in xrange(len(v)/scale)])
                   v_sc = np.zeros((len(v)/scale,len(v)/scale))
                   s_sc = np.zeros((len(v)/scale,len(v)/scale))
                   for i in X:
                       for j in X:
                           v_sc[i][j] = v[scale*i][scale*j]
                           s_sc[i][j] = s[scale*i][scale*j]
                   plt.quiver(scale*X, scale*X, -np.cos(2*np.pi*v_sc)*s_sc,
                              -np.sin(2*np.pi*v_sc)*s_sc, color=c,
                              edgecolors=c, minshaft=3, linewidths=1)

           p.title='%s overlaid with %s at time %s' %(plot.name,pref,topo.sim.timestr())
           if isint: plt.ion()
           p.filename_suffix="_"+p.sheet.name
           self._generate_figure(p)
           return fig
Example #25
def onlineplot(data, k, end_thresh, on_denom):
    from matplotlib.pylab import ion, figure, draw, ioff, show, plot
    print "onlineplot", k, end_thresh, on_denom

    ion()
    fig = figure()
    ax = fig.add_subplot(111)
    ax.grid(True)
    ax.set_ylim((-1.5, 1.5))
    ax.set_xlim((-1.5, 1.5))

    # get k centroids
    kmr = kmeans.kmeans_runner(k, end_thresh)

    cutoffIdx = int(np.ceil(len(data) / np.float(on_denom)))
    np.random.shuffle(data)
    initialData = data[:cutoffIdx]
    remainingData = data[cutoffIdx:]
                        
    kmr.init_data(initialData)

    print "Initial:", kmr.centroids

    plot(initialData[:,0], initialData[:,1], 'o')

    for point in remainingData:
        kmr.iterate_online(point)
        
        plot(kmr.centroids[:, 0], kmr.centroids[:, 1], 'sr')
        plot(point[0], point[1], 'ob')
        #time.sleep(.)
        draw()

    plot(kmr.centroids[:, 0], kmr.centroids[:, 1], 'g^', linewidth=3)

    ioff()
    show()
    print "Final Counts:", kmr.k_counts
    print kmr.centroids
Example #26
def get_ssd_as_imagebuf(features, width=493, height=352, dpi=72, cmap="jet", std=False):

    features = features.reshape(24, 7, order="F")

    if std:
        features[:, 1] = np.sqrt(features[:, 1])

    plt.ioff()
    fig = plt.figure(figsize=(int(width / dpi), int(height / dpi)), dpi=dpi)

    ax = fig.add_subplot(111)
    fig.suptitle("Statistical Spectrum Descriptors")
    ax.imshow(features, origin="lower", aspect="auto", interpolation="nearest", cmap=cmap)
    ax.set_xticklabels(["", "mean", "var", "skew", "kurt", "median", "min", "max"])
    ax.set_ylabel("Frequency [Bark]")

    img_buffer = io.BytesIO()
    plt.savefig(img_buffer, format="png")
    img_buffer.seek(0)
    plt.close()
    plt.ion()
    return base64.b64encode(img_buffer.getvalue())
Example #27
    def __call__(self, **params):

        p = ParamOverrides(self, params)
        fig = plt.figure(figsize=(5, 5))

        # This one-liner works in Octave, but in matplotlib it
        # results in lines that are all connected across rows and columns,
        # so here we plot each line separately:
        #   plt.plot(x,y,"k-",transpose(x),transpose(y),"k-")
        # Here, the "k-" means plot in black using solid lines;
        # see matplotlib for more info.
        isint = plt.isinteractive()  # Temporarily make non-interactive for plotting
        plt.ioff()
        for r, c in zip(p.y[::p.skip], p.x[::p.skip]):
            plt.plot(c, r, "k-")
        for r, c in zip(
                np.transpose(p.y)[::p.skip],
                np.transpose(p.x)[::p.skip]):
            plt.plot(c, r, "k-")

        # Force the last line, to avoid leaving cells open
        if p.skip != 1:
            plt.plot(p.x[-1], p.y[-1], "k-")
            plt.plot(np.transpose(p.x)[-1], np.transpose(p.y)[-1], "k-")

        plt.xlabel('x')
        plt.ylabel('y')
        # Currently sets the input range arbitrarily; should presumably figure out
        # what the actual possible range is for this simulation (which would presumably
        # be the maximum size of any GeneratorSheet?).
        plt.axis(p.axis)

        if isint: plt.ion()
        self._generate_figure(p)
        return fig
Example #28
def get_rh_as_imagebuf(hist, width=493, height=352, dpi=72, normalize=True):

    if len(hist.shape) == 2:
        hist = hist[0]

    if normalize:
        hist /= np.sum(hist)

    plt.ioff()

    fig = plt.figure(figsize=(int(width / dpi), int(height / dpi)), dpi=dpi)
    # plt.subplots_adjust(left=0.0, right=1.0, top=1.0, bottom=0.0)
    ax = fig.add_subplot(111)
    fig.suptitle("Rhythm Histogram")
    ax.bar(np.arange(0, 60) / 6.0, hist)
    ax.set_xlim([0.0, 10.0])
    ax.set_xlabel("Mod. Frequency Index")

    img_buffer = io.BytesIO()
    plt.savefig(img_buffer, format="png")
    img_buffer.seek(0)
    plt.close()
    plt.ion()
    return base64.b64encode(img_buffer.getvalue())
Example #29
    def _plot_average_gradients(self, perturbation, gradients, sim_type=''):
        r"""
        Parameters:
        ----------
        perturbation : string
            string identifying the perturbation for which the overlap is being computed
        gradients : 2D numpy array
            array containing lambda, the average gradient and the standard error of the gradient
        sim_type : string
            Identifier for bound type or free type simulation
        """
        print(gradients)
        fplot = os.path.join(
            self._outputdir,
            perturbation) + '_' + sim_type + '_average_gradient.png'
        # turns off interactive plotting
        plt.ioff()

        fig = plt.figure(figsize=(6, 6))
        plt.errorbar(gradients[:, 0], gradients[:, 1], gradients[:, 2])
        plt.xlabel(r'$\lambda$ ')
        plt.ylabel(r'$\frac{\partial U}{\partial \lambda }$ ')
        plt.savefig(fplot, dpi=100)
        plt.close(fig)
Example #30
def visualize_dna(weigths,
                  pred_vec,
                  save_dir='../results/',
                  name='dna_prediction',
                  verbose=True):
    pl.ioff()
    fig = pl.figure(figsize=(20, 20))
    for ix in tq(range(pred_vec.shape[0])):
        if verbose:
            print('\nsubplotting {} of {}'.format(ix, pred_vec.shape[0]))
        ax = fig.add_subplot(pred_vec.shape[0], 1, ix + 1)
        H = abs((.25 * np.log2(.25 + 1e-7) - pred_vec[ix, :, :, 0] *
                 np.log2(pred_vec[ix, :, :, 0] + 1e-7)).sum(axis=0))
        H = np.tile(H, 4).reshape(4, pred_vec.shape[2], 1)
        plot_weights(weigths[ix] * H,
                     height_padding_factor=0.2,
                     length_padding=1.0,
                     colors=default_colors,
                     subticks_frequency=pred_vec.shape[2] / 2,
                     plot_funcs=default_plot_funcs,
                     highlight={},
                     ax=ax)
    pl.savefig(os.path.join(save_dir, name + '.png'), format='png')
    pl.close(fig)
Example #31
def hinton(W, maxWeight=None):
    reenable = False
    if P.isinteractive():
        P.ioff()
        reenable = True
    P.clf()
    height, width = W.shape
    if not maxWeight:
        maxWeight = 2**numpy.ceil(numpy.log(numpy.max(numpy.abs(W)))/numpy.log(2))

    P.fill(numpy.array([0,width,width,0]),numpy.array([0,0,height,height]),'gray')
    P.axis('off')
    P.axis('equal')
    for x in xrange(width):
        for y in xrange(height):
            _x = x+1
            _y = y+1
            w = W[y,x]
            if w > maxWeight/2:
                _blob(_x - 0.5, height - _y + 0.5, min(1,w/maxWeight),'white')
            elif w <= maxWeight/2:
                _blob(_x - 0.5, height - _y + 0.5, min(1,w/maxWeight),'black')
    if reenable:
        P.ion()
    P.show()
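The _blob helper is not included in this snippet. A definition consistent with the one embedded in the matrix_plot method above (Example #5), using the same P alias for pylab, would be:

def _blob(x, y, area, colour):
    """Draw a filled square of the given area centred at (x, y)."""
    hs = numpy.sqrt(area) / 2
    xcorners = numpy.array([x - hs, x + hs, x + hs, x - hs])
    ycorners = numpy.array([y - hs, y - hs, y + hs, y + hs])
    P.fill(xcorners, ycorners, colour, edgecolor=colour)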
Example #32
def plot_powerspectrum(fname, raw=None, picks=None, dir_plots="plots",
                       tmin=None, tmax=None, fmin=0.0, fmax=450.0, n_fft=4096):
        '''
        Plot the power spectral density (PSD) of a raw file and save the figure as a PNG.
        '''
        import os
        import matplotlib.pyplot as pl
        import mne
        from distutils.dir_util import mkpath

        if raw is None:
            assert os.path.isfile(fname), 'ERROR: file not found: ' + fname
            raw = mne.io.Raw(fname, preload=True)

        if picks is None:
            picks = jumeg_base.pick_meg_nobads(raw)

        dir_plots = os.path.join(os.path.dirname(fname), dir_plots)
        base_fname = os.path.splitext(os.path.basename(fname))[0]  # drop the '.fif' extension (str.strip removes characters, not a suffix)

        mkpath(dir_plots)

        file_name = fname.split('/')[-1]
        fnfig = dir_plots + '/' + base_fname + '-psds.png'

        pl.figure()
        pl.title('PSDS ' + file_name)
        ax = pl.axes()
        fig = raw.plot_psds(fmin=fmin, fmax=fmax, n_fft=n_fft, n_jobs=1, proj=False, ax=ax,
                            color=(0, 0, 1), picks=picks, area_mode='range')
        pl.ioff()
        # pl.ion()
        fig.savefig(fnfig)
        pl.close()

        return fname
Example #33
def wv_quickplot(Bpwr,
                 time,
                 wvfreq,
                 timerange=[0.0, 100.0],
                 pwr_lims=[-40, -10],
                 min_freq=500e3,
                 showPlot=True,
                 savePlot=True):
    plt.ioff()  # Turn interactive mode off so that plots don't appear automatically without plt.show()

    #Create figure
    #facecolor = white, edgecolor = black
    fig = plt.figure(num=1,
                     figsize=(9.25, 4.6),
                     dpi=200,
                     facecolor='w',
                     edgecolor='k')

    #Create axes for 2D plot
    #ax=plt.axes([fromleft,frombottom,width,height])
    ax = plt.axes([0.1, 0.105, 0.85, 0.8])
    plt.xticks(np.arange(0, 130, 10), fontsize=8)
    plt.xlabel(r't [$\mu$s]', fontsize=12)
    plt.ylabel(r'$f$ [Hz]', fontsize=12)

    #prepare 2D array for plotting (flip and take log)
    plotcwt = Bpwr[0:-1]
    logplotcwt = np.log10(np.flipud(plotcwt))

    #create 2D image

    if not pwr_lims:
        print('Min max range')
        vmin = logplotcwt.min()
        vmax = logplotcwt.max()
        print(vmin)
        print(vmax)
        im = plt.imshow(logplotcwt,
                        extent=[time[0], time[-1], wvfreq[0], wvfreq[-1]],
                        vmin=vmin,
                        vmax=vmax,
                        aspect='auto')
    if pwr_lims:
        print('user range')
        im = plt.imshow(logplotcwt,
                        extent=[time[0], time[-1], wvfreq[0], wvfreq[-1]],
                        vmin=pwr_lims[0],
                        vmax=pwr_lims[1],
                        aspect='auto')

    #modify axis settings
    ax.set_yscale('log')
    plt.yticks(fontsize=6)
    plt.ylim(min_freq, wvfreq[0])

    if showPlot and not savePlot: plt.show()
    if savePlot:
        process_dir = 'C:/Users/David Schaffner/Documents/ssxpython/plots/WaveletOutputDatabase/'  #run073013_1mwb_single/chan1/'
        filename = 'wavelet_test.png'
        savefile = os.path.normpath(process_dir + filename)
        #save figure with facecolor=white, edgecolor = black
        plt.savefig(savefile, dpi=150, facecolor='w', edgecolor='k')
        plt.clf()
        plt.close(fig)
Example #34
def sncosmo_circlefig(simIa=None,
                      simCC=None,
                      simIapkl='bush_SncosmoSim_Ia.pkl',
                      simCCpkl='bush_SncosmoSim_CC.pkl',
                      z_range=[1.16, 2.36],
                      nsim=1000,
                      verbose=True,
                      clobber=False):
    """  Construct a color-color circle figure for SN Colfax, with observed
     photometry included.

    :param simIa:
    :param simCC:
    :param simIapkl:
    :param simCCpkl:
    :param z_range:
    :param nsim:
    :param verbose:
    :param clobber:
    :return:
    """

    import medband_classtest
    import os
    import cPickle
    from matplotlib import pyplot as pl
    # from matplotlib import patheffects as pe
    import numpy as np
    from pytools import plotsetup
    fig = plotsetup.fullpaperfig(1, figsize=[8, 4])

    pl.ioff()
    mjdpk = 55797.
    mjdmedband = 55804.

    t0_range = [mjdmedband - mjdpk - 3, mjdmedband - mjdpk + 3]
    t0 = mjdmedband - mjdpk

    if simIa is not None:
        pass
    elif os.path.isfile(simIapkl) and not clobber > 1:
        if verbose: print("Loading Ia simulation from pickle : %s" % simIapkl)
        fin = open(simIapkl, 'rb')
        simIa = cPickle.load(fin)
        fin.close()
    else:
        if verbose:
            print("Running a new Ia simulation, then saving to pickle : %s" %
                  simIapkl)
        simIa = medband_classtest.SncosmoSim('Ia',
                                             z_range=z_range,
                                             t0_range=t0_range,
                                             nsim=nsim)
        fout = open(simIapkl, 'wb')
        cPickle.dump(simIa, fout, protocol=-1)
        fout.close()

    if simCC is not None:
        pass
    elif os.path.isfile(simCCpkl) and not clobber > 1:
        if verbose: print("Loading CC simulation from pickle : %s" % simCCpkl)
        fin = open(simCCpkl, 'rb')
        simCC = cPickle.load(fin)
        fin.close()
    else:
        if verbose:
            print("Running a new CC simulation, then saving to pickle : %s" %
                  simCCpkl)
        simCC = medband_classtest.SncosmoSim('CC',
                                             z_range=z_range,
                                             t0_range=t0_range,
                                             nsim=nsim)
        fout = open(simCCpkl, 'wb')
        cPickle.dump(simCC, fout, protocol=-1)
        fout.close()

    import getredshift

    # classify.plotcontours( simIa, simCC, plotstyle='points' )
    fig = pl.gcf()
    ax1 = fig.add_subplot(1, 2, 1)
    mkcirclepoints(zrange=z_range,
                   t0=t0,
                   colorselect=[1, 0],
                   coloredaxislabels=False,
                   marker='o')
    ax2 = fig.add_subplot(1, 2, 2, sharex=ax1)
    mkcirclepoints(zrange=z_range,
                   t0=t0,
                   colorselect=[0, 2],
                   coloredaxislabels=False,
                   marker='o')

    mag4 = {  # from psf model based on the multi-epoch stack
              'f125w':25.9432,
              'f127m':25.9555,
              'f139m':25.5784,
              'f140w':26.2892,
              'f153m':25.3580,
              'f160w':25.9067,
              }

    mag6 = { # from psf model based on the multi-epoch stack
             'f125w':25.9464,
             'f127m':25.9610,
             'f139m':25.6361,
             'f140w':26.2892,
             'f153m':25.3578,
             'f160w':25.9097,
             }

    magerr = { # from drop method
         'f125w':0.106712,
         'f127m':0.184979,
         'f139m':0.214549,
         'f140w':0.1162  ,
         'f153m':0.160024,
         'f160w':0.138843,
         }
    mag = mag6

    f25 = {}
    ferr25 = {}
    for k in mag.keys():
        f25[k] = 10**(-0.4 * (mag[k] - 25.))
        ferr25[k] = magerr[k] * f25[k] / 2.5 * np.log10(np.e)

    deltamag = {
        'f127m': mag['f127m'] - mag['f125w'],
        'f139m': mag['f139m'] - mag['f140w'],
        'f153m': mag['f153m'] - mag['f160w'],
    }
    deltamagerr = {
        'f127m': np.sqrt(magerr['f127m']**2 + magerr['f125w']**2),
        'f139m': np.sqrt(magerr['f139m']**2 + magerr['f140w']**2),
        'f153m': np.sqrt(magerr['f153m']**2 + magerr['f160w']**2),
    }

    ax1.errorbar(deltamag['f139m'],
                 deltamag['f127m'],
                 deltamagerr['f127m'],
                 deltamagerr['f139m'],
                 marker='D',
                 ms=10,
                 elinewidth=2,
                 capsize=0,
                 color='darkorange')
    ax2.errorbar(deltamag['f127m'],
                 deltamag['f153m'],
                 deltamagerr['f153m'],
                 deltamagerr['f127m'],
                 marker='D',
                 ms=10,
                 elinewidth=2,
                 capsize=0,
                 color='darkorange')

    ax1.set_xlim(-0.35, 0.3)
    ax1.set_ylim(-0.7, 0.22)
    ax2.set_ylim(-0.4, 0.3)

    ax2.text(
        deltamag['f139m'] + 0.05,
        deltamag['f153m'] - 0.05,
        'GND12Bus',
        ha='left',
        va='top',
        color='darkorange',
        fontsize=15,
    )
    # path_effects=[pe.withStroke( linewidth=3,foreground='k')] )
    # pl.legend( loc='upper right', numpoints=2, handlelength=0.3)
    pl.draw()
    pl.ion()
    return (simIa, simCC)
Example #35
    sigma = 0.05
    kernel = create_kernel('gauss', features, sigma)
    svm = svm_train(kernel, labels, 100)
    decision_boundary_plot(svm,
                           features,
                           vectors,
                           labels,
                           kernel,
                           title='Gaussian Kernel Sigma=0.05',
                           fontsize=fontsize,
                           contourFontsize=contourFontsize,
                           show=False,
                           showColorbar=showColorbar)
    add_percent_ticks()

    #pylab.subplots_adjust(bottom=0.05, top=0.95)

    pylab.savefig(os.path.join(directory, 'params_gaussian' + extension))
    pylab.close()


####################################################################################

if __name__ == '__main__':

    extension = 'pdf'
    if len(sys.argv) > 1:
        extension = sys.argv[1]
    pylab.ioff()
    create_figures(extension)
Example #36
some useful functions to make map and surface plots that take advantage of variable meta data

@author: Andre R. Erler, GPL v3
'''

# external imports
import matplotlib.pylab as pyl
import matplotlib as mpl
#from mpl_toolkits.axes_grid1 import ImageGrid
linewidth = .75
mpl.rc('lines', linewidth=linewidth)
if linewidth == 1.5: mpl.rc('font', size=12)
elif linewidth == .75: mpl.rc('font', size=8)
else: mpl.rc('font', size=10)
# prevent figures from closing: don't run in interactive mode, or plt.show() will not block
pyl.ioff()
# internal imports
from plotting.misc import expandLevelList


# function to plot 
def srfcPlot():
  raise NotImplementedError
  return

# function to place (shared) colorbars at a specified figure margins
def sharedColorbar(fig, cf, clevs, colorbar, cbls, subplot, margins):
  loc = colorbar.pop('location','bottom')      
  # determine size and spacing
  if loc=='top' or loc=='bottom':
    orient = colorbar.pop('orientation','horizontal') # colorbar orientation
Example #37
from matplotlib import use as use_backend
use_backend("Agg")
import matplotlib.pylab as plt
plt.ioff()
# from insilico_Exp import *
from ZO_HessAware_Optimizers import *
import time
import sys
orig_stdout = sys.stdout
# model_unit = ('caffe-net', 'fc6', 1)
# CNN = CNNmodel(model_unit[0])  # 'caffe-net'
# CNN.select_unit(model_unit)
from numpy import sqrt, zeros, abs
from numpy.random import randn


#%%
class HessEstim_Gauss:
    """Code to generate samples and estimate Hessian from it"""
    def __init__(self, space_dimen):
        self.dimen = space_dimen
        self.HB = 0
        self.std = 2

    def GaussSampling(self, xmean, batch=100, std=2):
        xmean = xmean.reshape(1, -1)
        self.std = std
        self.HB = batch
        self.HinnerU = randn(
            self.HB, self.dimen
        )  # / sqrt(self.dimen)  # make it unit var along the code vector dimension
Example #38
def plot_compare_brain_responses(fname_orig,
                                 fname_new,
                                 event_id=1,
                                 tmin=-0.2,
                                 tmax=0.5,
                                 stim_name=None,
                                 proj=False,
                                 show=False):
    '''
    Function showing the performance of the signal reconstructed from the
    selected (brain-response) components only. Plots the evoked (averaged)
    signal of the original data and of the brain-response-only data, along
    with the difference between them.

    fname_orig, fname_new: str
    stim_name: str (default None); used to select the stimulus channel (default 'STI 014')
    show: bool (default False)
    '''

    pl.ioff()
    if show:
        pl.ion()

    # Get the stimulus channel for the special event from fname_new.
    # Decide whether this raw data set includes more than one kind of event;
    # if so, use the first event as the start point of the epochs.
    # Adjust the size of the time window based on the different conditions.
    basename = fname_new.split('-raw.fif')[0]

    # if stim_name is given we assume that the input data are raw and
    # cleaned data ('cleaned' means cardiac and ocular artifacts were rejected)
    if stim_name:
        fnout_fig = basename + '-' + stim_name + '.png'
    else:
        stim_name = fname_new.rsplit(',ctpsbr')[0].rsplit('ar,')[1]
        # Construct file names.
        fnout_fig = basename + '.png'

    if ',' in stim_name:
        stim_ch = 'STI 014'
    elif stim_name == 'trigger':
        stim_ch = 'STI 014'
    elif stim_name == 'response':
        stim_ch = 'STI 013'

    # Read raw, calculate events, epochs, and evoked.
    raw_orig = mne.io.Raw(fname_orig, preload=True)
    raw_br = mne.io.Raw(fname_new, preload=True)

    events = mne.find_events(raw_orig, stim_channel=stim_ch, consecutive=True)
    events = mne.find_events(raw_br, stim_channel=stim_ch, consecutive=True)

    picks_orig = mne.pick_types(raw_orig.info, meg=True, exclude='bads')
    picks_br = mne.pick_types(raw_br.info, meg=True, exclude='bads')

    epochs_orig = mne.Epochs(raw_orig,
                             events,
                             event_id,
                             proj=proj,
                             tmin=tmin,
                             tmax=tmax,
                             picks=picks_orig,
                             preload=True)
    epochs_br = mne.Epochs(raw_br,
                           events,
                           event_id,
                           proj=proj,
                           tmin=tmin,
                           tmax=tmax,
                           picks=picks_br,
                           preload=True)

    evoked_orig = epochs_orig.average()
    evoked_br = epochs_br.average()

    times = evoked_orig.times * 1e3
    if np.max(evoked_orig.data) < 1:
        factor = 1e15
    else:
        factor = 1
    ymin = np.min(evoked_orig.data) * factor
    ymax = np.max(evoked_orig.data) * factor

    # Make the comparison plot.
    pl.figure('Compare raw data', figsize=(14, 5))
    pl.subplot(1, 2, 1)
    pl.plot(times, evoked_orig.data.T * factor, 'k', linewidth=0.5)
    pl.plot(times, evoked_br.data.T * factor, 'r', linewidth=0.5)
    pl.title('Signal before (black) and after (red) cleaning')
    pl.xlim(times[0], times[len(times) - 1])
    pl.ylim(1.1 * ymin, 1.1 * ymax)

    # print out some information
    textstr1 = 'Performance: %d\nFrequency Correlation: %d'\
               % (calc_performance(evoked_orig, evoked_br),
                  calc_frequency_correlation(evoked_orig, evoked_br))
    props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
    pl.text(times[10],
            1.09 * ymax,
            textstr1,
            fontsize=10,
            verticalalignment='top',
            bbox=props)

    pl.subplot(1, 2, 2)
    evoked_diff = evoked_orig - evoked_br
    pl.plot(times, evoked_diff.data.T * factor, 'k', linewidth=0.5)
    pl.title('Difference signal')
    pl.xlim(times[0], times[len(times) - 1])
    pl.ylim(1.1 * ymin, 1.1 * ymax)

    pl.savefig(fnout_fig, format='png')
    pl.close('Compare raw data')
    pl.ion()
Example #39
def correction_measCube(file_hdr,
                        path_corr,
                        pixel_90,
                        arg=None,
                        name_dyes=None,
                        averaging=False,
                        plotting=False,
                        analyte='O2',
                        unit='%air',
                        save=True):
    """
    Returned keys: 'Cube', 'corrected data', 'wavelength', 'Concentration'. If pixels are given, 'pixel of interest'
    and 'region of interest' are added, plus averaged data if regions of interest are selected and averaging is True.
    'pixel of interest' are the pixels for the original (not rotated) cube in the shape (x, y):
    width (cube rows, 1300) x height (cube samples, 1088).
    'region of interest': dictionary of all sensor regions. The keys of the sensor regions correspond to the pixels in
    the width direction and contain a dataframe with the pixels in the height direction as columns and the wavelength
    as the index.
    :param file_hdr:
    :param path_corr:
    :param arg:
    :param name_dyes:
    :param pixel_90:
    :param averaging:
    :param plotting:
    :return:
    """
    # signal correction - define required parameter
    if (unit in file_hdr) is False:
        conc = np.nan
    else:
        conc = file_hdr.split('_cube')[0].split('_')[-1]

    # ----------------------------------------------------------------------------------------------------------------
    # correction of the whole cube
    para, itime, dic_corr, wavelength = correction_cube(file_hdr=file_hdr,
                                                        path_corr=path_corr)

    # ----------------------------------------------------------------------------------------------------------------
    # output dictionary
    cube_corr = dict({
        'Cube': para,
        'corrected data': dic_corr,
        'wavelength': wavelength,
        'Concentration': conc
    })

    # ----------------------------------------------------------------------------------------------------------------
    # split whole cube into regions of interest
    pixel_0 = [
    ]  # shape (x,y): width (cube-Rows 1300) x height (cube-Samples 1088)
    for p in pixel_90:
        px = [(px[1], para['cube'].shape[1] - px[0]) for px in p]
        pixel_0.append([px[1], px[2], px[3], px[0]])

    # dic regions: keys ~ cube-Samples (height in 0deg rotated cube / width in 90deg rotated cube)
    if name_dyes is None:
        sensor_tag = ['sensor-' + str(i) for i in np.arange(len(pixel_90))]
    else:
        sensor_tag = name_dyes

    dic_regions = dict(
        map(
            lambda en:
            (sensor_tag[en],
             dict(
                 map(
                     lambda wl:
                     (wl, dic_corr[wl].loc[pixel_0[en][0][0]:pixel_0[en][1][
                         0], pixel_0[en][0][1]:pixel_0[en][2][1]]),
                     dic_corr.keys()))), range(len(sensor_tag))))

    cube_corr['pixel of interest'] = pixel_90
    cube_corr['region of interest'] = dic_regions

    # Averaging
    if averaging is True:
        dic_mean = dict.fromkeys(set(
            cube_corr['region of interest'].keys()))  # dict()
        dic_std = dict.fromkeys(set(
            cube_corr['region of interest'].keys()))  # dict()
        for sens in cube_corr['region of interest'].keys():
            dic_mean[sens] = dict.fromkeys(
                set(cube_corr['region of interest'][sens].keys()))  # dict()
            dic_std[sens] = dict.fromkeys(
                set(cube_corr['region of interest'][sens].keys()))  # dict()
            for px_w in cube_corr['region of interest'][sens].keys():
                dic_mean[sens][px_w] = cube_corr['region of interest'][sens][
                    px_w].mean(axis=1)
                dic_std[sens][px_w] = cube_corr['region of interest'][sens][
                    px_w].std(axis=1)

        df_av = pd.DataFrame(np.zeros(shape=(len(dic_mean[sens][px_w]),
                                             len(dic_mean.keys()) * 2)),
                             index=dic_mean[sens][px_w].index)
        ls = []
        for sens in list(dic_mean.keys()):
            ls.append(sens + ' mean')
            ls.append(sens + ' STD')
        df_av.columns = ls

        for s in dic_mean.keys():
            df_av[s + ' mean'] = pd.DataFrame(dic_mean[s]).mean(axis=1)
            df_av[s + ' STD'] = pd.DataFrame(dic_mean[s]).std(axis=1)

        cube_corr['average data'] = df_av

    # ----------------------------------------------------------------------------------------------------------------
    # Plotting
    if plotting is True:
        if arg is None:
            figsize_ = (5, 3)
            fontsize_ = 13.
        else:
            figsize_ = arg['figure size meas']
            fontsize_ = arg['fontsize']

        plt.ioff()
        fig, ax = plot.plotting_averagedSignal(cube_corr,
                                               conc=conc,
                                               unit=unit,
                                               analyte=analyte,
                                               figsize_=figsize_,
                                               fontsize_=fontsize_)
        plt.show()
    else:
        fig = None
        ax = None

    # ----------------------------------------------------------------------------------------------------------------
    # Saving
    if save is True:
        df_out = pd.Series(cube_corr['region of interest'])
        df_sav = pd.Series({
            'measurement': file_hdr.split('\\')[-1],
            'corr file': path_corr,
            'sensor ID': name_dyes,
            'concentration': cube_corr['Concentration'],
            'region of interest': df_out,
            'pixel of interest': cube_corr['pixel of interest'],
            'wavelength': cube_corr['wavelength']
        })

        # directory
        path_save = file_hdr.split('/')[0] + '/output/measurement/'
        pathlib.Path(path_save).mkdir(parents=True, exist_ok=True)

        # file name
        date = str(time.gmtime().tm_year) + str(time.gmtime().tm_mon) + str(
            time.gmtime().tm_mday) + '_'
        file_name = date + file_hdr.split('/')[-1].split('.')[0]
        save_name = path_save + file_name + '_run0.hdf5'
        if os.path.isfile(save_name) == False:
            pass
        else:
            ls_files_exist = glob(path_save + '*.hdf5')
            f_exist = [f.split('_')[-1].split('.')[0] for f in ls_files_exist]

            num = 0
            for f in f_exist:
                if 'run' in f:
                    num = f.split('run')[-1]
                else:
                    pass

            save_name = path_save + file_name + '_run' + str(np.int(num) +
                                                             1) + '.hdf5'

        # save to hdf5
        df_sav.to_hdf(save_name, 'df_sav', format='f')

    return cube_corr, fig, ax
Example #40
#!/usr/bin/python
import sys
import pdb, traceback
import copy
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as pl

pl.ioff()
_plot_col_name = "INFLIGHT_IOS"
_time_col_name = "ELAPSED_USECS"
_plot_time_unit_usec = 1000000

_plot_col = 1000
_time_col = 1000


def set_col_index(header):
    global _plot_col
    global _plot_col_name
    global _time_col
    global _time_col_name

    if ("#" not in header):
        print("Cannot read header to set schema for this trace file")
        raise ValueError(
            'Cannot read header to set schema for this trace file')
    records = header.split(",")
    for i in range(len(records)):
        if _plot_col_name in records[i]:
            _plot_col = i
Example #41
0
def main():
    global plot_cuboct_flag
    global seeded_num
    global SBP_output
    global output_fullpath_filename
    
    if len(sys.argv) < 3:
        print "ERROR - expecting 2 inputs: 1. geometry, 2. output filename; exiting..."
        sys.exit()
    
    print sys.argv[0]
    print sys.argv[0]
    print sys.argv[0]
    output_fullpath_filename = '../SBP_output/testing.SBP'
    
    # user inputs
    seeded_num = 2                     # number of layers seeded; typically 2
    show_vox_build_flag = False        # shows plot of vox build 
    output_shopbot_build_flag = True   # outputs shopbot build
    plot_cuboct_flag = True            # shows full cuboct lattice on plot
    SBP_output = []                    # container for sbp file
    
    # setup
    setup_vox_geometry()   # sets voxel geometry
    setup_shopbot_table()  # sets table offset and coordinate transformation
    plt.ion()              # interactive plotting; used so plt.pause will work
    
    # sample voxel structures ------------------------------------------------------
    v2x2 = np.ones((2,2,2))    # 3d square 2x2 voxel matrix (z,y,x)
    v3x3 = np.ones((3,3,3))    # 3d square 3x3 voxel matrix
    v4x4 = np.ones((4,4,4))    # 3d square 4x4 voxel matrix
    v5x5 = np.ones((5,5,5))    # 3d square 5x5 voxel matrix
    v5x3x2 = np.ones((5,3,2))  # non-square vox structure
    v3x3x12 = np.ones((3,3,12))
    
    vWing = np.ones((2,14,5))  # Wing vox structure
    vWing[1,:,:] = 0 
    vWing[1,:,1] = 1
    
    seed = [[[1, 1],[1, 0]],[[1, 0],[0,0]]]
    
    hollow3x3 = copy.deepcopy(v3x3)   # 3d square 3x3 voxel matrix
    hollow3x3[1][1][1] = 0            # add hole
        
    v2x2_seeded = seeded(v2x2)
    v3x3_seeded = seeded(v3x3)
    
    v2x2_seeded7 = seeded7(v2x2)
    # -------------------------------------------------------------------------------
    
    # input structure note: seeded is for visualization, build output assumes seeded_num (see code below)
    input_vox_structure = v2x2  # <-------------------------- USER INPUT
    
    # print input_vox_structure
    if show_vox_build_flag:
        show_vox_build(input_vox_structure)
    if output_shopbot_build_flag:
        output_shopbot_build(input_vox_structure)
    
    plt.ioff()
    plt.show()
Example #42
0
pa['ic'] = U0
pa['stepSize'] = timeStep
pa['timeMax'] = timeMax
pa['nSteps'] = nSteps
c, p, x, f3d = stsp3d_profile(pa)

# Runs on 2D
U0 = sc.array([0.05, 0.5])
pa['ic'] = U0
lO = RK2_Autonomous(f=stsp2D, pars=pa, eParNames=['Flux_Ca'], eParList=[ff])
c = lO[0]
x = lO[1]
p = ss_p(c, pa)
#
f0 = gr.figure(figsize=(11, 7))
gr.ioff()
rows = 2
cols = 2
ax = list()
for n in range(rows * cols):
    ax.append(f0.add_subplot(rows, cols, n + 1))

ax[0].plot(timeSamples, c, '.', ms=1.0, color='darkgreen', label=r'$c$')
ax[0].plot(timeSamples, p, 'b.', ms=1.0, label=r'$p$')
ax[0].plot(timeSamples, p * x, 'k.', ms=1.0, label=r'$px$')
ax[0].plot(timeSamples, x, '.', ms=1, color='orange', label=r'$x$')
ax[1].plot(lO[0], p, 'b.', ms=1.0, label=r'$(c,p)$')
ax[1].plot(lO[0], p * x, 'k.', ms=1.0, label=r'$(c,px)$')
ax[1].plot(lO[0], x, '.', ms=1, color='orange', label=r'$(c,x)$')
ax[2].plot(timeSamples, p * x, 'k.', ms=1.0, label=r'$(c,px)$')
ax[2].plot(timeSamples, p, 'b.', ms=1.0, label=r'$p$')
Example #43
0
def LV5(argv):
    ####Packages#####
    import scipy as sc
    import scipy.stats as stats
    import scipy.integrate as integrate
    import matplotlib.pylab as p
    import sys

    #####Functions######
    def dCR_dt(pops, t=0):
        """
        function find the change in consumers and resources at time, t, in discrete time.
        """
        R = pops[0]
        C = pops[1]
        Rt1 = sc.zeros(len(t))
        Ct1 = sc.zeros(len(t))

        for i in range(len(t)):
            if i == 0:  # for first iteration
                Rt1[i] = R * (1 + r * (1 - (R / K) - a * C))
                Ct1[i] = C * (1 - z + (e * a * R))
            else:  ## every subsequent iteration
                Rt1[i] = Rt1[i - 1] * (1 + (r + E) *
                                       (1 - (Rt1[i - 1] / K) - a * Ct1[i - 1]))
                Ct1[i] = Ct1[i - 1] * (1 - z + E + (e * a * Rt1[i - 1]))

                ### if values reach or exceed 0, stop
                if Rt1[i] <= 0:
                    break

                if Ct1[i] <= 0:
                    break

        # Rt1 = Rt1[~sc.isnan(Rt1)] # the ~ cause return True only on valid numbers (https://stackoverflow.com/questions/11620914/removing-nan-values-from-an-array)
        # Ct1 = Ct1[~sc.isnan(Ct1)]
        # return sc.array([Rt1.tolist(), Ct1.tolist()])
        return sc.array([Rt1, Ct1])

    ######Main#######

    if len(sys.argv) == 1:
        r = 1.
        a = 0.1
        z = 1.5
        e = 0.75
        print("Using default arguments for r, a, z, e")

    elif len(sys.argv) < 5:
        print("Not enough arguments given. Please give r, a, z and e")
        return 1  # bail out here, otherwise r, a, z, e would be undefined below

    else:
        args = sys.argv
        r = float(args[1])
        a = float(args[2])
        z = float(args[3])
        e = float(args[4])
        print("Using args from user in order: r, a, z, e")

    K = 30  ## K arbitrarily defined
    t = sc.linspace(0, 15, 1000)
    E = stats.norm.rvs(0.5, 0.1, size=1)  #random gaussian fluctuation

    R0 = 10
    C0 = 5
    RC0 = sc.array([R0, C0])

    pops = dCR_dt(RC0, t)

    print(len(pops[1]))

    ######plotting######
    p.ioff()
    f1 = p.figure()

    p.plot(t, pops[0], "g-", label="Resource density")  #plot
    p.plot(t, pops[1], "b-", label="Consumer density")
    # p.xlim(0, 1)
    p.grid()
    p.legend(loc="best")
    p.xlabel('Time')
    p.ylabel("Population density")
    p.suptitle("Consumer-Resource population dynamics")
    p.title("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    # p.show()
    f1.savefig('../Results/LV_model_LV5.pdf')  #Save figure
    ##### Practical #####
    f2 = p.figure()
    p.plot(pops[0], pops[1], "r-")
    p.grid()
    p.xlabel("Resource density")
    p.ylabel("Consumer density")
    p.suptitle("Consumer-Resource population dynamics")
    p.title("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    # p.show()
    f2.savefig("../Results/LV_model2_LV5.pdf")
    print("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    return 0
Example #44
0
def SimpleTest():
    # Create our OESGP object
    oesgp = OESGP()

    # Our parameters
    dim = 1

    res_size = 100
    input_weight = 1.0
    output_feedback_weight = 0.1
    activation_function = 0
    leak_rate = 0.0
    connectivity = 0.1
    spectral_radius = 0.9
    kernel_params = [1.0, 1.0]
    noise = 0.1
    epsilon = 1e-3
    capacity = 100
    random_seed = 100

    # experiment boundaries
    len_episode = 1000
    input_t = np.zeros((len_episode, dim))
    output_t = np.zeros((len_episode, dim))
    prediction_t = np.zeros((len_episode, dim))
    variance_t = np.zeros((len_episode, dim))

    r = []  # np.zeros((res_size, 1))
    r_t = np.zeros((len_episode, res_size))

    # Initialise our OESGP
    oesgp.init(dim, dim, res_size, input_weight, output_feedback_weight,
               activation_function, leak_rate, connectivity, spectral_radius,
               False, kernel_params, noise, epsilon, capacity, random_seed)

    pl.ion()
    #loop through some sample code
    for i in range(0, len_episode):
        input = [sin(i * 0.01)]
        output = [sin((i + 1) * 0.01)]

        # print input

        input_t[i, 0] = input[0]
        output_t[i, 0] = output[0]

        #update the reservoir
        oesgp.update(input)
        #oesgp.update_wfeedback(input, input)

        # get state
        oesgp.getState(r)
        r_a = np.array(r)
        r_a += np.random.normal(0., 0.01, r_a.shape)
        oesgp.setState(r_a.tolist())
        # print r
        r_t[i, :] = r_a

        prediction = []
        variance = []
        #make a prediction
        oesgp.predict(prediction, variance)

        # print prediction
        # print variance
        prediction_t[i, 0] = prediction[0]
        variance_t[i, 0] = variance[0]

        #get and print the error
        error = norm(array(prediction) - array(output))
        print "Error: ", error

        #train the model
        oesgp.train(output)

        if i % 100 == 0:
            plot_state(res_size, r_t, input_t, output_t, prediction_t,
                       variance_t)

    #save the model for the future
    oesgp.save("test")

    pl.ioff()
    plot_state(res_size, r_t, input_t, output_t, prediction_t, variance_t)
    pl.show()
Example #45
0
def LV2(argv):
    ####Packages#####
    import scipy as sc
    import scipy.integrate as integrate
    import matplotlib.pylab as p
    import sys

    #####Functions######
    def dCR_dt(pops, t=0):
        """
        function find the change in consumers and resources at time, t.

        """
        R = pops[0]
        C = pops[1]
        dRdt = r * R * (1 - (R / K)) - a * R * C
        dCdt = -z * C + e * a * R * C

        return sc.array([dRdt, dCdt])

    ######Main#######

    if len(sys.argv) == 1:
        r = 1.
        a = 0.1
        z = 1.5
        e = 0.75
        print("Using default arguments for r, a, z, e")

    elif len(sys.argv) < 5:
        print("Not enough arguments given. Please give r, a, z and e")
        return 1  # bail out here, otherwise r, a, z, e would be undefined below

    else:
        args = sys.argv
        r = float(args[1])
        a = float(args[2])
        z = float(args[3])
        e = float(args[4])
        print("Using args from user in order: r, a, z, e")

    K = 50  ## K arbitrarily defined
    t = sc.linspace(0, 15, 1000)

    R0 = 10
    C0 = 5
    RC0 = sc.array([R0, C0])

    pops, infodict = integrate.odeint(dCR_dt, RC0, t, full_output=True)

    ######plotting######
    p.ioff()
    f1 = p.figure()

    p.plot(t, pops[:, 0], "g-", label="Resource density")  #plot
    p.plot(t, pops[:, 1], "b-", label="Consumer density")
    p.grid()
    p.legend(loc="best")
    p.xlabel('Time')
    p.ylabel("Population density")
    p.suptitle("Consumer-Resource population dynamics")
    p.title("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    # p.show()
    f1.savefig('../Results/LV_model_LV2.pdf')  #Save figure
    ##### Practical #####
    f2 = p.figure()
    p.plot(pops[:, 0], pops[:, 1], "r-")
    p.grid()
    p.xlabel("Resource density")
    p.ylabel("Consumer density")
    p.suptitle("Consumer-Resource population dynamics")
    p.title("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    # p.show()
    f2.savefig("../Results/LV_model2_LV2.pdf")
    print("r={}, a={}, z={}, e={}, K={}".format(r, a, z, e, K))
    return 0
Example #46
0
 def _start_report(self):
     self.was_interactive = plt.isinteractive()
     plt.ioff()
     self.f = plt.figure(figsize=(self.figwidth, self.figheight))
     self.pageno = 1
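 # Hedged sketch (added): a hypothetical companion to _start_report above that restores the interactive
 # state it saved. Only plt.ion(), plt.close() and Figure.savefig() are used; the method name and output
 # path are illustrative.
 def _end_report(self, filename='report.png'):
     self.f.savefig(filename)       # hypothetical output file
     plt.close(self.f)
     if self.was_interactive:
         plt.ion()                  # restore interactive mode recorded in _start_report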
Example #47
0
    def _triangulate_ltrs(self, slam_ts, dirs, check_triangulation, manual_lms):
        """
         Triangulate the landmarks to define the LTRs.
        """

        # Isolate the landmarks from mean.
        last_estimate = slam_ts[-1]
        filename = "mean-" + last_estimate[1] + ".npy"
        last_mean = np.load(dirs["slam"] + filename)
        dof = 7  # robot state dof
        last_lms = last_mean[dof:]
        N_lm = int((last_mean.size - dof) / 3)
        lm_ids = np.arange(N_lm)

        # Find only the landmarks which were observed
        valid_lms = []
        for i in range(N_lm):
            if last_lms[3 * i] != 0:
                valid_lms.append(i)

        valid_lms_arr = np.array(valid_lms)

        last_lms = last_lms.reshape((int(last_lms.size / 3), 3))
        last_lms = last_lms[:, :2]
        not_happy = True

        if check_triangulation:
            print("Valid landmarks:", valid_lms)
            plt.ion()
            not_happy = True
        else:
            plt.ioff()
        fig = plt.figure()
        plt.axis("equal")
        while not_happy:
            last_lms_valid = last_lms[valid_lms, :]  # Remove invalids
            if (last_lms_valid.size / 2) < 3:
                logger.error(f"Too few landmarks in SLAM estimates: {last_lms_valid.size / 2} < 3")
                exit()

            if manual_lms != []:
                for tri in manual_lms:
                    if len(tri) != 3:
                        raise ValueError(
                            "Invalid manual landmarks. Incorrect size: %s" % str(manual_lms)
                        )
                valid_lms_set = set([i for i in valid_lms])
                manual_lms_set = set([j for i in manual_lms for j in i])
                if not (manual_lms_set <= valid_lms_set):
                    raise ValueError(
                        "Invalid manual landmarks. Not subset of valid landmark set. %s !<= %s"
                        % (str(manual_lms_set), str(valid_lms_set))
                    )

                self.simplices = manual_lms
                not_happy = False
            elif (last_lms_valid.size / 2) == 3:
                # If there are only 3 landmarks don't need user input
                self.simplices = np.array([valid_lms])
                not_happy = False
            else:
                triangles = Delaunay(last_lms_valid, incremental=True)
                self.simplices = triangles.simplices  # Indices of the points in each triangulation
                # Remap simplices to valid landmark ids
                remap = lambda x: valid_lms_arr[x]
                self.simplices = np.apply_along_axis(remap, 0, self.simplices)

            # Visual check for triangulation
            plt.gca().clear()
            plt.triplot(last_lms[:, 0], last_lms[:, 1], self.simplices.copy())
            for i in valid_lms:
                plt.text(*last_lms[i, :], s=str(i))

            if check_triangulation and not_happy:
                plt.draw()
                plt.pause(0.01)
                remove_str = input("Enter the IDs of landmarks to be removed (comma separated): ")
                try:
                    remove = ast.literal_eval(remove_str)
                except Exception:
                    logger.exception("Error understanding input")
                    remove = ()
                    not_happy = False

                # If only one number entered
                if type(remove) is int:
                    remove = (remove,)
                new_valid = sorted(list(set(valid_lms) - set(remove)))
                valid_lms = new_valid
                valid_lms_arr = np.array(valid_lms)
            else:
                break
        plt.savefig(dirs["main"] + "/triangulation.pdf")
        plt.close(fig)
        plt.ioff()
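# Hedged sketch (added): a minimal, standalone illustration of the Delaunay step used above. The
# simplices attribute holds row indices into the input point array, which is why the code above remaps
# them onto landmark IDs. The toy points and output file name are illustrative.
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial import Delaunay

pts = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.5, 0.5]])
tri = Delaunay(pts)
print(tri.simplices)                       # each row: three indices into pts forming one triangle
plt.triplot(pts[:, 0], pts[:, 1], tri.simplices)
plt.savefig('triangulation_demo.pdf')      # illustrative file name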
Example #48
0
def train(args):
    data_pointer = 0
    data = gen_data(2000000)

    model = Model(n_steps=args.seq_length, batch_size=args.batch_size)

    iop = tf.initialize_all_variables()
    # create initialize op, this needs to be run by the session!
    # with tf.device("/gpu:1"):
    session = tf.Session()
    session.run(iop)
    # actually initialize, if you don't do this you get errors about uninitialized stuff

    saver = tf.train.Saver(tf.all_variables(), max_to_keep=100)

    # prev_state = session.run(cell.zero_state(batch_size, tf.float32))
    prev_state = session.run(
        tf.random_uniform([model.batch_size, model.cell.state_size], -1., 1.))

    allouts = []
    allcosts = []
    params = {
        "n_steps": model.n_steps,
        "batch_size": model.batch_size,
        "seq_width": model.seq_width
    }
    # training
    # pl.ion()
    # pl.figure()
    for i in range(args.train_steps):
        seq_input_data = get_seq_input_data(data_pointer, data, params)
        seq_target_data = get_seq_input_data(data_pointer + 1, data, params)

        feed = {
            model.early_stop: model.n_steps,
            model.seq_input: seq_input_data,
            model.initial_state: prev_state,
            model.target: seq_target_data
        }

        # fstate, _ = session.run([final_state, train_op], feed_dict=feed)
        session.run(model.optimizer, feed_dict=feed)
        # oS, o, o3 = session.run([outputs, output, output2], feed_dict=feed)
        # oS = session.run(outputs, feed_dict=feed)
        # o1 = session.run(output, feed_dict=feed)
        # o2 = session.run(output2, feed_dict=feed)
        # # tg = session.run(target, feed_dict=feed)
        # tg = seq_target_data.copy()
        # print type(oS), type(o1), type(o2)
        # print "o1.shape", o1.shape
        # print "o2.shape", o2.shape
        # print "tg.shape", tg.shape

        # pl.subplot(311)
        # pl.cla()
        # pl.plot(o1)
        # pl.subplot(312)
        # pl.cla()
        # for j in range(batch_size):
        #     pl.plot(o2[:,j,:])
        # pl.subplot(313)
        # pl.cla()
        # for j in range(batch_size):
        #     pl.plot(tg[:,j,:])
        # pl.draw()
        # prev_state = fstate

        if i % 1 == 0:
            tcost = session.run(model.cost, feed_dict=feed)
            allcosts.append(tcost)
            # print len(tcost)
            print "cost[%d] = %f" % (i, tcost)
        if i % 100 == 0:
            saver.save(session, "recurrent_network_1d.ckpt", global_step=i)

        data_pointer += model.n_steps

    pl.ioff()
    pl.plot(allcosts)
    pl.show()

    # saver.save(session, "recurrent_network_1b.ckpt", global_step = i)
    f = open("recurrent_network_1b_allcosts.cpkl", "wb")
    cPickle.dump(allcosts, f)
    f.close()
    saver.save(session, "recurrent_network_1d.ckpt")
Example #49
0
import sys
noshow = False
if len(sys.argv)>1:
	#print (sys.argv[1])
	if sys.argv[1] == 'noshow':
		noshow = True
		from matplotlib import use
		use('Agg')
		from matplotlib.pylab import ioff
		ioff()



data_directory = 'TRANK_nk_fit/'
from TRANK import functionize_nk_file


from matplotlib.pylab import *
from numpy import loadtxt, array
lamda_smooth = (loadtxt(data_directory+'fit_nk_fine.txt').T)[0]
lamda_points = (loadtxt(data_directory+'fit_nk.txt').T)[0]
fit_nk_f = functionize_nk_file(data_directory+'fit_nk.txt', skiprows = 0, kind = 'cubic')

## you can just load the file directly
#actual_nk_f = functionize_nk_file('Au-glass_10nm_30p_effective_nk.txt', skiprows = 0, kind = 'cubic')
# or use the setup file
from basic_setup import nk_f_list, layer_index_of_fit
actual_nk_f = nk_f_list[layer_index_of_fit]

from matplotlib.pylab import figure, gca, subplots_adjust, tight_layout, show, savefig
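# Hedged continuation sketch (added, not from the original snippet): one plausible way the loaded curves
# could be compared. It assumes fit_nk_f and actual_nk_f return a complex refractive index (n + ik) for an
# array of wavelengths; the labels and output file name are illustrative.
fig = figure(figsize=(8, 5))
nk_fit = fit_nk_f(lamda_smooth)
nk_actual = actual_nk_f(lamda_smooth)
plot(lamda_smooth, nk_fit.real, 'b-', label='fit n')
plot(lamda_smooth, nk_fit.imag, 'b--', label='fit k')
plot(lamda_smooth, nk_actual.real, 'r-', label='actual n')
plot(lamda_smooth, nk_actual.imag, 'r--', label='actual k')
xlabel('Wavelength')
legend(loc='best')
tight_layout()
savefig('fit_vs_actual_nk.png')  # illustrative file name
if not noshow:
	show()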
Example #50
0
def _demo_pipeline():
    dim_x = 2
    num_x_p = 200
    num_x_n = 200
    var_tol = .8
    num_ch = 2

    mtx_p = [np.array([[1, 0], [0, 1]]), np.array([[1, 2], [2, 1]])]
    mtx_n = [np.array([[2, 0], [0, 2]]), np.array([[1, -2], [-2, 1]])]

    x_p = np.asarray([
        np.dot(np.random.randn(num_x_p, dim_x), mtx_p[i])
        for i in range(num_ch)
    ])
    x_n = 3 + np.array([
        np.dot(np.random.randn(num_x_n, dim_x), mtx_n[i])
        for i in range(num_ch)
    ])
    y_p = [1] * num_x_p
    y_n = [0] * num_x_n

    x = np.concatenate((x_n, x_p), axis=1)
    y = np.concatenate((y_n, y_p), axis=0)

    permutation = np.random.permutation(x.shape[1])
    x = x[:, permutation, :]
    y = y[permutation]
    """ Select bandwidth of the gaussian kernel assuming data is also
        comming from a gaussian distribution.
        Ref: Silverman, Bernard W. Density estimation for statistics and data
        analysis. Vol. 26. CRC press, 1986. """
    bandwidth = 1.06 * min(np.std(x),
                           iqr(x) / 1.34) * np.power(x.shape[0], -0.2)

    pca = ChannelWisePrincipalComponentAnalysis(num_ch=x.shape[0])
    rda = RegularizedDiscriminantAnalysis()
    kde = KernelDensityEstimate(bandwidth=bandwidth)

    model = Pipeline()
    model.add(pca)
    model.add(rda)
    model.add(kde)

    plt.ion()
    fig = plt.figure()
    ax = fig.add_subplot(212)
    ax_2 = fig.add_subplot(221)
    ax_3 = fig.add_subplot(222)

    for gam in [0, .3, .6, .9]:
        for lam in [0, .3, .6, .9]:
            model.pipeline[1].lam = lam
            model.pipeline[1].gam = gam

            if gam == 0 and lam == 0:
                # Show this once only (quick-and-dirty, but sufficient for the demo)
                model.pipeline[0].var_tol = 0
                model.fit(x, y)
                sv_init = [
                    model.pipeline[0].list_pca[i].singular_values_
                    for i in range(len(model.pipeline[0].list_pca))
                ]
                model.pipeline[0].var_tol = var_tol
                model.fit(x, y)
                sv_final = [
                    model.pipeline[0].list_pca[i].singular_values_
                    for i in range(len(model.pipeline[0].list_pca))
                ]
                print("Initial SV:{}".format(sv_init))
                print("-- using tolerance:{} -->".format(var_tol))
                print("Final SV:{}".format(sv_final))

                print("Init dim.:{} -> Final dim.:{}".format(
                    x.shape, model.line_el[1].shape))

            model.fit_transform(x, y)

            el = model.line_el[1]
            x_min, x_max = el[:, 0].min() - 1, el[:, 0].max() + 1
            y_min, y_max = el[:, 1].min() - 1, el[:, 1].max() + 1
            xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                                 np.arange(y_min, y_max, 0.1))
            z = model.pipeline[1].predict(np.c_[xx.ravel(), yy.ravel()])
            z = z.reshape(xx.shape)

            ax.clear()
            ax_2.clear()
            ax_3.clear()
            ax.contourf(xx, yy, z, alpha=0.2, c=y, s=20)

            ax.scatter(model.line_el[1][y == 1, 0],
                       model.line_el[1][y == 1, 1],
                       c='r')
            ax.scatter(model.line_el[1][y == 0, 0],
                       model.line_el[1][y == 0, 1],
                       c='g')
            ax.set_title('after PCA')

            ax_2.scatter(x[0, y == 1, 0], x[0, y == 1, 1], c='r')
            ax_2.scatter(x[0, y == 0, 0], x[0, y == 0, 1], c='g')

            ax_3.scatter(x[1, y == 1, 0], x[1, y == 1, 1], c='r')
            ax_3.scatter(x[1, y == 0, 0], x[1, y == 0, 1], c='g')
            ax_2.set_title('1st dim')
            ax_3.set_title('2nd dim')

            fig.canvas.draw()

            time.sleep(.2)

    time.sleep(1)
    plt.ioff()
    fig_2, axn = plt.subplots()
    x_plot = np.linspace(np.min(model.line_el[-1]), np.max(model.line_el[-1]),
                         1000)[:, np.newaxis]
    axn.plot(model.line_el[2][y == 0],
             -0.005 -
             0.01 * np.random.random(model.line_el[2][y == 0].shape[0]),
             'ro',
             label='class(-)')
    axn.plot(model.line_el[2][y == 1],
             -0.005 -
             0.01 * np.random.random(model.line_el[2][y == 1].shape[0]),
             'go',
             label='class(+)')
    for idx in range(len(model.pipeline[2].list_den_est)):
        log_dens = model.pipeline[2].list_den_est[idx].score_samples(x_plot)
        axn.plot(x_plot[:, 0],
                 np.exp(log_dens),
                 'r-' * (idx == 0) + 'g--' * (idx == 1),
                 linewidth=2.0)
    axn.legend(loc='upper right')
    plt.title('Likelihoods Given the Labels')
    plt.ylabel('p(e|l)')
    plt.xlabel('scores')
    fig_2.show()
    time.sleep(10)
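# Hedged sketch (added): the Silverman rule-of-thumb bandwidth used inside _demo_pipeline, isolated as a
# small helper for clarity. It mirrors the in-line formula above; the random sample is illustrative only.
import numpy as np
from scipy.stats import iqr

def silverman_bandwidth(x):
    """h = 1.06 * min(std(x), IQR(x)/1.34) * n**(-1/5), after Silverman (1986)."""
    return 1.06 * min(np.std(x), iqr(x) / 1.34) * np.power(x.shape[0], -0.2)

print(silverman_bandwidth(np.random.randn(500)))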
Example #51
0
def bestfit_spectrum(options, source, model):

    # Initialize constants
    constants = Constants()

    # Loop over number of detections
    mode_index = 0
    for mode in model.output.sline.get_mode_stats()['modes']:

        # If no detections skip further iterations
        if (model.output.ndetections == 0) and (mode_index > 0):
            continue

        # Calculate mode evidence
        mode_evidence = mode['local log-evidence']
        mode_evidence_err = mode['local log-evidence error']
        if 'continuum' in model.input.types:
            tmp = model.output.cont.get_mode_stats()['global evidence error']
            mode_evidence_err = np.sqrt(
                np.power(mode_evidence_err, 2) + np.power(tmp, 2))

        # If mode evidence less than zero then move to next iteration
        if (mode_evidence < options.detection_limit):
            continue

        # Set x-axis data
        if options.plot_restframe == 'source':
            shift_z = source.info['z']
            if options.x_units == 'optvel':
                shift_z /= constants.LIGHT_SPEED
            rest_z = shift_frame(source.spectrum.x.data, source.info['z'])
            x_data = zTOvel(rest_z, 'relativistic') * constants.LIGHT_SPEED
        elif options.plot_restframe == 'peak':
            param_index = model.input.all_ndims
            comp_index = 0.0
            shift_z = 0.0
            if model.output.ndetections == 0:
                shift_z = 0.5 * (source.spectrum.x.data[-1] +
                                 source.spectrum.x.data[0])
            else:
                if 'emission' in model.input.types:
                    shift_z += mode['maximum'][param_index]
                    comp_index += 1.0
                    param_index += 4
                if 'absorption' in model.input.types:
                    shift_z += mode['maximum'][param_index]
                    comp_index += 1.0
                shift_z /= comp_index
                if options.x_units == 'optvel':
                    shift_z /= constants.LIGHT_SPEED
            rest_z = shift_frame(source.spectrum.x.data, shift_z)
            x_data = zTOvel(rest_z, 'relativistic') * constants.LIGHT_SPEED
        elif options.x_units == 'optvel':
            x_data = source.spectrum.x.data * constants.LIGHT_SPEED
        else:
            x_data = source.spectrum.x.data
        x_diff = np.abs(x_data[1] - x_data[0])

        # Set y-axis data
        y_contsub = np.copy(source.spectrum.y.data)
        y_res = np.copy(source.spectrum.y.data)
        y_line = []
        comp_index = 0
        param_index = 0
        line_index = 0
        for typ in model.input.types:
            comp = Model()
            comp.input.priors = [np.copy(model.input.priors[comp_index])]
            comp.input.types = [typ]
            comp.input.models = [np.copy(model.input.models[comp_index])]
            comp.input.tmp.x = np.copy(source.spectrum.x.fine)
            ndims = len(comp.input.priors[0])
            comp.calculate_spectrum(
                options, source,
                mode['maximum'][param_index:param_index + ndims], ndims)
            comp.calculate_data(options, source)
            y_res -= comp.output.tmp.data
            if 'continuum' in typ:
                y_contsub -= comp.output.tmp.data
            elif (('emission' in typ) or
                  ('absorption' in typ)) and (model.output.ndetections > 0):
                y_line.append(comp.output.tmp.data)
            param_index += ndims
            comp_index += 1
        y_line = np.array(y_line)

        # If convert y-units
        if options.y_units == 'abs':
            y_contsub *= 100.0
            y_res *= 100.0
            y_line *= 100.0
        elif (options.y_units == 'Jy') or (options.y_units == 'Jy/beam'):
            y_contsub *= 1.e3
            y_res *= 1.e3
            y_line *= 1.e3

        # Set font size
        font_size = 16

        # Calculate axis limits and aspect ratio
        x_min = np.min(x_data)
        x_max = np.max(x_data)
        if options.small_plots and (model.output.ndetections != 0):
            if options.plot_restframe != 'none':
                x_centre = 0.0
            else:
                x_centre = source.info['z']
            x_min = (x_centre - 0.5 * float(options.plot_nchans) * abs(x_diff))
            x_max = (x_centre + 0.5 * float(options.plot_nchans) * abs(x_diff))
        y1_array = y_contsub[
            np.where((x_data > np.min([x_min, options.x_min]))
                     & (x_data < np.max([x_max, options.x_max])))]
        y1_min = np.min(y1_array) * float(options.ylim_scale)
        y1_max = np.max(y1_array) * float(options.ylim_scale)

        y2_array = y_res[np.where((x_data > np.min([x_min, options.x_min]))
                                  & (x_data < np.max([x_max, options.x_max])))]
        y2_min = np.min(y2_array) * float(options.ylim_scale)
        y2_max = np.max(y2_array) * float(options.ylim_scale)
        y_ratio = (y1_max - y1_min) / (y2_max - y2_min)

        # Initialize figure
        plt.ioff()
        fig = plt.figure()
        fig.subplots_adjust(hspace=0.0)
        gs = gridspec.GridSpec(2, 1, height_ratios=[y_ratio, 1])
        plt.rc('xtick', labelsize=font_size - 4)
        plt.rc('ytick', labelsize=font_size - 4)

        # Initialize subplots
        ax1 = fig.add_subplot(gs[0])
        ax2 = fig.add_subplot(gs[1])

        # Set axis limits
        ax1.set_xlim(x_min, x_max)
        ax1.set_ylim(y1_min, y1_max)
        ax2.yaxis.set_ticks(ax1.get_yticks())
        ax2.set_xlim(x_min, x_max)
        ax2.set_ylim(y2_min, y2_max)

        # Plot spectra
        #if options.channel_function == 'square':
        ax1.step(x_data,
                 y_contsub,
                 where='mid',
                 color=[0.5, 0.5, 0.5],
                 linestyle='-')
        if (model.output.ndetections > 0):
            ax1.step(x_data,
                     np.sum(y_line, 0),
                     where='mid',
                     color='k',
                     linestyle='-')
        ax2.step(x_data, y_res, where='mid', color='r', linestyle='-')
        # else:
        #    ax1.plot(x_data, y_contsub, color=[0.5,0.5,0.5], linestyle='-')
        #    ax1.plot(x_data, np.sum(y_line,0), color='k', linestyle='-')
        #    ax2.plot(x_data, y_res, color='r', linestyle='-')

        # Add labelling for each component
        if model.output.ndetections != 0:
            ymax = np.zeros(len(y_line))
            for j in range(0, len(y_line)):
                ymax[j] = np.max(np.abs(y_line[j]))
            ymax_sort = np.flipud(np.sort(ymax))
            for j in range(0, len(y_line)):
                ax1.plot(x_data,
                         y_line[j],
                         color='g',
                         linestyle='--',
                         zorder=0)
                truth = [np.abs(y_line[j]) == np.max(np.abs(y_line[j]))]
                comp_index = np.where(ymax_sort == ymax[j])
                ax1.text(np.mean(x_data[truth]) - 1. * x_diff,
                         np.max(y_contsub),
                         '%d' % (comp_index[0] + 1),
                         color='g',
                         fontsize=font_size - 2)

        # Add evidence value to plot
        if model.output.ndetections != 0:
            if options.plot_evidence:
                plt.suptitle(r'$ln(B) = %0.2f \pm %0.2f$' %
                             (mode_evidence, mode_evidence_err),
                             x=0.65,
                             y=(0.2 * y_ratio + 1) / (1 + y_ratio),
                             horizontalalignment='left',
                             fontsize=font_size - 2)

        # Add additional vertical and horizontal lines
        if options.plot_restframe != 'none':
            ax1.vlines(0.0, y1_min, y1_max, colors='k', linestyle=':')
            ax2.vlines(0.0, y2_min, y2_max, colors='k', linestyle=':')
        elif 'z' in source.info:
            ax1.vlines(float(source.info['z']) / 1e3,
                       y1_min,
                       y1_max,
                       colors='k',
                       linestyle=':')
            ax2.vlines(float(source.info['z']) / 1e3,
                       y2_min,
                       y2_max,
                       colors='k',
                       linestyle=':')
        ax1.axhline(color='k', linestyle=':', zorder=0)
        ax2.axhline(color='k', linestyle=':', zorder=0)

        # Add axis labels
        ax1.set_xlabel('')
        ax1.set_xticklabels([])
        ax2.set_ylabel('')
        if options.plot_restframe != 'none':
            ax2.set_xlabel(r"$v\,[\mathrm{km}\,\mathrm{s}^{-1}]$",
                           fontsize=font_size)
            # ax2.set_xlabel(r'$\mathrm{Relative}\,\mathrm{Gas}\,\mathrm{Velocity}\,(\mathrm{km}\,\mathrm{s}^{-1})$', fontsize=font_size)
        elif options.x_units == 'optvel':
            ax2.set_xlabel(r"$v\,[\mathrm{km}\,\mathrm{s}^{-1}]$",
                           fontsize=font_size)
        else:
            ax2.set_xlabel(r"$z$", fontsize=font_size)
        if (options.y_units == 'mJy') or (options.y_units == 'Jy'):
            ylabh = ax1.set_ylabel(r'$S\,[\mathrm{mJy}]$', fontsize=font_size)
        if (options.y_units == 'mJy/beam') or (options.y_units == 'Jy/beam'):
            ylabh = ax1.set_ylabel(r"$S\,[\mathrm{mJy}\,\mathrm{beam}^{-1}]$",
                                   fontsize=font_size)
            # ylabh = ax1.set_ylabel(r"$S\,[\mathrm{Jy}\,\mathrm{beam}^{-1}]$", fontsize=font_size)
        elif options.y_units == 'abs':
            # ylabh = ax1.set_ylabel(r'$e^{-\tau}-1 [\mathrm{per}\,\mathrm{cent}]$', fontsize=font_size)
            ylabh = ax1.set_ylabel(
                r"$\Delta{S}/S_\mathrm{c} [\mathrm{per}\,\mathrm{cent}]$",
                fontsize=font_size)
            # ylabh = ax1.set_ylabel(r"$\mathrm{Absorbed}\,\mathrm{Fraction}\,[\mathrm{per}\,\mathrm{cent}]$", fontsize=font_size)
        if options.name_switch:
            ax1.set_title('%s' % (source.info['name']), fontsize=font_size)
        ylabh.set_position(
            (ylabh.get_position()[0], 0.5 * (y_ratio - 1.0) / (y_ratio + 1.0)))
        # ylabh.set_verticalalignment('center')

        # Nice tick mark behaviour
        ax1.minorticks_on()
        ax2.minorticks_on()
        ax1.tick_params(bottom=True,
                        left=True,
                        top=True,
                        right=True,
                        length=6,
                        width=1,
                        which='major',
                        direction='in')
        ax1.tick_params(bottom=True,
                        left=True,
                        top=True,
                        right=True,
                        length=3,
                        width=1,
                        which='minor',
                        direction='in')
        ax2.tick_params(bottom=True,
                        left=True,
                        top=True,
                        right=True,
                        length=6,
                        width=1,
                        which='major',
                        direction='in')
        ax2.tick_params(bottom=True,
                        left=True,
                        top=True,
                        right=True,
                        length=3,
                        width=1,
                        which='minor',
                        direction='in')

        # Save figure to file
        if model.output.ndetections == 0:
            line_number = 0
        else:
            line_number = mode_index + 1
        plt.savefig(options.out_root + '_line_' + str(line_number) +
                    '_bestfit_spectrum.pdf')
        # plt.savefig(options.out_root+'_line_'+str(line_number)+'_bestfit_spectrum.eps')

        # Increment
        mode_index += 1
Example #52
0
def vPrep(fname, saveDir, check=True):
    fileName = va.getFileName(fname)
    aviProps = va.getAVIinfo(fname)
    plt.ion()

    # fBackground frames
    # -----------------
    # Default values
    nFrames = 10

    bgOK = False
    bgFile = saveDir + "/bg.npy"
    if os.path.isfile(bgFile):
        print("Background file exists, loading...")
        bg = np.load(bgFile)
    else:
        print("Generating background with default settings...")
        bg = va.getBg(fname, aviProps, nFrames)
    if check:
        while not bgOK:
            plt.figure()
            plt.imshow(bg)
            plt.set_cmap('gray')
            plt.show()
            uDecision = raw_input("Background OK? [y]es; [n]o ")
            if uDecision == 'y':
                bgOK = True
            else:
                uframes = raw_input("Select number of frames ")
                bg = va.getBg(fname, aviProps, int(uframes))
    np.save(saveDir + 'bg', bg)
    print('Background file saved')

    # Threshold
    # --------------
    # Default values
    ths = 40
    morphDiameter = 10

    pmtsFileExists = False
    thsOK = False
    pmtsFile = saveDir + "/pmts.npy"
    if os.path.isfile(pmtsFile):
        print("Parameters file exists, loading...")
        pmtsFileExists = True
        filePmts = np.load(pmtsFile)
        ths, morphDiameter = va.setThreshold(fname, aviProps, bg,
                                             filePmts[0][0], filePmts[1][0])
    else:
        print("Generating threshold with default settings...")
        ths, morphDiameter = va.setThreshold(fname, aviProps, bg, ths,
                                             morphDiameter)
    if check:
        while not thsOK:
            uDecision = raw_input("Threshold OK? [y]es; [n]o ")
            if uDecision == 'y':
                thsOK = True
            else:
                while True:
                    try:
                        print "Current threshold is", ths
                        uths = int(raw_input("Select new threshold "))
                        print "Current diameter is", morphDiameter
                        umorph = int(
                            raw_input(
                                "Select new diameter to erode and dilate "))
                        break
                    except ValueError:
                        print("Invalid number, please try again ")
                ths, morphDiameter = va.setThreshold(fname, aviProps, bg, uths,
                                                     umorph)

    # Arena areas
    # --------------
    # Default values
    nestPos = [125, 220]
    nestArea = [(1, 320), (225, 320), (120, 125)]
    arenaCenter = [600, 244]
    foodArea = [(785, 360), (960, 360), (860, 185)]

    plt.ioff()
    arenaOk = False
    if pmtsFileExists:
        print("Using parameters file...")
        va.plotArena(aviProps, filePmts, bg)
        pmts = [[ths], [morphDiameter], filePmts[2], filePmts[3], filePmts[4],
                filePmts[5]]
    else:
        print("Generating arena with default settings...")
        pmts = [[ths], [morphDiameter], nestPos, nestArea, arenaCenter,
                foodArea]
        va.plotArena(aviProps, pmts, bg)
    if check:
        while not arenaOk:
            uDecision = raw_input("Arena OK? [y]es; [n]o ")
            if uDecision == 'y':
                arenaOk = True
            else:
                print("Select new arena ")
                points = va.setPoints(fname, aviProps, bg)
                pmts = [[ths], [morphDiameter], points[0], points[1],
                        points[2], points[3]]

    #print pmts[0], pmts[1]
    #fsaveName = saveDir + fileName.rstrip(".avi") + "_pmts"
    np.save(saveDir + 'pmts', pmts)
    print('Parameters file saved')
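# Hedged usage sketch (added): reading the parameter file written above. Because pmts is a ragged nested
# list, numpy stores it as an object array, so allow_pickle=True is needed on load; the path is illustrative.
import numpy as np

pmts_loaded = np.load('analysis_dir/pmts.npy', allow_pickle=True)   # hypothetical saveDir
ths, morphDiameter = pmts_loaded[0][0], pmts_loaded[1][0]           # threshold and erode/dilate diameter
nestPos, nestArea, arenaCenter, foodArea = pmts_loaded[2], pmts_loaded[3], pmts_loaded[4], pmts_loaded[5]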
Example #53
0
def plot_performance_artifact_rejection(meg_raw,
                                        ica,
                                        fnout_fig,
                                        meg_clean=None,
                                        show=False,
                                        proj=False,
                                        verbose=False):
    '''
    Creates a performance image of the data before
    and after the cleaning process.
    '''

    from mne.preprocessing import find_ecg_events, find_eog_events
    from jumeg import jumeg_math as jmath

    name_ecg = 'ECG 001'
    name_eog_hor = 'EOG 001'
    name_eog_ver = 'EOG 002'
    event_id_ecg = 999
    event_id_eog = 998
    tmin_ecg = -0.4
    tmax_ecg = 0.4
    tmin_eog = -0.4
    tmax_eog = 0.4

    picks = mne.pick_types(meg_raw.info,
                           meg=True,
                           ref_meg=False,
                           exclude='bads')
    # as we defined x% of the explained variance as noise (e.g. 5%)
    # we will remove this noise from the data
    if meg_clean:
        meg_clean_given = True
    else:
        meg_clean_given = False
        meg_clean = ica.apply(meg_raw,
                              exclude=ica.exclude,
                              n_pca_components=ica.n_components_,
                              copy=True)

    # plotting parameter
    props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
    # check if ECG and EOG was recorded in addition
    # to the MEG data
    ch_names = meg_raw.info['ch_names']

    # ECG
    if name_ecg in ch_names:
        nstart = 0
        nrange = 1
    else:
        nstart = 1
        nrange = 1

    # EOG
    if name_eog_ver in ch_names:
        nrange = 2

    y_figsize = 6 * nrange
    perf_art_rej = np.zeros(2)

    # ToDo:  How can we avoid popping up the window if show=False ?
    pl.ioff()
    pl.figure('performance image', figsize=(12, y_figsize))
    pl.clf()

    # ECG, EOG:  loop over all artifact events
    for i in range(nstart, nrange):
        # get event indices
        if i == 0:
            baseline = (None, None)
            event_id = event_id_ecg
            idx_event, _, _ = find_ecg_events(meg_raw,
                                              event_id,
                                              ch_name=name_ecg,
                                              verbose=verbose)
            idx_ref_chan = meg_raw.ch_names.index(name_ecg)
            tmin = tmin_ecg
            tmax = tmax_ecg
            pl1 = nrange * 100 + 21
            pl2 = nrange * 100 + 22
            text1 = "CA: original data"
            text2 = "CA: cleaned data"
        elif i == 1:
            baseline = (None, None)
            event_id = event_id_eog
            idx_event = find_eog_events(meg_raw,
                                        event_id,
                                        ch_name=name_eog_ver,
                                        verbose=verbose)
            idx_ref_chan = meg_raw.ch_names.index(name_eog_ver)
            tmin = tmin_eog
            tmax = tmax_eog
            pl1 = nrange * 100 + 21 + (nrange - nstart - 1) * 2
            pl2 = nrange * 100 + 22 + (nrange - nstart - 1) * 2
            text1 = "OA: original data"
            text2 = "OA: cleaned data"

        # average the signals
        raw_epochs = mne.Epochs(meg_raw,
                                idx_event,
                                event_id,
                                tmin,
                                tmax,
                                picks=picks,
                                baseline=baseline,
                                proj=proj,
                                verbose=verbose)
        cleaned_epochs = mne.Epochs(meg_clean,
                                    idx_event,
                                    event_id,
                                    tmin,
                                    tmax,
                                    picks=picks,
                                    baseline=baseline,
                                    proj=proj,
                                    verbose=verbose)
        ref_epochs = mne.Epochs(meg_raw,
                                idx_event,
                                event_id,
                                tmin,
                                tmax,
                                picks=[idx_ref_chan],
                                baseline=baseline,
                                proj=proj,
                                verbose=verbose)

        raw_epochs_avg = raw_epochs.average()
        cleaned_epochs_avg = cleaned_epochs.average()
        ref_epochs_avg = np.average(ref_epochs.get_data(),
                                    axis=0).flatten() * -1.0
        times = raw_epochs_avg.times * 1e3
        if np.max(raw_epochs_avg.data) < 1:
            factor = 1e15
        else:
            factor = 1
        ymin = np.min(raw_epochs_avg.data) * factor
        ymax = np.max(raw_epochs_avg.data) * factor

        # plotting data before cleaning
        pl.subplot(pl1)
        pl.plot(times, raw_epochs_avg.data.T * factor, 'k')
        pl.title(text1)
        # plotting reference signal
        pl.plot(times, jmath.rescale(ref_epochs_avg, ymin, ymax), 'r')
        pl.xlim(times[0], times[len(times) - 1])
        pl.ylim(1.1 * ymin, 1.1 * ymax)
        # print some info
        textstr1 = 'num_events=%d\nEpochs: tmin, tmax = %0.1f, %0.1f' \
                   % (len(idx_event), tmin, tmax)
        pl.text(times[10],
                1.09 * ymax,
                textstr1,
                fontsize=10,
                verticalalignment='top',
                bbox=props)

        # plotting data after cleaning
        pl.subplot(pl2)
        pl.plot(times, cleaned_epochs_avg.data.T * factor, 'k')
        pl.title(text2)
        # plotting reference signal again
        pl.plot(times, jmath.rescale(ref_epochs_avg, ymin, ymax), 'r')
        pl.xlim(times[0], times[len(times) - 1])
        pl.ylim(1.1 * ymin, 1.1 * ymax)
        # print some info
        perf_art_rej[i] = calc_performance(raw_epochs_avg, cleaned_epochs_avg)
        # ToDo: would be nice to add info about ica.excluded
        if meg_clean_given:
            textstr1 = 'Performance: %d\nFrequency Correlation: %d'\
                       % (perf_art_rej[i],
                          calc_frequency_correlation(raw_epochs_avg, cleaned_epochs_avg))
        else:
            textstr1 = 'Performance: %d\nFrequency Correlation: %d\n# ICs: %d\nExplained Var.: %d'\
                       % (perf_art_rej[i],
                          calc_frequency_correlation(raw_epochs_avg, cleaned_epochs_avg),
                          ica.n_components_, ica.n_components * 100)

        pl.text(times[10],
                1.09 * ymax,
                textstr1,
                fontsize=10,
                verticalalignment='top',
                bbox=props)

    if show:
        pl.show()

    # save image
    pl.savefig(fnout_fig + '.png', format='png')
    pl.close('performance image')
    pl.ion()

    return perf_art_rej
Example #54
0
def quiverChange(frameIDs,centroidFile):
    centroids=np.loadtxt(centroidFile)
    ind=np.where(centroids[:,0]==frameIDs[0])
    xfirst=centroids[ind,2].ravel()
    yfirst=centroids[ind,3].ravel()

    
    #match all the frames to the first frame
    tol=20
    xArray,yArray,fxArray,fyArray,backArray,peakArray,qualArray=vis.matchAllPoints(centroids,xfirst,yfirst,tol,frameIDs)
    
    #get transformations by frame
    xdAll,ydAll,sxAll,syAll,rotAll,fxFrameAv,fyFrameAv,peakFrameAv,transAll = vis.getTransByFrame(xArray,yArray,fxArray,fyArray,peakArray,xfirst,yfirst)
    #sAll=(sxAll+syAll)/2
    xAv,yAv,fxAv,fyAv,peakAv,backAv,rmsVal,nMatch,xArray1,yArray1,dd,rmsX,rmsY,xd,yd = vis.getRMSStats(xArray,yArray,fxArray,fyArray,peakArray,backArray,xdAll,ydAll,sxAll,syAll,rotAll,xfirst,yfirst)

    nFrames=len(frameIDs)

    plt.ioff()
    xd=[]
    yd=[]
    for i in range(nFrames-1):
        xd.append(xArray[:,i]-xArray[:,i+1])
        yd.append(yArray[:,i]-yArray[:,i+1])

    xd=np.array(xd)
    yd=np.array(yd)


    for i in range(nFrames-1):
    #for i in range(1):
        fig,ax=plt.subplots(1,2)
        fig.set_figheight(4)
        fig.set_figwidth(10)

        dd=np.sqrt(xd[i]*xd[i]+yd[i]*yd[i])
        ind=np.where(dd < 3)

        if(i==0):
            
            Q=ax[0].quiver(xArray[ind,0],yArray[ind,0],xd[i,ind],yd[i,ind],scale=None,scale_units=None)
            ax[0].set_xlabel("X (pixels)")
            ax[0].set_ylabel("Y (pixels)")


            ax[0].quiverkey(Q,0.9, 0.98, 2,"2 pixels")

            dist=np.sqrt((xd[i,ind])**2+(yd[i,ind])**2)
            cmin=dist.min()
            cmax=dist.max()
            sc=ax[1].scatter(xArray[ind,0],yArray[ind,0],c=dist,vmin=cmin,vmax=cmax)
            ax[1].set_xlabel("X (pixels)")
            fig.colorbar(sc,ax=ax[1])

            
        else:
            ax[0].quiver(xArray[ind,0],yArray[ind,0],xd[i,ind],yd[i,ind],scale=Q.scale,scale_units=None)
            ax[0].set_xlabel("X (pixels)")
            ax[0].set_ylabel("Y (pixels)")
            ax[0].quiverkey(Q,0.9, 0.98, 2,"2 pixels")

            dist=np.sqrt((xd[i,ind])**2+(yd[i,ind])**2)
            sc=ax[1].scatter(xArray1[ind,0],yArray[ind,0],c=dist,vmin=cmin,vmax=cmax)
            ax[1].set_xlabel("X (pixels)")
            fig.colorbar(sc,ax=ax[1])

        plt.savefig("quiv1_"+str(int(frameIDs[0]))+"_"+str(int(i)).zfill(2)+".png")
Example #55
0
def correction_hyperCube(file_hdr,
                         path_corr,
                         arg=None,
                         name_dyes=None,
                         pixel_90=None,
                         averaging=False,
                         plotting=False,
                         analyte='O2',
                         unit='%air',
                         save=True):
    """ keys: 'Cube', 'corrected data', 'wavelength', 'Concentration'. If pixel are given, 'pixel of interest', 'region
    of interest' and averaged data if region of interest are selected and averaging is True
    'pixel of interest' are the pixel for the original (not rotated) cube in the shape of (x,y) -
    width (cube-Rows 1300) x height (cube-Samples 1088)
    'region of interest': dictionary for all sensor regions. The keys of the sensor regions correspond to the pixel in
    width-direction which then contain a dataframe with the pixel in height-direction as columns and the wavelength as
    an index
    :param file_hdr:
    :param path_corr:
    :param arg:
    :param name_dyes:
    :param pixel_90:
    :param averaging:
    :param plotting:
    :param save:
    :return:
    """
    # define required parameter
    if unit not in file_hdr:
        conc = np.nan
    else:
        conc = file_hdr.split('_cube')[0].split('_')[-1]

    # ---------------------------------------------------------------------------------
    # correction of the whole cube
    para, itime, dic_corr, wavelength = correction_cube(file_hdr=file_hdr,
                                                        path_corr=path_corr)

    # ---------------------------------------------------------------------------------
    # output dictionary
    cube_corr = dict({
        'Cube': para,
        'corrected data': dic_corr,
        'wavelength': wavelength,
        'Concentration': conc
    })

    # ---------------------------------------------------------------------------------
    # split whole cube into regions of interest
    if pixel_90:
        cube_corr = split_roi(dic_corr=dic_corr,
                              cube_corr=cube_corr,
                              pixel=pixel_90,
                              name_dyes=name_dyes,
                              averaging=averaging)

    # ---------------------------------------------------------------------------------
    # Plotting
    if plotting is True:
        if arg is None:
            figsize_ = (5, 3)
            fontsize_ = 13.
        else:
            figsize_ = arg['figure size meas']
            fontsize_ = arg['fontsize meas']

        plt.ioff()
        fig, ax = plot.plotting_averagedSignal(cube_corr,
                                               conc=conc,
                                               unit=unit,
                                               analyte=analyte,
                                               figsize_=figsize_,
                                               fontsize_=fontsize_)
        plt.show()
    else:
        fig = None
        ax = None

    # ---------------------------------------------------------------------------------
    # Saving
    if save is True:
        df_out = pd.Series(cube_corr['region of interest'])
        df_sav = pd.Series({
            'measurement': file_hdr.split('\\')[-1],
            'corr file': path_corr,
            'sensor ID': name_dyes,
            'concentration': cube_corr['Concentration'],
            'region of interest': df_out,
            'pixel of interest': cube_corr['pixel of interest'],
            'wavelength': cube_corr['wavelength']
        })

        path_save = file_hdr.split(
            'calibration')[0] + '/output/correctionCube/'
        if os.path.isdir(path_save) == False:
            pathlib.Path(path_save).mkdir(parents=True, exist_ok=True)

        name = file_hdr.split('/')[-1].split('\\')[-1].split('.')[0]
        save_name = path_save + name + '.hdf5'
        if os.path.isfile(save_name):
            ls_files_exist = glob(path_save + name + '*.hdf5')  # previous runs of this cube
            f_exist = [f.split('_')[-1].split('.')[0] for f in ls_files_exist]
            num = 0
            for f in f_exist:
                if 'run' in f:
                    num = int(f.split('run')[-1]) + 1
                else:
                    pass
            save_name = path_save + name + '_run' + str(num) + '.hdf5'

        df_sav.to_hdf(save_name, 'df_sav', format='f')

    return cube_corr, fig, ax
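# Hedged usage sketch (added): traversing the structure returned above, following the docstring of
# correction_hyperCube: 'region of interest' maps sensor regions -> width-pixel keys -> DataFrames with
# height pixels as columns and the wavelength as index. Assumes cube_corr was produced with pixel_90 given.
for sens, region in cube_corr['region of interest'].items():
    for px_w, df in region.items():
        print(sens, px_w, df.mean(axis=1).head(3))   # spectrum averaged over the height pixels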
Example #56
0
def plot_wavelet(array,
                 shotdata,
                 Bpwr,
                 Bscalespec,
                 wvfreq,
                 Bfft,
                 fftfreq,
                 time,
                 timerange=[0.0, 100.0],
                 pwr_lims=[-40, -10],
                 min_freq=500e3,
                 showPlot=True,
                 savePlot=True):
    plt.ioff()  # Turn interactive mode off so that plots don't appear automatically without plt.show()

    #Create figure
    #facecolor = white, edgecolor = black
    fig = plt.figure(num=1,
                     figsize=(9.25, 4.6),
                     dpi=200,
                     facecolor='w',
                     edgecolor='k')

    #Create axes for 2D plot
    #ax=plt.axes([fromleft,frombottom,width,height])
    ax = plt.axes([0.3, 0.105, 0.68, 0.55])
    plt.xlabel(r't [$\mu$s]', fontsize=12)
    plt.ylabel(r'$f$ [Hz]', fontsize=12)

    #prepare 2D array for plotting (flip and take log)
    plotcwt = Bpwr[0:-1]
    logplotcwt = np.log10(np.flipud(plotcwt))

    #create 2D image

    if not pwr_lims:
        print('Min max range')
        vmin = logplotcwt.min()
        vmax = logplotcwt.max()
        print(vmin)
        print(vmax)
        im = plt.imshow(logplotcwt,
                        extent=[time[0], time[-1], wvfreq[0], wvfreq[-1]],
                        vmin=vmin,
                        vmax=vmax,
                        aspect='auto')
    if pwr_lims:
        print('user range')
        im = plt.imshow(logplotcwt,
                        extent=[time[0], time[-1], wvfreq[0], wvfreq[-1]],
                        vmin=pwr_lims[0],
                        vmax=pwr_lims[1],
                        aspect='auto')

    #modify axis settings
    ax.set_yscale('log')
    plt.yticks(fontsize=6)
    plt.ylim(min_freq, wvfreq[0])

    #Create axes for time series plot (with x axis shared with 2Dplot)
    ax2 = plt.axes([0.3, 0.7, 0.68, 0.25], sharex=ax)

    #create time series image
    plt.plot(time, array, linewidth=0.5, color='blue')

    #modify axis settings
    plt.ylabel('B-dot', fontsize=12)
    plt.yticks(fontsize=6)
    plt.xticks(np.arange(0, 110, 10), fontsize=6)
    plt.xlim(timerange[0], timerange[1])

    #create total power spectrum comparison axes
    ax3 = plt.axes([0.03, 0.105, 0.2, 0.75])

    #plot total power spectra
    plt.loglog(fftfreq, Bfft, color='orange', linewidth=0.5, label='FFT')
    plt.loglog(wvfreq, Bscalespec, color='red', linewidth=1, label='Wavelet')

    #modify axis settings
    plt.xlabel(r'$f$ [Hz]', fontsize=12)
    plt.xticks(fontsize=6)
    plt.yticks(fontsize=6)
    plt.xlim(fftfreq[0], 5.0 * fftfreq[-1])

    #make simple legend
    plt.legend(loc='lower left', fontsize=7)

    #label plot
    day = shotdata[0]
    nn = shotdata[1]  #shot number
    axis_label = shotdata[2]  #axis (r,t,z)
    channel = shotdata[3]  #probe channel number
    plt.text(0.15,
             0.92,
             "Shot " + str(nn) + axis_label + " Chan:" + str(channel),
             fontsize=10,
             bbox=dict(facecolor='green', alpha=0.2),
             transform=fig.transFigure,
             horizontalalignment='center')
    plt.text(0.30,
             0.01,
             "Data Date: " + day,
             fontsize=6,
             transform=fig.transFigure,
             horizontalalignment='center')

    if showPlot and not savePlot: plt.show()
    if savePlot:
        process_dir = 'C:/Users/David Schaffner/Documents/ssxpython/plots/WaveletOutputDatabase/'  #run073013_1mwb_single/chan1/'
        filename = 'wavelet_' + day + '_shot' + str(
            nn) + '_B' + axis_label[5] + '_chan' + str(channel) + '.png'
        savefile = os.path.normpath(process_dir + filename)
        #save figure with facecolor=white, edgecolor = black
        plt.savefig(savefile, dpi=150, facecolor='w', edgecolor='k')
        plt.clf()
        plt.close(fig)
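The plotting recipe in plot_wavelet above boils down to log10 of the power array rendered with imshow over a [time, frequency] extent, the y axis switched to a log scale, and a time-series panel sharing the x axis. A minimal, self-contained sketch of that layout with synthetic stand-in data (none of these arrays are the variables used above):

import numpy as np
import matplotlib.pyplot as plt

# synthetic stand-ins, purely illustrative
time = np.linspace(0.0, 100.0, 2000)
signal = np.sin(2 * np.pi * 0.5 * time) + 0.1 * np.random.randn(time.size)
freqs = np.logspace(5, 7, 128)                        # 100 kHz .. 10 MHz
power = np.abs(np.random.randn(freqs.size, time.size)) + 1e-6

fig = plt.figure(figsize=(9.25, 4.6))

# 2D log-power panel; flipud so the first frequency row ends up at the bottom
ax = plt.axes([0.1, 0.1, 0.85, 0.55])
ax.imshow(np.log10(np.flipud(power)),
          extent=[time[0], time[-1], freqs[0], freqs[-1]],
          aspect='auto')
ax.set_yscale('log')
ax.set_xlabel(r't [$\mu$s]')
ax.set_ylabel(r'$f$ [Hz]')

# time-series panel sharing the x axis with the spectrogram
ax2 = plt.axes([0.1, 0.72, 0.85, 0.25], sharex=ax)
ax2.plot(time, signal, linewidth=0.5)

plt.show()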
Example #57
0
def hyperCube_preparation(file_hdr,
                          arg,
                          unit,
                          name_dyes=None,
                          pixel_rot=None,
                          averaging=False,
                          plotting=False,
                          analyte='O2',
                          save=True,
                          cube_type=None):
    # Cube loading, and split into region of interest
    # --------------------------------------------------------------------------------------------------------------
    # define required parameter
    if unit not in file_hdr:
        conc = np.nan
    else:
        conc = file_hdr.split('_cube')[0].split('_')[-1]

    # ---------------------------------------------------------------------------------
    # correction of the whole cube
    para, itime, dic_cube, wavelength = cube_rearrange(file_hdr=file_hdr)

    # ---------------------------------------------------------------------------------
    # output dictionary
    cube_corr = dict({
        'Cube': para,
        'cube data': dic_cube,
        'wavelength': wavelength,
        'Concentration': conc
    })

    # ---------------------------------------------------------------------------------
    # split whole cube into regions of interest
    # coordinate rotation to fit it to the original orientation of the cube
    if pixel_rot:
        if 'rotation' in arg:
            rot = arg['rotation']
        else:
            rot = 0

        pixel_0 = list()
        for px in pixel_rot:
            px_roi = list()
            for p in px:
                px_roi.append(
                    coordinate_rotation(x=p[0],
                                        y=p[1],
                                        phi=rot,
                                        cube_shape=para['cube'].shape))
            if rot == 90:
                px_roi = [px_roi[1], px_roi[-2], px_roi[-1], px_roi[0]]
            if rot == 180:
                px_roi = [px_roi[2], px_roi[-1], px_roi[0], px_roi[1]]
            if rot == 270:
                px_roi = [px_roi[-1], px_roi[0], px_roi[1], px_roi[-2]]
            # rotation for 270deg
            pixel_0.append(px_roi)

        # split cube (original orientation) and corresponding pixel
        cube = split_roi(dic_corr=dic_cube,
                         cube_corr=cube_corr,
                         pixel=pixel_0,
                         name_dyes=name_dyes,
                         averaging=averaging)

    # ---------------------------------------------------------------------------------
    # Plotting
    if plotting is True:
        if arg is None:
            figsize_ = (5, 3)
            fontsize_ = 13.
        else:
            figsize_ = arg['figure size meas']
            fontsize_ = arg['fontsize meas']

        plt.ioff()
        fig, ax = plot.plotting_averagedSignal(cube,
                                               conc=conc,
                                               unit=unit,
                                               analyte=analyte,
                                               figsize_=figsize_,
                                               fontsize_=fontsize_)
        plt.show()
    else:
        fig = None
        ax = None

    # ---------------------------------------------------------------------------------
    # Saving
    if save is True:
        df_out = pd.Series(cube_corr['region of interest'])
        df_sav = pd.Series({
            'measurement': file_hdr.split('\\')[-1],
            'sensor ID': name_dyes,
            'concentration': cube['Concentration'],
            'region of interest': df_out,
            'pixel of interest': cube['pixel of interest'],
            'wavelength': cube['wavelength']
        })

        if 'calibration' in file_hdr:
            path_save = file_hdr.split(
                'calibration')[0] + '/output/correctionCube/'
        else:
            path_save = file_hdr.split(
                'measurement')[0] + '/output/correctionCube/measurement/'

        if cube_type == 'single':
            path_save = path_save + 'singleIndicator/'
        elif cube_type == 'multiple':
            path_save = path_save + 'multiIndicator/'
        else:
            raise ValueError(
                'Define whether the cube contains single or multiple indicators'
            )

        if not os.path.isdir(path_save):
            pathlib.Path(path_save).mkdir(parents=True, exist_ok=True)

        name = file_hdr.split('/')[-1].split('\\')[-1].split('.')[0]
        save_name = path_save + name + '.hdf5'
        if os.path.isfile(save_name):
            ls_files_exist = glob(path_save + name + '*.hdf5')
            f_exist = [f.split('_')[-1].split('.')[0] for f in ls_files_exist]
            num = 0
            for f in f_exist:
                if 'run' in f:
                    num = int(f.split('run')[-1]) + 1
                else:
                    pass
            save_name = path_save + name + '_run' + str(num) + '.hdf5'

        df_sav.to_hdf(save_name, 'df_sav', format='f')

    return cube, fig, ax
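The save block at the end of hyperCube_preparation guards against overwriting an existing output file by appending an incrementing '_runN' suffix. Factored out on its own, the intended pattern might look like the sketch below (next_run_name is an illustrative helper, not a function from the original module):

import os
from glob import glob

def next_run_name(path_save, name):
    """Return '<name>.hdf5' if unused, otherwise '<name>_runN.hdf5' with N
    one past the highest existing run number (sketch of the pattern above)."""
    base = os.path.join(path_save, name + '.hdf5')
    if not os.path.isfile(base):
        return base
    runs = []
    for f in glob(os.path.join(path_save, name + '_run*.hdf5')):
        tag = os.path.splitext(os.path.basename(f))[0].split('_run')[-1]
        if tag.isdigit():
            runs.append(int(tag))
    n = max(runs) + 1 if runs else 0
    return os.path.join(path_save, name + '_run%d.hdf5' % n)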
Example #58
0
def skydip(data,
           averagepol=True,
           tsky=300.,
           plot=False,
           temperature=288,
           pressure=101325.,
           humidity=0.5):
    """Determine the opacity from a set of 'skydip' obervations.
    This can be any set of observations over a range of elevations,
    but will ususally be a dedicated (set of) scan(s).
    Return a list of 'n' opacities for 'n' IFs. In case of averagepol
    being 'False' a list of 'n*m' elements where 'm' is the number of
    polarisations, e.g.
    nIF = 3, nPol = 2 => [if0pol0, if0pol1, if1pol0, if1pol1, if2pol0, if2pol1]

    The opacity is determined by fitting a first order polynomial to:


        Tsys(airmass) = p0 + airmass*p1

    where

        airmass = 1/sin(elevation)

        tau =  p1/Tsky

    Parameters:
        data:       a list of file names or scantables or a single
                    file name or scantable.
        averagepol: Return the average of the opacities for the polarisations
                    This might be useful to set to 'False' if one polarisation
                    is corrupted (Mopra). If set to 'False', an opacity value
                    per polarisation is returned.
        tsky:       The sky temperature (default 300.0K). This might
                    be read from the data in the future.
        plot:       Plot each fit (airmass vs. Tsys). Default is 'False'
    """
    # quieten output
    verbose = rcParams["verbose"]
    rcParams["verbose"] = False
    try:
        if plot:
            from matplotlib import pylab
        scan = _import_data(data)
        f = fitter()
        f.set_function(poly=1)
        sel = selector()
        basesel = scan.get_selection()
        inos = scan.getifnos()
        pnos = scan.getpolnos()
        opacities = []
        om = model(temperature, pressure, humidity)
        for ino in inos:
            sel.set_ifs(ino)
            opacity = []
            fits = []
            airms = []
            tsyss = []
            if plot:
                pylab.cla()
                pylab.ioff()
                pylab.clf()
                pylab.xlabel("Airmass")
                pylab.ylabel(r"$T_{sys}$")
            for pno in pnos:
                sel.set_polarisations(pno)
                scan.set_selection(basesel + sel)
                freq = scan.get_coordinate(0).get_reference_value() / 1e9
                freqstr = "%0.4f GHz" % freq
                tsys = scan.get_tsys()
                elev = scan.get_elevation()
                airmass = [1. / math.sin(i) for i in elev]
                airms.append(airmass)
                tsyss.append(tsys)
                f.set_data(airmass, tsys)
                f.fit()
                fits.append(f.get_fit())
                params = f.get_parameters()["params"]
                opacity.append(params[1] / tsky)
            if averagepol:
                opacities.append(sum(opacity) / len(opacity))
            else:
                opacities += opacity
            if plot:
                colors = ['b', 'g', 'k']
                n = len(airms)
                for i in range(n):
                    pylab.plot(airms[i], tsyss[i], 'o', color=colors[i])
                    pylab.plot(airms[i], fits[i], '-', color=colors[i])
                    pylab.figtext(0.7,
                                  0.3 - (i / 30.0),
                                  r"$\tau_{fit}=%0.2f$" % opacity[i],
                                  color=colors[i])
                if averagepol:
                    pylab.figtext(0.7,
                                  0.3 - (n / 30.0),
                                  r"$\tau_{avg}=%0.2f$" % opacities[-1],
                                  color='r')
                    n += 1
                pylab.figtext(0.7,
                              0.3 - (n / 30.0),
                              r"$\tau_{model}=%0.2f$" %
                              om.get_opacities(freq * 1e9),
                              color='grey')

                pylab.title("IF%d : %s" % (ino, freqstr))

                pylab.ion()
                pylab.draw()
                raw_input("Hit <return> for next fit...")
            sel.reset()

        scan.set_selection(basesel)
        if plot:
            pylab.close()
        return opacities
    finally:
        rcParams["verbose"] = verbose
Example #59
0
def draw_mod(mid, elem, data, sims):
#def plot2(idd, count):

  plot_rE = True
  print_rE = True
  prnt = False
  show = False
  save_fig_path = os.path.join(moddir, '%06i'%mid)

#  try:
#    os.mkdir(save_fig_path)
#  except OSError as e:
#    print 'could not create folder', e
    #return
    
  name = elem['name']
  user = elem['user']
  
  if prnt:
    print '...drawing modelling result', mid, name, user
  else:
    print '> drawing modres %06i'%mid,
  
  # create info files
  with open(os.path.join(save_fig_path, name + '.txt'), 'a'):
    pass
  with open(os.path.join(save_fig_path, user + '.txt'), 'a'):
    pass
  

  try:
    sims[name]
  except KeyError:
    print '\n!! missing sims data for', mid, name
    return

  #yerr=[elem['err_p'] - elem['y'], elem['y']- elem['err_m']]


  if plot_rE or print_rE:
    rE_mean = spg.getEinsteinR(elem['x'], elem['y'])
    #rE_max = spg.getEinsteinR(elem['x'], elem['err_p'])
    #rE_min = spg.getEinsteinR(elem['x'], elem['err_m'])
    rE_data = spg.getEinsteinR(sims[name]['x'], sims[name]['y'])

  # plotting settings
  ############################

  # where (y value) to start plotting the extremal-point markers
  mmax = np.max([np.max(elem['err_p']), np.max(sims[name]['y'])])
  ofs = max(round(mmax*0.5), 2)
  rE_pos = max(round(mmax*0.75), 3) # where to draw the Einstein radius text
  # text offsets and properties
  t_dx = 0.0
  t_dy = 0.1
  t_dt = mmax/16.
  t_props = {'ha':'left', 'va':'bottom', 'fontsize':params['text.fontsize']} 
    
    
  pl.ioff()
  pl.figure()
  #panel = fig.add_subplot(1,1,1)
  
  x  = elem['x']
  #y  = elem['y']
  yp = elem['err_p']
  ym = elem['err_m']
  
  # get interpolated values
  x_ip  = np.linspace(np.min(x), np.max(x), 1000)
  #y_ip  = np.interp(x_ip, x, y)
  yp_ip = np.interp(x_ip, x, yp)
  ym_ip = np.interp(x_ip, x, ym)
  
  # generate mask: keep only values between the extremal points
  pnts_x = [_['d'] for _ in elem['pnts'] ]
  mask = (x_ip > np.min(pnts_x)) & (x_ip < np.max(pnts_x))
  

  #plot the model values
  pl.plot(elem['x'], elem['err_p'], 'b')
  pl.plot(elem['x'], elem['err_m'], 'b')
  pl.fill_between(x_ip[mask], yp_ip[mask], ym_ip[mask], facecolor='blue', alpha=0.5)

  
  #plot vertical lines for point location
  for jj, p in enumerate(sorted(elem['pnts'])):
    if   p['t']=='min': c='c'
    elif p['t']=='max': c='r'
    elif p['t']=='sad': c='g'
    pl.plot([p['d'], p['d']], [0,ofs-t_dt*jj], c+':')
    pl.text(p['d']+t_dx, ofs-t_dt*jj+t_dy, p['t'], **t_props)

  # plot simulation parameter data
  pl.plot(sims[name]['x'], sims[name]['y'], 'r')
  pl.plot([0,np.max(elem['x'])], [1,1], ':m')  
  
  #titles etc
  #pl.suptitle('Analysis for ID: %s - model of: %s' % (elem['id'], name), fontsize=18)
  #pl.title('by: %s - pixrad: %i - nModels: %i' % (r'\verb|%s|'%elem['user'], elem['pxR'], elem['nMod']), fontsize=14)
  

#  if prnt:
#    print 'stat:', idd, 
#    print 'pixrad :', int(elem['nr'])-1,
#    print 'nmodels:', int(elem['nMod']),
#  if prnt & print_rE:
#    print 'rE_models = %4.2f [%4.2f...%4.2f] rE_sim = %4.2f' % (rE_mean, rE_min, rE_max, rE_data)
#  elif prnt:
#    print ''
    
  # plot einsteinradius
  if plot_rE:
    #a_re_min = np.array([rE_min, rE_min])
    #a_re_max = np.array([rE_max, rE_max])
    a_re_mean = np.array([rE_mean, rE_mean])
    a_re_data = np.array([rE_data, rE_data])
    #fbx2 = rE_max
    #fby = np.array([0.5,rE_pos-0.25])
    #fby = np.array([0.5,1,1.5])
    #a2_re_min = np.array([rE_min, rE_min, rE_min])
    #a2_re_max = np.array([rE_max, rE_max, rE_max])

    pl.plot(a_re_mean, [0,rE_pos], '--', color=(0,0.5,0))
    pl.text(rE_mean+t_dx, rE_pos+t_dy, r'$\Theta _\text{E}$ = %4.2f'%(rE_mean), **t_props)
    #pl.plot(a_re_min, [0,rE_pos-0.25], ':b')
    #pl.plot(a_re_max, [0,rE_pos-0.25], ':b')

    #if prnt: print a_re_min, fbx2, fby
    
    #pl.fill_betweenx(fby,a2_re_min, a2_re_max, alpha=0.3, edgecolor='white', facecolor=['cyan','green'], cmap=pl.cm.Accent) #facecolor='cyan',
    
    #cp1 = 0.0
    #cp2 = 1.0
    #cy = np.ones(rE_pos*4) # spaced in 1/4 steps, rE_pos is int!
    #cy[0]=0
    #cy[1]=0.5
    #cy[-1]=0
    #cy[-2]=0.5
    
    #cy = np.array([cy,cy]).transpose()
    
    pl.plot(a_re_data, [0,rE_pos+t_dt], '--r')
    pl.text(rE_data+t_dx, rE_pos+t_dt+t_dy, r'$\Theta_\text{E,sim}$ = %4.2f'%(rE_data), **t_props)
    

  
  
  pl.xlabel(r'image radius [pixels]')
  pl.ylabel(r'mean convergence [1]')  
  
  pl.xlim([0,np.max(elem['x'])])
  
  if show:
    #print 'show'
    pl.show()
  else:
    imgname = ('kappa_encl.%s'%ext)
    pl.savefig(os.path.join(save_fig_path, imgname))
    pass
  
  
  print ' ... DONE.'
  if not show:
    print '  - %s' % imgname
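The shaded model band in draw_mod comes from interpolating the upper and lower error curves onto a fine grid and filling only between the outermost image positions. In isolation the pattern is just np.interp plus a boolean mask and fill_between (generic sketch with made-up curves, not data from this module):

import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0.0, 10.0, 15)              # coarse model radii (made up)
y_hi = 2.0 * np.exp(-x / 4.0) + 0.3         # upper error curve
y_lo = 2.0 * np.exp(-x / 4.0) - 0.3         # lower error curve
pnts_x = [1.5, 7.2]                         # extremal image positions (made up)

x_fine = np.linspace(x.min(), x.max(), 1000)
hi = np.interp(x_fine, x, y_hi)
lo = np.interp(x_fine, x, y_lo)
mask = (x_fine > min(pnts_x)) & (x_fine < max(pnts_x))

plt.plot(x, y_hi, 'b')
plt.plot(x, y_lo, 'b')
plt.fill_between(x_fine[mask], hi[mask], lo[mask], facecolor='blue', alpha=0.5)
plt.show()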
Example #60
0
    def __init__(self, parent):
        wx.Panel.__init__(self, parent)

        # this will contain all elements in this class
        mainSizer = wx.BoxSizer(wx.HORIZONTAL)

        # "interactive mode" off
        # if on, figure is redrawn every time it is updated, such as setting data in the extend() method
        plt.ioff()

        # graph figure parameters
        global BUF_LEN, FRAME_LEN, timestep, period, height, channels
        BUF_LEN = 300
        FRAME_LEN = int(0.1 * BUF_LEN)
        timestep = 1
        period = BUF_LEN * timestep
        height = 2000000
        channels = 12
        dpi = 100
        g_width = 3.2
        g_length = 1.8

        styles = [
            'r-', 'g-', 'y-', 'm-', 'r-', 'r-', 'g-', 'y-', 'm-', 'r-', 'r-',
            'g-'
        ]

        global times, samples
        times = np.arange(0, period, timestep)  # X values
        samples = np.zeros([BUF_LEN, channels])  # Y values

        # save the initial background of every graphing canvas, for use when updating graphs
        # place data, axes, and backgrounds of matplotlib figures in lists for easy access
        # to the handles later (see the blit-update sketch after this constructor)
        figs = []
        lines = []
        axes = []
        backgrounds = []

        #change size of graph axes labels
        matplotlib.rc('font', size='10')

        #-----------------BEGIN GRAPHS------------------

        # create a 3 by 4 grid of ECG lead graphs
        graphGrid = wx.GridBagSizer(hgap=5, vgap=10)
        # create all plots and place them in the graph grid
        for i in range(channels):
            # create label for graph and add it to grid
            label = wx.StaticText(self, label="Lead " + str(i + 1))
            graphGrid.Add(label, pos=((i * 2) % 6, i / 3))

            # initialize graph figure and add it to grid
            grafig = Figure((g_width, g_length), dpi)
            axis = grafig.add_subplot(111)
            axis.set_axis_bgcolor('black')
            y = samples[:, i]
            line = axis.plot(times[:-2 * FRAME_LEN],
                             y[FRAME_LEN:-FRAME_LEN],
                             styles[i],
                             animated=True)
            axis.set_ylim(-height, height)
            axis.set_xlim(-timestep, period - 2 * FRAME_LEN + timestep)

            canvas = FigCanvas(self, -1, grafig)
            graphGrid.Add(canvas, pos=((i * 2 + 1) % 6, i / 3))

            # We need to draw the canvas before we start animating
            grafig.canvas.draw()

            # add figure elements to the appropriate list
            figs.append(grafig)
            lines.extend(line)
            axes.append(axis)
            backgrounds.append(grafig.canvas.copy_from_bbox(axis.bbox))

        #-----------------BEGIN DATA STUFF------------------
        # Lock Access to data
        global dataget, ser_ctrl
        (datasend, dataget) = mp.Pipe()
        ser_ctrl = mp.Queue()
        self.loader = serial_reader_thread(datasend, ser_ctrl)
        self.calc = calculator_thread(2)

        # Make a convenient zipped list for simultaneous access
        self.items = zip(figs, lines, axes, backgrounds)
        self.pos = 0

        #-----------------BEGIN SIDEBAR-----------------

        # Sidebar with buttons and vitals data
        sideBar = wx.GridBagSizer(vgap=5)

        # add pause button
        self.paused = False
        self.pause_button = wx.Button(self, wx.ID_ANY, "Pause")
        self.reset_button = wx.Button(self, wx.ID_ANY, "Reset")
        self.Bind(wx.EVT_BUTTON, self.on_pause_button, self.pause_button)
        self.Bind(wx.EVT_UPDATE_UI, self.on_update_pause_button,
                  self.pause_button)

        self.Bind(wx.EVT_BUTTON, self.on_reset_button, self.reset_button)

        # add vitals information
        self.elapsed_lbl = wx.StaticText(self, label="Time elapsed: ")
        self.elapsed = wx.StaticText(self, label="00:00:00")
        #self.heartrate_lbl = wx.StaticText(self, label="Heart rate: ")
        #self.heartrate = wx.StaticText(self, label="1 bpm")

        # add sidebar elements to grid
        sideBar.Add(self.pause_button, pos=(0, 0))
        sideBar.Add(self.reset_button, pos=(1, 0))
        sideBar.Add(self.elapsed_lbl, pos=(2, 0))
        sideBar.Add(self.elapsed, pos=(3, 0))
        #sideBar.Add(self.heartrate_lbl, pos=(3,0))
        #sideBar.Add(self.heartrate, pos=(4,0))

        #-----------------ENDOF SIDEBAR-----------------

        # add graph grid and sidebar to sizer, and add that to the panel
        mainSizer.Add(graphGrid, 0, wx.ALL, 5)
        mainSizer.Add(sideBar, 0, 0, wx.CENTER)
        self.SetAutoLayout(1)
        self.SetSizerAndFit(mainSizer)

        # show panel (always last step)
        parent.Show(True)
        self.Show(True)

        # Begin timer and helper threads
        self.loader.start()
        self.calc.start()

        self.redraw_timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer)
        self.redraw_timer.Start(10)

        self.t_timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.update_time, self.t_timer)
        self.t_timer.Start(10000)
        time.clock()
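    # The backgrounds captured with copy_from_bbox above are the first half of
    # matplotlib's blitting recipe; the timer-driven redraw (not shown in this
    # snippet) would typically restore each saved background, push new data into
    # the animated line, redraw just that artist, and blit the axes region.
    # A hypothetical sketch of that step (the real on_redraw_timer may differ):
    def _sketch_blit_update(self):
        for i, (fig, line, axis, background) in enumerate(self.items):
            fig.canvas.restore_region(background)                 # wipe back to the saved background
            line.set_ydata(samples[:, i][FRAME_LEN:-FRAME_LEN])   # new samples for this lead
            axis.draw_artist(line)                                # redraw only the animated line
            fig.canvas.blit(axis.bbox)                            # push the axes region to screen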