def centroid(stamp):
    """
    Compute the center of the star as a flux-weighted center of
    mass.

    Parameters
    ----------
    stamp : (N, M) array_like
            2-D array representing a section of an image that
            contains a star.

    Returns
    -------
    cx : float
         x coordinate of the star's center.

    cy : float
         y coordinate of the star's center.
    """
    # Build vectors with the x and y indices of the stamp.
    x_vect = sp.arange(0, sp.shape(stamp)[1])
    y_vect = sp.arange(0, sp.shape(stamp)[0])
    # Initial estimate of the star's center: the central pixel.
    cx = int(sp.median(x_vect))
    cy = int(sp.median(y_vect))
    # Flux-weighted x coordinate along the estimated central row.
    sum_x = sp.nansum(x_vect * stamp[cy, :])
    cx = sum_x / sp.nansum(stamp[cy, :])
    # Flux-weighted y coordinate along the column nearest to cx
    # (cx is a float after the weighted mean, so round it before indexing).
    sum_y = sp.nansum(y_vect * stamp[:, int(round(cx))])
    cy = sum_y / sp.nansum(stamp[:, int(round(cx))])
    return cx, cy
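A minimal usage sketch (assuming scipy is imported as sp; the sp.arange/sp.nansum calls above are the old scipy aliases for numpy functions):

import numpy as np

# Hypothetical 9x9 stamp: a Gaussian "star" centered near (x, y) = (5.2, 3.8).
yy, xx = np.mgrid[0:9, 0:9]
stamp = np.exp(-0.5 * ((xx - 5.2) ** 2 + (yy - 3.8) ** 2))

cx, cy = centroid(stamp)
print(cx, cy)  # both coordinates should land close to (5.2, 3.8)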
Example #2
def fitdata(basedir, configfile, optinputs):
    dirio = ("ACF", "Fitted")
    inputdir = os.path.join(basedir, dirio[0])
    outputdir = os.path.join(basedir, dirio[1])

    dirlist = glob.glob(os.path.join(inputdir, "*lags.h5"))
    dirlistsig = glob.glob(os.path.join(inputdir, "*sigs.h5"))

    Ionoin = IonoContainer.readh5(dirlist[0])
    Ionoinsig = IonoContainer.readh5(dirlistsig[0])
    fitterone = Fitterionoconainer(Ionoin, Ionoinsig, configfile)
    (fitteddata, fittederror) = fitterone.fitdata(
        ISRSfitfunction, startvalfunc, exinputs=[fitterone.simparams["startfile"]]
    )

    if fitterone.simparams["Pulsetype"].lower() == "barker":
        paramlist = fitteddata
        species = fitterone.simparams["species"]
        paranamsf = ["Ne"]
    else:
        (Nloc, Ntimes, nparams) = fitteddata.shape
        fittederronly = fittederror[:, :, range(nparams), range(nparams)]

        paramnames = []
        species = fitterone.simparams["species"]
        Nions = len(species) - 1
        Nis = fitteddata[:, :, 0 : Nions * 2 : 2]
        Tis = fitteddata[:, :, 1 : Nions * 2 : 2]
        Nisum = sp.nansum(Nis, axis=2)[:, :, sp.newaxis]
        Tisum = sp.nansum(Nis * Tis, axis=2)[:, :, sp.newaxis]
        Ti = Tisum / Nisum

        nNis = fittederronly[:, :, 0 : Nions * 2 : 2]
        nTis = fittederronly[:, :, 1 : Nions * 2 : 2]
        nNisum = sp.sqrt(sp.nansum(Nis * nNis ** 2, axis=2))[:, :, sp.newaxis] / Nisum
        nTisum = sp.sqrt(sp.nansum(Nis * nTis ** 2, axis=2))[:, :, sp.newaxis]
        nTi = nTisum / Nisum

        paramlist = sp.concatenate((fitteddata, Nisum, Ti, fittederronly, nNisum, nTi), axis=2)
        for isp in species[:-1]:
            paramnames.append("Ni_" + isp)
            paramnames.append("Ti_" + isp)
        paramnames = paramnames + ["Ne", "Te", "Vi", "Nepow", "Ni", "Ti"]
        paramnamese = ["n" + ip for ip in paramnames]
        paranamsf = sp.array(paramnames + paramnamese)

    Ionoout = IonoContainer(
        Ionoin.Sphere_Coords,
        paramlist,
        Ionoin.Time_Vector,
        ver=1,
        coordvecs=Ionoin.Coord_Vecs,
        paramnames=paranamsf,
        species=species,
    )

    outfile = os.path.join(outputdir, "fitteddata.h5")
    Ionoout.saveh5(outfile)
Example #3
def nandot(x1, x2):
    """Dot product that ignores NaNs, for 1-D/2-D operand combinations."""
    if len(x1.shape) == 1 and len(x2.shape) == 2:
        # vector . matrix: broadcast x1 down the columns, nansum over rows
        x1T = SP.tile(x1, [x2.shape[1], 1]).transpose()
        return SP.nansum(SP.multiply(x1T, x2), axis=0)
    elif len(x2.shape) == 1 and len(x1.shape) == 2:
        # matrix . vector: broadcast x2 across the rows, nansum over columns
        x2T = SP.tile(x2, [x1.shape[0], 1])
        return SP.nansum(SP.multiply(x1, x2T), axis=1)
    elif len(x1.shape) == 1 and len(x2.shape) == 1:
        # vector . vector
        return SP.nansum(SP.multiply(x1, x2))
    return None
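A quick check of the NaN-aware dot product; numpy stands in here for the snippet's old-style SP alias of scipy:

import numpy as SP

a = SP.array([1.0, SP.nan, 3.0])
B = SP.array([[1.0, 2.0], [SP.nan, 4.0], [5.0, 6.0]])
print(nandot(a, B))  # NaN terms are dropped by nansum -> [16. 20.]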
Example #4
    def calc_MI_fore_only(self):
        """
        Calculate the mutual information; response variability comes from
        responses to background, while the information is calculated for the
        foreground.
        """

        ds = 1. / self.num_fore_signals
        dr = self.entropy_pdf_dy

        pdf_r = ds * sp.sum(self.pdf_r_s, axis=0)
        noise_H = -dr * ds * sp.nansum(
            sp.log(self.pdf_r_s) / sp.log(2) * self.pdf_r_s, axis=(0, 1))
        response_H = -dr * sp.nansum(sp.log(pdf_r + 1e-9) / sp.log(2) * pdf_r,
                                     axis=0)
        self.entropy = response_H - noise_H
Example #5
    def vi_metric(confmx):
        """computes Marina Meila's variation of information metric between two clusterings of the same data"""

        # init
        rval = {'MI': None, 'VI': None, 'Px': None, 'Py': None, 'Pxy': None, 'Hx': None, 'Hy': None}
        nx, ny = confmx.shape
        Px = sp.zeros(nx)
        Py = sp.zeros(ny)

        # compute
        tot = sp.nansum(confmx)
        for i in range(nx):
            Px[i] = confmx[i, :].sum() / tot
        for j in range(ny):
            Py[j] = confmx[:, j].sum() / tot
        Pxy = confmx / tot
        Hx = ModMetricMeila.entropy(Px)
        Hy = ModMetricMeila.entropy(Py)
        MI = ModMetricMeila.mutual_information(Px, Py, Pxy)

        # return
        rval['VI'] = Hx + Hy - 2 * MI
        rval['MI'] = MI
        rval['Pxy'] = Pxy
        rval['Px'] = Px
        rval['Py'] = Py
        rval['Hx'] = Hx
        rval['Hy'] = Hy
        return rval
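For reference, a self-contained numpy sketch of the same computation, usable without the surrounding ModMetricMeila class:

import numpy as np

def vi_from_confusion(confmx):
    """Variation of information from a confusion matrix (standalone sketch)."""
    Pxy = confmx / np.nansum(confmx)
    Px, Py = Pxy.sum(axis=1), Pxy.sum(axis=0)
    H = lambda p: -np.nansum(p * np.log(p))
    MI = np.nansum(Pxy * np.log(Pxy / np.outer(Px, Py)))
    return H(Px) + H(Py) - 2.0 * MI

# Two clusterings that mostly agree give a small VI.
print(vi_from_confusion(np.array([[30., 2.], [3., 25.]])))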
Example #6
def movnanmean(x, window_size, axis=0, accumulate=False):
    r'''
    Calculate the moving average over a given window size, ignoring NaNs.

    Parameters
    ----------
    x : 1d numpy array
        Input signal.
    window_size : int
        Length of the moving window.
    axis : int
        Axis along which the stacked windows are reduced.
    accumulate : bool
        If True, return the moving (nan)sum instead of the moving mean.

    Returns
    -------
    z : numpy array
        Moving mean (or moving sum) for each window position.

    Notes
    -----
    Slower and higher memory consumption, but can ignore NaN values in the
    window.
    '''
    if x.ndim == 1:
        y = rolling_window(x, window_size)
    else:
        raise ValueError('x must be a 1-d array')

    if accumulate:
        z = scipy.nansum(y, axis)
    else:
        z = scipy.stats.stats.nanmean(y, axis)
    return z
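The rolling_window helper is not included in this snippet; a common stride-tricks implementation (an assumption, not necessarily the original project's helper) and a direct call look like this. Note that scipy.stats.stats.nanmean has since been removed from scipy in favor of numpy.nanmean:

import numpy as np

def rolling_window(a, window):
    # Classic stride trick: a view of shape (len(a) - window + 1, window).
    shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
    strides = a.strides + (a.strides[-1],)
    return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)

x = np.array([1.0, 2.0, np.nan, 4.0, 5.0])
print(np.nanmean(rolling_window(x, 3), axis=1))  # [1.5 3.  4.5]

With this layout the windows lie along the rows, so the reduction axis for a moving mean is 1 rather than the function's default of 0; the original project's rolling_window may have used the transposed layout.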
Example #7
File: lucy.py  Project: yxlinaqua/lucy
def rl_standard(raw_image, psf, niter):
    """ Standard Richardson-Lucy deconvolution

    Lauer 2002 (arXiv)

    """
   
    psf /= psf.sum()
    # flip the PSF along both axes (psf[::-1] alone would only flip rows)
    psf_inverse = psf[::-1, ::-1]
    lucy = np.ones( raw_image.shape ) * raw_image.mean()

    for i in range( niter ):
        estimate = convolve(lucy, psf, mode='mirror')
        estimate[ np.isnan(estimate) ] = 0

        correction = convolve(raw_image/estimate, psf_inverse, mode='mirror')
        correction[ np.isnan(correction) ] = 0
        print( 'Correction: %s' % correction.mean() )

        lucy *= correction
        print( 'Means: %s %s' % (raw_image.mean(), lucy.mean()) )
        chisq = scipy.nansum((lucy - raw_image)**2 / (lucy)) / (raw_image.size-1)
        print( chisq )

    return lucy
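A usage sketch on synthetic data; it assumes convolve is scipy.ndimage.convolve (consistent with the mode='mirror' keyword above) and that scipy.nansum resolves, i.e. an old scipy, or numpy.nansum on modern stacks:

import numpy as np
from scipy.ndimage import convolve

g = np.exp(-0.5 * (np.linspace(-2, 2, 9) / 0.8) ** 2)
psf = np.outer(g, g)                   # separable Gaussian PSF
truth = np.zeros((64, 64))
truth[20, 20] = truth[40, 45] = 100.0  # two point sources
blurred = convolve(truth, psf / psf.sum(), mode='mirror') + 1e-3  # keep strictly positive
restored = rl_standard(blurred, psf.copy(), niter=20)  # copy: the function normalizes psf in place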
Example #8
def veldist_1d_rolr(plotfilename,phi=_DEFAULTPHI,R=_DEFAULTR,
                    ngrid=201,saveDir='../bar/1dvar/'):
    """
    NAME:
       veldist_1d_rolr
    PURPOSE:
       make a plot showing the influence of the bar R_OLR
    INPUT:
       plotfilename - filename for figure
       phi - Galactocentric azimuth
       R - Galactocentric radius
       ngrid - number of grid-points to calculate the los velocity distribution
               on
       saveDir - save pickles here
    OUTPUT:
       Figure in plotfilename
    HISTORY:
       2010-09-11 - Written - Bovy (NYU)
    """
    rolrs= [0.85,0.9,0.95]

    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    vlosds= []
    basesavefilename= os.path.join(saveDir,'rolr_')
    for rolr in rolrs:
        thissavefilename= basesavefilename+'%.3f.sav' % rolr
        if os.path.exists(thissavefilename):
            print("Restoring los-velocity distribution at R_OLR %.2f" % rolr)
            savefile= open(thissavefilename,'rb')
            vlosd= pickle.load(savefile)
            savefile.close()
        else:
            print("Calculating los-velocity distribution at R_OLR %.2f" % rolr)
            potparams= (rolr,0.01,25.*_degtorad,.8,None)
            vlosd= predictVlos(vloslinspace,
                               l=phi,
                               d=R,
                               distCoord='GCGC',
                               pot='bar',beta=0.,
                               potparams=potparams)
            vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
            savefile= open(thissavefilename,'wb')
            pickle.dump(vlosd,savefile)
            savefile.close()
        vlosds.append(vlosd)
    #Plot
    plot.bovy_print()
    plot.bovy_plot(vloss,vlosds[1],'k-',zorder=3,
                   xrange=[vloslinspace[0],vloslinspace[1]],
                   yrange=[0.,sc.amax(sc.array(vlosds).flatten())*1.1],
                   xlabel=r'$v_{\mathrm{los}} / v_0$')
    plot.bovy_plot(vloss,vlosds[0],ls='-',color='0.75',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[2],ls='-',color='0.5',
                   overplot=True,zorder=2,lw=1.5)
    plot.bovy_text(r'$\mathrm{bar}\ R_{\mathrm{OLR}}$',title=True)
    plot.bovy_text(0.36,.75,r'$R_{\mathrm{OLR}} = 0.95\ R_0$'+'\n'+r'$R_{\mathrm{OLR}} = 0.90\ R_0$'+ '\n'+r'$R_{\mathrm{OLR}} = 0.85\ R_0$')
    plot.bovy_end_print(plotfilename)
Example #9
    def replaceNansByMeans(self):
        """Replace all not-a-number entries in the dataset by the means of the
        corresponding column."""
        for d in self.data.values():
            means = scipy.nansum(d[:self.getLength()], axis=0) / self.getLength()
            for i in range(self.getLength()):
                for j in range(d.dim):
                    if not scipy.isfinite(d[i, j]):
                        d[i, j] = means[j]
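On a modern stack the per-element loop can be vectorized; a rough numpy equivalent for one 2-D array d is below. Note a subtle difference: the original divides nansum by the full column length (NaNs count as zeros), while np.nanmean excludes NaNs from the denominator:

import numpy as np

d = np.array([[1.0, np.nan], [3.0, 4.0]])
mask = np.isnan(d)
d[mask] = np.take(np.nanmean(d, axis=0), np.where(mask)[1])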
Example #10
    def calc_MI(self):
        """
        Calculate the mutual information between signal and response.
        """

        cond_H = -sp.nansum(self.pdf_r_s * sp.log(self.pdf_r_s),
                            axis=0) * self.entropy_pdf_dy
        noise_H = (1 + sp.log(2 * sp.pi *
                              (self.NL_scale * self.meas_noise)**2)) / 2
        self.entropy = (cond_H - noise_H) / sp.log(2)
Example #11
File: cover.py  Project: Minimong/Circulo
def average_out_degree_fraction(cover, weights=None):
    '''
    Out Degree Fraction (ODF) of a node in a cluster is the ratio between its number of external (boundary) edges
    and its internal edges. Average ODF returns the average fraction for the cluster.
    '''
    rv = []
    odf = out_degree_fraction(cover, weights)
    for i in range(len(cover)):
        ratios = odf[i]
        rv += [nansum(ratios) / cover.subgraph(i).vcount()]
    return rv
Example #12
    def mutual_information(x, y, xy):
        """compute mutual information between the associated random variables"""

        # init
        MI = sp.zeros((x.size, y.size))

        for i in range(x.size):
            for j in range(y.size):
                MI[i, j] = xy[i, j] * sp.log(xy[i, j] / (x[i] * y[j]))

        # return
        return sp.nansum(MI)
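A small sanity check, calling the static method directly (numpy standing in for the snippet's sp alias): for an independent joint distribution the mutual information vanishes.

import numpy as sp

Px = sp.array([0.5, 0.5])
Py = sp.array([0.25, 0.75])
Pxy = sp.outer(Px, Py)                  # independent joint distribution
print(mutual_information(Px, Py, Pxy))  # ~0.0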
Example #13
def kullbackLeibler(p, q, dx=1, nan=False):
    """
    NAME:
       kullbackLeibler
    PURPOSE:
       Calculate the Kullback-Leibler divergence D(p||q)
    INPUT:
       p - probability density at points i
       q - another probability density at points i
       dx - distance between points i (can be an array)
       nan - ignore nans
    OUTPUT:
       D(p||q)
    HISTORY:
       2010-05-09 - Written - Bovy (NYU)
    """
    if nan:
        return sc.nansum(p * dx * sc.log(p / q))
    else:
        return sc.sum(p * dx * sc.log(p / q))
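A quick worked example (sc here is the old scipy alias for the numpy namespace, so numpy works as a stand-in):

import numpy as sc

p = sc.array([0.5, 0.25, 0.25])
q = sc.array([1.0 / 3] * 3)
print(kullbackLeibler(p, q))  # ~0.0589 nats
print(kullbackLeibler(p, p))  # 0.0: a density diverges from itself by nothing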
Example #14
    def entropy(x):
        """compute entropy of a discrete random variable"""

        return -sp.nansum(x * sp.log(x))
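Usage note: the nansum makes this follow the 0 log 0 = 0 convention, since a zero bin yields 0 * (-inf) = nan, which nansum then ignores:

import numpy as sp  # stand-in for the snippet's sp alias

print(entropy(sp.array([0.5, 0.5, 0.0])))  # ln 2 ~ 0.693; the zero bin contributes nothing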
Example #15
def invertRSTO(RSTO,Iono,alpha_list=1e-2,invtype='tik',rbounds=[100,200],Nlin=0):
    """ This will run the inversion program given an ionocontainer, a list of
        regularization parameters alpha, and an inversion type. """
    
    nlout,ntout,nl=Iono.Param_List.shape
    if Nlin !=0:
        nl=Nlin
    
    nlin=len(RSTO.Cart_Coords_In)
    time_out=RSTO.Time_Out
    time_in=RSTO.Time_In
    overlaps = RSTO.overlaps
    xin,yin,zin=RSTO.Cart_Coords_In.transpose()
    z_u=sp.unique(zin)
    rplane=sp.sqrt(xin**2+yin**2)*sp.sign(xin)
    r_u=sp.unique(rplane)
    n_z=z_u.size
    n_r=r_u.size
    dims= [n_r,n_z]
    
    rin,azin,elin=RSTO.Sphere_Coords_In.transpose()
    
    anglist=RSTO.simparams['angles']
    ang_vec=sp.array([[i[0],i[1]] for i in anglist])
    
    # trim out cruft
    
    zmin,zmax=[150,500]
    rpmin,rpmax=rbounds#[-50,100]#[100,200]
    altlog= sp.logical_and(zin>zmin,zin<zmax)
    rplog=sp.logical_and(rplane>rpmin,rplane<rpmax)
    allrng= RSTO.simparams['Rangegatesfinal']
    dR=allrng[1]-allrng[0]
    nldir=sp.ceil(int(nl)/2.)
    posang_log1= sp.logical_and(ang_vec[:,0]<=180.,ang_vec[:,0]>=0)
    negang_log1 = sp.logical_or(ang_vec[:,0]>180.,ang_vec[:,0]<0)
    azin_pos = sp.logical_and(azin<=180.,azin>=0)
    azin_neg = sp.logical_or(azin>180.,azin<0)
    minangpos=0
    minangneg=0
    
    
    if sp.any(posang_log1):
        minangpos=ang_vec[posang_log1,1].min()
    if sp.any(negang_log1):
        minangneg=ang_vec[negang_log1,1].min()
    
    rngbounds=[allrng[0]-nldir*dR,allrng[-1]+nldir*dR]
    rng_log=sp.logical_and(rin>rngbounds[0],rin<rngbounds[1])
    elbounds_pos=sp.logical_and(azin_pos,elin>minangpos)
    elbounds_neg=sp.logical_and(azin_neg,elin>minangneg)
    
    elbounds=sp.logical_or(elbounds_pos,elbounds_neg)
    keeplog=sp.logical_and(sp.logical_and(rng_log,elbounds),sp.logical_and(altlog,rplog))
    keeplist=sp.where(keeplog)[0]
    nlin_red=len(keeplist)
    # set up derivative matrix
    dx,dy=diffmat(dims)
    dx_red=dx[keeplist][:,keeplist]
    dy_red=dy[keeplist][:,keeplist]
    # need the sparse vstack to make srue things stay sparse
    D=sp.sparse.vstack((dx_red,dy_red))
    # New parameter matrix
    new_params=sp.zeros((nlin,len(time_out),nl),dtype=Iono.Param_List.dtype)
    if isinstance(alpha_list,numbers.Number):
        alpha_list=[alpha_list]*nl
    ave_datadif=sp.zeros((len(time_out),nl))
    ave_data_const = sp.zeros_like(ave_datadif)
    q=1e10
    for itimen, itime in enumerate(time_out):
        print('Making Outtime {0:d} of {1:d}'.format(itimen+1,len(time_out)))
        #allovers=overlaps[itimen]
        #curintimes=[i[0] for i in allovers]
        #for it_in_n,it in enumerate(curintimes):
        #print('\t Making Intime {0:d} of {1:d}'.format(it_in_n+1,len(curintimes)))
        #A=RSTO.RSTMat[itimen*nlout:(itimen+1)*nlout,it*nlin:(it+1)*nlin]
        A=RSTO.RSTMat[itimen*nlout:(itimen+1)*nlout,itimen*nlin:(itimen+1)*nlin]
        Acvx=cvx.Constant(A[:,keeplist])
        for ip in range(nl):
            alpha=alpha_list[ip]*2
            print('\t\t Making Lag {0:d} of {1:d}'.format(ip+1,nl))
            datain=Iono.Param_List[:,itimen,ip]
            xr=cvx.Variable(nlin_red)
            xi=cvx.Variable(nlin_red)
            if invtype.lower()=='tik':
                constr=alpha*cvx.norm(xr,2)
                consti=alpha*cvx.norm(xi,2)
            elif invtype.lower()=='tikd':
                constr=alpha*cvx.norm(D*xr,2)
                consti=alpha*cvx.norm(D*xi,2)
            elif invtype.lower()=='tv':
                constr=alpha*cvx.norm(D*xr,1)
                consti=alpha*cvx.norm(D*xi,1)
            br=datain.real/q
            bi=datain.imag/q
            if ip==0:
                objective=cvx.Minimize(cvx.norm(Acvx*xr-br,2)+constr)
                constraints= [xr>=0]
                prob=cvx.Problem(objective,constraints)
                result=prob.solve(verbose=True,solver=cvx.SCS,use_indirect=True,max_iters=4000)
#                    new_params[keeplog,it,ip]=xr.value.flatten()
                xcomp=sp.array(xr.value).flatten()*q
            else:
                objective=cvx.Minimize(cvx.norm(Acvx*xr-br,2)+constr)
                prob=cvx.Problem(objective)
                result=prob.solve(verbose=True,solver=cvx.SCS,use_indirect=True,max_iters=4000)
                
                objective=cvx.Minimize(cvx.norm(Acvx*xi-bi,2)+consti)
                prob=cvx.Problem(objective)
                result=prob.solve(verbose=True,solver=cvx.SCS,use_indirect=True,max_iters=4000)
                xcomp=sp.array(xr.value + 1j*xi.value).flatten()*q
#                    new_params[keeplog,it,ip]=xcomp
            new_params[keeplog,itimen,ip]=xcomp
            ave_datadif[itimen,ip]=sp.sqrt(sp.nansum(sp.absolute(A[:,keeplist].dot(xcomp)-datain)**2))
            if invtype.lower()=='tik':
                sumconst=sp.sqrt(sp.nansum(sp.power(sp.absolute(xcomp),2)))
            elif invtype.lower()=='tikd':
                dx=D.dot(xcomp)
                sumconst=sp.sqrt(sp.nansum(sp.power(sp.absolute(dx),2)))
            elif invtype.lower()=='tv':
                dx=D.dot(xcomp)
                sumconst=sp.nansum(sp.absolute(dx))
            ave_data_const[itimen,ip]=sumconst
            # set up nans                    
            new_params[sp.logical_not(keeplog),itimen]=sp.nan
    datadif=sp.nanmean(ave_datadif,axis=0)
    constval=sp.nanmean(ave_data_const,axis=0)
    ionoout=IonoContainer(coordlist=RSTO.Cart_Coords_In,paramlist=new_params,times = time_out,sensor_loc = sp.zeros(3),ver =0,coordvecs =
        ['x','y','z'],paramnames=Iono.Param_Names[:Nlin])
        
    return (ionoout,datadif,constval)
Example #16
def veldist_1d_Rphi(plotfilename,nx=100,ny=20,dx=_XWIDTH/20.,dy=_YWIDTH/20.,
                    ngrid=201,rrange=[0.7,1.3],
                    phirange=[-m.pi/2.,m.pi/2.],
                    saveDir='../bar/1dLarge/',normalize=True,
                    row=None):
    """
    NAME:
       veldist_1d_Rphi
    PURPOSE:
       plot how the los-velocity distribution changes as a function of 
       R and phi
    INPUT:
       nx - number of plots in the x-direction
       ny - number of plots in the y direction
       dx - x-spacing
       dy - y-spacing
       ngrid - number of gridpoints to evaluate the density on
       rrange - range of Galactocentric radii to consider
       phirange - range of Galactic azimuths to consider
       saveDir - directory to save the pickles in
       normalize - if True (default), normalize the los-vd to integrate to one
       row - if set to a row number, calculate the los-velocity distributions 
             for this row only, and do not plot anything (just save for later)
    OUTPUT:
       plot!
    HISTORY:
       2010-04-21 - Written - Bovy (NYU)
    """
    if row is None:
        rowStart= 0
        rowEnd= nx
        calcOnly= False
    else:
        rowStart= row
        rowEnd= rowStart+1
        calcOnly= True

    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    picklebasename= '1d_%i_%i_%i_%.1f_%.1f_%.1f_%.1f' % (nx,ny,ngrid,rrange[0],rrange[1],phirange[0],phirange[1])
    if not os.path.exists(saveDir):
        os.mkdir(saveDir)
    left, bottom = 0.1, 0.1
    width= (nx*_XWIDTH+(nx-1)*dx)/(1.-2.*left)
    height= (ny*_YWIDTH+(ny-1)*dy)/(1.-2.*bottom)
    if not calcOnly:
        plot.bovy_print(fig_width=width,fig_height=height,
                        xtick_major_size=2.,ytick_major_size=2.,
                        xtick_minor_size=0.,ytick_minor_size=0.)
        fig= pyplot.figure()
        #Make theta-R axes
        fudge= 8.0
        thisax= fig.add_axes([left-_XWIDTH/width/fudge,
                              bottom-_XWIDTH/height/fudge,
                              1.+2*_XWIDTH/width/fudge-2*left,
                              1.+2*_XWIDTH/height/fudge-2*bottom])
        xrange= sc.array(phirange)*_radtodeg
        yrange=rrange
        thisax.xaxis.set_label_text(r'$\mathrm{Galactocentric\ azimuth}\ [\mathrm{deg}]$')
        thisax.set_xlim(-90.01,90.01)
        thisax.yaxis.set_label_text(r'$\mathrm{Galactocentric\ radius}\ / R_0$')
        thisax.set_ylim(yrange[0],yrange[1])
        thisax.xaxis.set_major_locator(ticker.MultipleLocator(10.))
        
    for ii in range(rowStart,rowEnd):
        for jj in range(ny):
            if not calcOnly:
                thisax= fig.add_axes([left+ii*(_XWIDTH+dx)/width,
                                      bottom+jj*(_YWIDTH+dy)/height,
                                      _XWIDTH/width,_YWIDTH/height])
            thisR= (rrange[0]+(rrange[1]-rrange[0])/
                    (ny*_YWIDTH+(ny-1)*dy)*(jj*(_YWIDTH+dy)+_YWIDTH/2.))
            thisphi= (phirange[0]+(phirange[1]-phirange[0])/
                      (nx*_XWIDTH+(nx-1)*dx)*(ii*(_XWIDTH+dx)+_XWIDTH/2.))
            thissavefilename= os.path.join(saveDir,picklebasename+'_%i_%i.sav' %(ii,jj))
            if os.path.exists(thissavefilename):
                print("Restoring los-velocity distribution at %.1f, %.1f ..." % (thisR,thisphi))
                savefile= open(thissavefilename,'rb')
                vlosd= pickle.load(savefile)
                axivlosd= pickle.load(savefile)
                savefile.close()
            else:
                print("Calculating los-velocity distribution at %.1f, %.1f ..." % (thisR,thisphi))
                vlosd= predictVlos(vloslinspace,
                                   l=thisphi,
                                   d=thisR,
                                   distCoord='GCGC',
                                   pot='bar',beta=0.,
                                   potparams=(0.9,0.01,25.*_degtorad,.8,None))
                axivlosd= predictVlos(vloslinspace,
                                      l=thisphi,
                                      d=thisR,
                                      distCoord='GCGC',
                                      pot='bar',beta=0.,t=-0.00001,
                                      potparams=(0.9,0.0,25.*_degtorad,.8,None))
                if normalize:
                    vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
                    axivlosd= axivlosd/(sc.nansum(axivlosd)*(vloss[1]-vloss[0]))
                savefile= open(thissavefilename,'wb')
                pickle.dump(vlosd,savefile)
                pickle.dump(axivlosd,savefile)
                savefile.close()
            if calcOnly:
                continue
            fig.sca(thisax)
            plot.bovy_plot(vloss,vlosd,'k',
                           overplot=True,zorder=3)
            plot.bovy_plot(vloss,axivlosd,ls='-',color='0.5',
                           overplot=True,zorder=2)
            thisax.set_xlim(vloslinspace[0],vloslinspace[1])
            thisax.set_ylim(0.,sc.amax(sc.concatenate((axivlosd,vlosd)))*1.1)
            thisax.xaxis.set_ticklabels('')
            thisax.yaxis.set_ticklabels('')
    if not calcOnly:
        plot.bovy_end_print(plotfilename)
Example #17
File: analysis.py  Project: firasm/CEST
def fit_5_peaks_cest(scn_to_analyse, fitrounds = 1):

    scn = sarpy.Scan(scn_to_analyse)
    pdata_num = 0
    x_size = scn.pdata[pdata_num].data.shape[0]
    y_size = scn.pdata[pdata_num].data.shape[1]  
    try:
        roi = scn.adata['roi'].data
    except KeyError:
        roi = scn.pdata[0].data[:,:,0]*0+1

    # Get the bbox so that the whole image isn't fit
    try:
        bbox = scn.adata['bbox'].data
    except KeyError:       
        bbox = numpy.array([0,x_size-1,0,y_size-1])   

    datashape = roi.shape
    roi_reshaped = numpy.reshape(roi,[roi.shape[0],roi.shape[1],1])
    cestscan_roi = scn.pdata[0].data * roi_reshaped       

    # Fit multiple peaks, need some empty arrays
    offst = numpy.empty_like(roi) + numpy.nan
    pk1_amp = numpy.empty_like(roi) + numpy.nan
    pk1_pos = numpy.empty_like(roi) + numpy.nan
    pk1_width = numpy.empty_like(roi) + numpy.nan

    pk2_amp = numpy.empty_like(roi) + numpy.nan
    pk2_pos = numpy.empty_like(roi) + numpy.nan
    pk2_width = numpy.empty_like(roi) + numpy.nan

    pk3_amp = numpy.empty_like(roi) + numpy.nan
    pk3_pos = numpy.empty_like(roi) + numpy.nan
    pk3_width = numpy.empty_like(roi) + numpy.nan

    pk4_amp = numpy.empty_like(roi) + numpy.nan
    pk4_pos = numpy.empty_like(roi) + numpy.nan
    pk4_width = numpy.empty_like(roi) + numpy.nan

    water_amp = numpy.empty_like(roi) + numpy.nan
    water_pos = numpy.empty_like(roi) + numpy.nan
    water_width = numpy.empty_like(roi) + numpy.nan

    fit_quality = numpy.empty_like(roi) + numpy.nan
    fit_params_arr = numpy.empty_like(roi, dtype=object)
    fit_params_arr[:] = [numpy.nan]
    ppm_corrected_arr = numpy.empty_like(roi, dtype=object)

    # Defining parameters
    freq_list = scn.method.CEST_FreqListPPM

    ppm_limit_min = -50
    ppm_limit_max = 50
    exclude_ppm = 200
    normalize_to_ppm = 66.6

    possibleNormalizations = [i for i, x in enumerate(freq_list) if numpy.abs(x - normalize_to_ppm) <1E-4]
    normalizeTo = possibleNormalizations[-1]

    # Get only the frequencies within the ppm_limit
    ppm_filtered = [f for f in freq_list if ppm_limit_max > f > ppm_limit_min]

    # Exclude the dummy frequencies at the beginning (66.6 ppm)
    ppm_filtered = sorted([n for n in ppm_filtered if n!= exclude_ppm])

    # Get the index of the good frequencies relative to the original list 
    ppm_filtered_ind = [freq_list.index(c) for c in ppm_filtered]  

    # get the freqs that'll be used for water fit
    water_fit_freqs = [f for f in ppm_filtered if (numpy.abs(f)< 3.)]
    water_fit_freqs_ind = sorted([ppm_filtered.index(c) for c in water_fit_freqs])

    # Create some empty arrays
    water_shifts = numpy.empty_like(roi) + numpy.nan
    new_shifted = numpy.empty(shape=(water_shifts.shape[0], water_shifts.shape[1], len(ppm_filtered))) + numpy.nan

    # Create temporary structs to store paramArrays
    tempstruct = numpy.zeros((1), dtype=[('offset', 'float64'),
       ('A1', 'float64'),('w1', 'float64'),('p1', 'float64'),
       ('A2', 'float64'),('w2', 'float64'),('p2', 'float64'),
       ('A3', 'float64'),('w3', 'float64'),('p3', 'float64'),
       ('A4', 'float64'),('w4', 'float64'),('p4', 'float64'),
       ('water_A', 'float64'),('water_w', 'float64'),('water_p', 'float64')])

    newstruct = numpy.zeros(roi.shape, dtype=[('offset', 'float64'),
       ('A1', 'float64'),('w1', 'float64'),('p1', 'float64'),
       ('A2', 'float64'),('w2', 'float64'),('p2', 'float64'),
       ('A3', 'float64'),('w3', 'float64'),('p3', 'float64'),
       ('A4', 'float64'),('w4', 'float64'),('p4', 'float64'),
       ('water_A', 'float64'),('water_w', 'float64'),('water_p', 'float64')])

    # Nan the array so there are no zeroes anywhere
    newstruct[:] = numpy.nan
    #tempstruct[:] = numpy.nan

    # Fit count, this counts the number of rounds the data has been fit
    fitcount = 0

    while fitcount < fitrounds:
        for xval in range(bbox[0],bbox[1]):    
            for yval in range(bbox[2],bbox[3]):
                # Get the data and normalize it to index of normalize_to_ppm
                tmp = cestscan_roi[xval,yval][ppm_filtered_ind] / scn.pdata[0].data[xval,yval,normalizeTo]           

                # Check to make sure I'm inside the ROI
                if numpy.isfinite(numpy.sum(tmp)):            
                    # First do the water fit and shift the data so water is at 0  
                    shiftParams = fit_water_peak(tmp[water_fit_freqs_ind],water_fit_freqs,allParams=True)
                    shift = shiftParams[3]
                    water_shifts[xval,yval] = shift

                    # Interpolation happens here
                    if numpy.isfinite(shift):
                        s_shifted_back = scipy.interp(ppm_filtered, ppm_filtered+shift/2, tmp)
                        new_shifted[xval,yval,:] = s_shifted_back       
                    else:
                        print(shift)
                        pass            

                    testParams = get_neighbours_starting(fit_params_arr,xval,yval)
                    testParams = h_convertBetweenStructArrays(testParams,toType = 'array')

                    fit_params,cov,infodict,mesg,ier = scipy.optimize.leastsq(
                                                                h_residual_Zspectrum_N,
                                                                testParams,
                                                                args=(new_shifted[xval,yval], ppm_filtered), 
                                                                full_output = True,
                                                                maxfev = 900,
                                                                ftol =1E-9)
                    # Specify paramsets for peaks:
                    #TOFIX: why is the offset applied to each peak
                    #pk1 = [fit_params[0]]+list(fit_params[1:4])
                    #pk2 = [fit_params[0]]+list(fit_params[4:7])
                    #pk3 = [fit_params[0]]+list(fit_params[7:10])
                    #pk4 = [fit_params[0]]+list(fit_params[10:13])
                    #waterpk = [fit_params[0]]+list(fit_params[13:16]) 

                    offst[xval,yval] = fit_params[0]
                    pk1_amp[xval,yval] = fit_params[1]
                    pk1_width[xval,yval] = fit_params[2]
                    pk1_pos[xval,yval] = fit_params[3]

                    pk2_amp[xval,yval] = fit_params[4]
                    pk2_width[xval,yval] = fit_params[5]
                    pk2_pos[xval,yval] = fit_params[6]

                    pk3_amp[xval,yval] = fit_params[7]
                    pk3_width[xval,yval] = fit_params[8]
                    pk3_pos[xval,yval] = fit_params[9]

                    pk4_amp[xval,yval] = fit_params[10]
                    pk4_width[xval,yval] = fit_params[11]            
                    pk4_pos[xval,yval] = fit_params[12]

                    water_amp[xval,yval] = fit_params[13]
                    water_width[xval,yval] = fit_params[14]            
                    water_pos[xval,yval] = fit_params[15]                
                  
                    fit_quality[xval,yval] = scipy.nansum(numpy.abs(new_shifted[xval,yval] - h_zspectrum_N(fit_params,ppm_filtered-shift)))
                    fit_params_arr[xval,yval] = fit_params
                    ppm_corrected_arr[xval,yval] = ppm_filtered

        fitcount+=1 # increment fitcounter
    
    # Save the data as a structured array

    newstruct['offset'] = offst
    newstruct['A1'] = pk1_amp
    newstruct['w1'] = pk1_width
    newstruct['p1'] = pk1_pos
    newstruct['A2'] = pk2_amp
    newstruct['w2'] = pk2_width
    newstruct['p2'] = pk2_pos
    newstruct['A3'] = pk3_amp
    newstruct['w3'] = pk3_width
    newstruct['p3'] = pk3_pos
    newstruct['A4'] = pk4_amp
    newstruct['w4'] = pk4_width
    newstruct['p4'] = pk4_pos
    newstruct['water_A'] = water_amp
    newstruct['water_w'] = water_width
    newstruct['water_p'] = water_pos

    return {'':newstruct,'fit_quality':fit_quality}
Example #18
def parametersweep(basedir,configfile,acfdir='ACF',invtype='tik'):
    """ 
        This function will run the inversion numerous times with different constraint
        parameters. It will create a directory called Cost and place the results there.
        Input
        basedir - The directory that holds all of the data for the simulator.
        configfile - The ini file for the simulation.
        acfdir - The directory within basedir that holds the acfs to be inverted.
        invtype - The inversion method that will be tested. Can be tik, tikd, or tv.
    """

    alpha_sweep=sp.logspace(-3.5,sp.log10(7),25)
    costdir = os.path.join(basedir,'Cost')
    ionoinfname=os.path.join(basedir,acfdir,'00lags.h5')
    ionoin=IonoContainer.readh5(ionoinfname)
    
    dirio = ('Spectrums','Mat','ACFMat')
    inputdir = os.path.join(basedir,dirio[0])
    
    dirlist = glob.glob(os.path.join(inputdir,'*.h5'))
    (listorder,timevector,filenumbering,timebeg,time_s) = IonoContainer.gettimes(dirlist)
    Ionolist = [dirlist[ikey] for ikey in listorder]
    
    RSTO = RadarSpaceTimeOperator(Ionolist,configfile,timevector,mattype='Sim')
    
    npts=RSTO.simparams['numpoints']
    
    ionospec=makeionocombined(dirlist)
    if npts==ionospec.Param_List.shape[-1]:
        tau,acfin=spect2acf(ionospec.Param_Names,ionospec.Param_List)
        nloc,ntimes=acfin.shape[:2]
        # apply the ambiguity function to get the original acf
        ambmat=RSTO.simparams['amb_dict']['WttMatrix']
        np=ambmat.shape[0]
        acfin_amb=sp.zeros((nloc,ntimes,np),dtype=acfin.dtype)

        for iloc,locarr in enumerate(acfin):
            for itime,acfarr in enumerate(locarr):
                acfin_amb[iloc,itime]=sp.dot(ambmat,acfarr)
        acfin_amb=acfin_amb[:,0]
    else:
        acfin_amb=ionospec.Param_List[:,0]
        
    if not os.path.isdir(costdir):
        os.mkdir(costdir)
    # pickle file stuff 
    pname=os.path.join(costdir,'cost{0}-{1}.pickle'.format(acfdir,invtype))

    alpha_list=[]
    errorlist=[]
    errorlaglist=[]
    datadiflist=[]
    constlist=[]
    if 'perryplane' in basedir.lower() or 'simpdata' in basedir.lower():
        rbounds=[-500,500]
    else:
        rbounds=[0,500]

    alpha_list_new=alpha_sweep.tolist()
    for i in alpha_list:
        if i in alpha_list_new:
            alpha_list_new.remove(i)
    
    for i in alpha_list_new:
        ionoout,datadif,constdif=invertRSTO(RSTO,ionoin,alpha_list=i,invtype=invtype,rbounds=rbounds,Nlin=1)
        
        datadiflist.append(datadif)
        constlist.append(constdif)
        acfout=ionoout.Param_List[:,0]
        alpha_list.append(i)
        outdata=sp.power(sp.absolute(acfout-acfin_amb),2)
        aveerror=sp.sqrt(sp.nanmean(outdata,axis=0))
        errorlaglist.append(aveerror)
        errorlist.append(sp.nansum(aveerror))
        
    pickleFile = open(pname, 'wb')
    pickle.dump([alpha_list,errorlist,datadiflist,constlist,errorlaglist],pickleFile)
    pickleFile.close()
    mkalphalist(pname)
    alphaarr=sp.array(alpha_list)
    errorarr=sp.array(errorlist)
    errorlagarr=sp.array(errorlaglist)
    datadif=sp.array(datadiflist)
    constdif=sp.array(constlist)
    fig,axlist,axmain=plotalphaerror(alphaarr,errorarr,errorlagarr)
    fig.savefig(os.path.join(costdir,'cost{0}-{1}.png'.format(acfdir,invtype)))
    
    fig,axlist=plotLcurve(alphaarr,datadif,constdif)
    fig.savefig(os.path.join(costdir,'lcurve{0}-{1}.png'.format(acfdir,invtype)))
Example #19
def veldist_1d_slope(plotfilename,phi=_DEFAULTPHI,R=_DEFAULTR,
                     ngrid=201,saveDir='../bar/1dvar/'):
    """
    NAME:
       veldist_1d_slope
    PURPOSE:
       make a plot showing the influence of the shape of the rotation curve
    INPUT:
       plotfilename - filename for figure
       phi - Galactocentric azimuth
       R - Galactocentric radius
       ngrid - number of grid-points to calculate the los velocity distribution
               on
       saveDir - save pickles here
    OUTPUT:
       Figure in plotfilename
    HISTORY:
       2010-05-15 - Written - Bovy (NYU)
    """
    slopes= [-0.2,-0.1,0.,0.1,0.2]

    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    vlosds= []
    basesavefilename= os.path.join(saveDir,'slope_')
    for slope in slopes:
        thissavefilename= basesavefilename+'%.1f.sav' % slope
        if os.path.exists(thissavefilename):
            print("Restoring los-velocity distribution at slope %.1f" % slope)
            savefile= open(thissavefilename,'rb')
            vlosd= pickle.load(savefile)
            savefile.close()
        else:
            print("Calculating los-velocity distribution at slope %.1f" % slope)
            potparams= (0.9,0.01,25.*_degtorad,.8,None)
            vlosd= predictVlos(vloslinspace,
                               l=phi,
                               d=R,
                               distCoord='GCGC',
                               pot='bar',beta=slope,
                               potparams=potparams)
            vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
            savefile= open(thissavefilename,'wb')
            pickle.dump(vlosd,savefile)
            savefile.close()
        vlosds.append(vlosd)
    #Plot
    plot.bovy_print()
    plot.bovy_plot(vloss,vlosds[2],'k-',zorder=3,
                   xrange=[vloslinspace[0],vloslinspace[1]],
                   yrange=[0.,sc.nanmax(sc.array(vlosds).flatten())*1.1],
                   xlabel=r'$v_{\mathrm{los}} / v_0$')
    plot.bovy_plot(vloss,vlosds[0],ls='-',color='0.75',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[1],ls='-',color='0.60',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[3],ls='-',color='0.45',
                   overplot=True,zorder=2,lw=1.5)
    plot.bovy_plot(vloss,vlosds[4],ls='-',color='0.3',
                   overplot=True,zorder=2,lw=1.5)
    plot.bovy_text(r'$\mathrm{shape\ of\ the\ rotation\ curve}$',title=True)
    plot.bovy_text(0.5,.5,r'$\beta = -0.2$'+'\n'+r'$\beta = -0.1$'+ '\n'+
                   r'$\beta = \phantom{-}0.0$'+'\n'+
                   r'$\beta= \phantom{-}0.1$'+'\n'+
                   r'$\beta= \phantom{-}0.2$')
    plot.bovy_end_print(plotfilename)
Example #20
def extract(data, varimg, fitwidth=10., extractwidth=1.5, thresh=5.):
    """
	extract(data, varimg, fitwidth=10., extractwidth=1.5, thresh=5.)

	From an input 2d array, find and extract spectral traces.

	Inputs:
	  data       - 2d science spectrum
	  varimg     - 2d variance spectrum
	  fitwidth   - width to fit profile to in pixels
	  extractwidth - width to extract (in sigma)
	  thresh       - signal/noise threshold for extraction

	Outputs:
	  a list containing the [profile, extracted spectrum, a smoothed
	    spectrum, the extracted variance spectrum] for each extracted
	    trace
	"""

    WIDTH = fitwidth
    NSIG = extractwidth
    NOISE = thresh
    FILTSIZE = 7

    data = data.copy()
    spectra = []

    # Replace nan with zero
    data[scipy.isnan(data)] = 0.
    varimg[scipy.isnan(varimg)] = 0.
    data[scipy.isinf(data)] = 0.
    varimg[scipy.isinf(varimg)] = 0.

    # Create model of real flux. We ignore the slit ends, which may have
    #  artifacts from the resampling.
    slit = data[:, 8:-8].astype(scipy.float32)
    var = varimg[:, 8:-8]

    # OK...so negative-variance also isn't good; set these pixels to zero
    var[var < 0] = 0

    # Create noise models
    sigmaimg = slit / scipy.sqrt(var)
    highpix = scipy.where(sigmaimg > 1.5, sigmaimg, 0.)
    source_columns = highpix.sum(axis=0)

    # MASKING DISABLED (this would take only columns with lotsa flux...)
    #	mask = scipy.where(source_columns>4.,1.,scipy.nan)
    mask = source_columns * 0.

    # Condition 1, dealing with bad pixels
    if (var == 0).any():
        cond = var == 0
        var[cond] = scipy.nan
        slit[cond] = scipy.nan
        mask = scipy.where(cond, 0, 1)
        flux = scipy.nansum(slit / var, axis=1) / scipy.nansum(1. / var,
                                                               axis=1)
        noise = scipy.sqrt(scipy.nansum(var, axis=1)) / mask.sum(axis=1)
    # Condition 2, no masking
    elif scipy.nansum(mask) == 0:
        flux = (slit / var).sum(axis=1) / (1. / var).sum(axis=1)
        noise = scipy.sqrt(var.sum(axis=1)) / mask.size
    # Condition 3, masking
    else:
        fluxmodel = slit * mask
        noisemodel = var * mask

        noise = scipy.sqrt(scipy.nansum(noisemodel,
                                        axis=1)) / scipy.nansum(mask)
        flux = stats.stats.nanmean(fluxmodel, axis=1)

    # A smooth S/N estimate for the slit
#	sig2noise = ndimage.gaussian_filter1d(flux,1)/noise

    row = scipy.arange(flux.size)
    model = flux.copy()
    nspec = 10  # Maximum number of attempts
    while nspec:
        nspec -= 1

        # Fit a gaussian around the peak of the S/N model
        start = model.argmax() - WIDTH
        end = model.argmax() + WIDTH + 1
        if start < 0:
            start = 0
        if end > model.size:
            end = model.size

        fitarr = model[start:end]
        p = scipy.zeros(4)
        p[1] = fitarr.max()
        p[2] = fitarr.argmax()
        p[3] = 2.

        fit, val = special_functions.ngaussfit(fitarr, p)
        chi2 = val / (fitarr.size - 3)
        fit[2] += start

        # If the centroid doesn't lie on the slit, use the edge pixel
        midcol = fit[2].round()
        if midcol >= flux.size:
            midcol = flux.size - 1
        elif midcol < 0:
            midcol = 0
        # Require a reasonable S/N and width
        if fit[3] > fitarr.size / 2. or fit[3] < 0.85:
            break
        elif fit[0] > 0 and fit[1] < NOISE * noise[midcol]:
            break
        elif fit[0] < 0 and fit[1] - fit[0] < NOISE * noise[midcol]:
            break
        else:
            fit[1] += fit[0]
            fit[0] = 0.
            # Subtract away a model of the source
            source = special_functions.ngauss(row, fit)
            model -= scipy.where(source > noise, source, 0.)

            # Skip Slits off the edge
            if fit[2] < 0 or fit[2] >= flux.size:
                continue
            # Skip residuals!
            if fit[1] < scipy.sqrt(flux[int(fit[2])]):
                continue
            fit[1] = 1.
            weight = special_functions.ngauss(row, fit)
            cond = (row > fit[2] - fit[3] * NSIG) & (row <
                                                     fit[2] + fit[3] * NSIG)
            weight = scipy.where(cond, weight, 0)
            weight /= weight.sum()
            spec = weight * data.T
            spec = spec.sum(axis=1)
            varspec = weight * varimg.T
            varspec = varspec.sum(axis=1)
            spec[varspec == 0] = 0.
            smooth = signal.wiener(spec, FILTSIZE, varspec)
            smooth[scipy.isnan(smooth)] = 0.
            spectra.append([fit, spec, smooth, varspec])
    return spectra
Example #21

    stitchLower = sp.searchsorted(wavelength, 5570, side="left")
    stitchUpper = sp.searchsorted(wavelength, 5590, side="right")

    flux[stitchLower:stitchUpper] = sp.nan

    flux[flux < 0] = sp.nan

    smoothFlux = convolve(flux, Box1DKernel(width))[5 * width:-5 * width]
    flux = flux[5 * width:-5 * width]
    wavelength = wavelength[5 * width:-5 * width]

    totalCounts = sp.nansum(flux)
    #spike = sp.median(sp.diff(flux[::10]))

    plt.plot(wavelength, smoothFlux)
    '''
    start
    '''

    #testing = sp.diff(flux[::10])
    #testing2 = (testing==testing and abs(testing)>10)
    #    counts = [abs(testing)]
    #to do: look into 'spikeness'
    '''
    end
    '''
Example #22
File: bovy_plot.py  Project: ritabanc/galpy
def bovy_dens2d(X, **kwargs):
    """
    NAME:

       bovy_dens2d

    PURPOSE:

       plot a 2d density with optional contours

    INPUT:

       first argument is the density

       matplotlib.pyplot.imshow keywords (see http://matplotlib.sourceforge.net/api/axes_api.html#matplotlib.axes.Axes.imshow)

       xlabel - (raw string!) x-axis label, LaTeX math mode, no $s needed

       ylabel - (raw string!) y-axis label, LaTeX math mode, no $s needed

       xrange

       yrange

       noaxes - don't plot any axes

       overplot - if True, overplot

       colorbar - if True, add colorbar

       shrink= colorbar argument: shrink the colorbar by the factor (optional)

       Contours:
       
       contours - if True, draw contours (10 by default)

       levels - contour-levels

       cntrmass - if True, the density is a probability and the levels 
                  are probability masses contained within the contour

       cntrcolors - colors for contours (single color or array)

       cntrlabel - label the contours

       cntrlw, cntrls - linewidths and linestyles for contour

       cntrlabelsize, cntrlabelcolors,cntrinline - contour arguments

       onedhists - if True, make one-d histograms on the sides

       onedhistcolor - histogram color

       retAxes= return all Axes instances

    OUTPUT:

    HISTORY:

       2010-03-09 - Written - Bovy (NYU)

    """
    # Pull the plot-control keywords out of kwargs; everything that remains
    # is passed through to matplotlib.pyplot.imshow.
    overplot = kwargs.pop('overplot', False)
    if not overplot:
        pyplot.figure()
    xlabel = kwargs.pop('xlabel', None)
    ylabel = kwargs.pop('ylabel', None)
    zlabel = kwargs.pop('zlabel', None)
    extent = kwargs.pop('extent', None)
    if extent is None:
        xlimits = list(kwargs.pop('xrange', [0, X.shape[0]]))
        ylimits = list(kwargs.pop('yrange', [0, X.shape[1]]))
        extent = xlimits + ylimits
    if 'aspect' not in kwargs:
        kwargs['aspect'] = (extent[1] - extent[0]) / float(extent[3] -
                                                           extent[2])
    noaxes = kwargs.pop('noaxes', False)
    cntrmass = bool(kwargs.pop('cntrmass', False))
    contours = (bool(kwargs.pop('contours', False)) or 'levels' in kwargs
                or cntrmass)
    if 'levels' in kwargs:
        levels = kwargs.pop('levels')
    elif contours:
        if cntrmass:
            levels = sc.linspace(0., 1., _DEFAULTNCNTR)
        elif True in sc.isnan(sc.array(X)):
            levels = sc.linspace(sc.nanmin(X), sc.nanmax(X), _DEFAULTNCNTR)
        else:
            levels = sc.linspace(sc.amin(X), sc.amax(X), _DEFAULTNCNTR)
    cntrcolors = kwargs.pop('cntrcolors', 'k')
    cntrlabel = bool(kwargs.pop('cntrlabel', False))
    cntrlw = kwargs.pop('cntrlw', None)
    cntrls = kwargs.pop('cntrls', None)
    cntrlabelsize = kwargs.pop('cntrlabelsize', None)
    cntrlabelcolors = kwargs.pop('cntrlabelcolors', None)
    cntrinline = kwargs.pop('cntrinline', None)
    retCumImage = kwargs.pop('retCumImage', False)
    cb = kwargs.pop('colorbar', False)
    shrink = kwargs.pop('shrink', None)
    onedhists = kwargs.pop('onedhists', False)
    onedhistcolor = kwargs.pop('onedhistcolor', 'k')
    retAxes = kwargs.pop('retAxes', False)
    if onedhists:
        if overplot: fig = pyplot.gcf()
        else: fig = pyplot.figure()
        nullfmt = NullFormatter()  # no labels
        # definitions for the axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left + width
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        axScatter = pyplot.axes(rect_scatter)
        axHistx = pyplot.axes(rect_histx)
        axHisty = pyplot.axes(rect_histy)
        # no labels
        axHistx.xaxis.set_major_formatter(nullfmt)
        axHistx.yaxis.set_major_formatter(nullfmt)
        axHisty.xaxis.set_major_formatter(nullfmt)
        axHisty.yaxis.set_major_formatter(nullfmt)
        fig.sca(axScatter)
    ax = pyplot.gca()
    ax.set_autoscale_on(False)
    out = pyplot.imshow(X, extent=extent, **kwargs)
    pyplot.axis(extent)
    _add_axislabels(xlabel, ylabel)
    _add_ticks()
    #Add colorbar
    if cb:
        if shrink is None:
            if 'aspect' in kwargs:
                shrink = sc.amin([float(kwargs['aspect']) * 0.87, 1.])
            else:
                shrink = 0.87
        CB1 = pyplot.colorbar(out, shrink=shrink)
        if zlabel is not None:
            if zlabel[0] != '$':
                thiszlabel = r'$' + zlabel + '$'
            else:
                thiszlabel = zlabel
            CB1.set_label(thiszlabel)
    if contours or retCumImage:
        aspect = kwargs.get('aspect')
        origin = kwargs.get('origin')
        if cntrmass:
            #Sum from the top down!
            X[sc.isnan(X)] = 0.
            sortindx = sc.argsort(X.flatten())[::-1]
            cumul = sc.cumsum(sc.sort(X.flatten())[::-1]) / sc.sum(X.flatten())
            cntrThis = sc.zeros(sc.prod(X.shape))
            cntrThis[sortindx] = cumul
            cntrThis = sc.reshape(cntrThis, X.shape)
        else:
            cntrThis = X
        if contours:
            cont = pyplot.contour(cntrThis,
                                  levels,
                                  colors=cntrcolors,
                                  linewidths=cntrlw,
                                  extent=extent,
                                  aspect=aspect,
                                  linestyles=cntrls,
                                  origin=origin)
            if cntrlabel:
                pyplot.clabel(cont,
                              fontsize=cntrlabelsize,
                              colors=cntrlabelcolors,
                              inline=cntrinline)
    if noaxes:
        ax.set_axis_off()
    #Add onedhists
    if not onedhists:
        if retCumImage:
            return cntrThis
        elif retAxes:
            return pyplot.gca()
        else:
            return out
    histx = sc.nansum(X.T, axis=1) * m.fabs(ylimits[1] - ylimits[0]) / X.shape[
        1]  #nansum bc nan is *no dens value*
    histy = sc.nansum(X.T,
                      axis=0) * m.fabs(xlimits[1] - xlimits[0]) / X.shape[0]
    histx[sc.isnan(histx)] = 0.
    histy[sc.isnan(histy)] = 0.
    dx = (extent[1] - extent[0]) / float(len(histx))
    axHistx.plot(sc.linspace(extent[0] + dx, extent[1] - dx, len(histx)),
                 histx,
                 drawstyle='steps-mid',
                 color=onedhistcolor)
    dy = (extent[3] - extent[2]) / float(len(histy))
    axHisty.plot(histy,
                 sc.linspace(extent[2] + dy, extent[3] - dy, len(histy)),
                 drawstyle='steps-mid',
                 color=onedhistcolor)
    axHistx.set_xlim(axScatter.get_xlim())
    axHisty.set_ylim(axScatter.get_ylim())
    axHistx.set_ylim(0, 1.2 * sc.amax(histx))
    axHisty.set_xlim(0, 1.2 * sc.amax(histy))
    if retCumImage:
        return cntrThis
    elif retAxes:
        return (axScatter, axHistx, axHisty)
    else:
        return out
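A minimal call sketch with hypothetical data (assumes the module-level imports of bovy_plot.py, e.g. pyplot and scipy as sc, are in place):

import numpy
X = numpy.random.randn(50, 50)
bovy_dens2d(X, origin='lower', cmap='gist_yarg',
            xlabel=r'x', ylabel=r'y',
            contours=True, cntrcolors='r', colorbar=True)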
Example #23
def veldist_1d_apogee(plotfilename,l=250./180.*m.pi,d=0.25,
                      ngrid=201,saveDir='../bar/apogee/'):
    """
    NAME:
       veldist_1d_apogee
    PURPOSE:
       make a plot showing a 1d velocity distribution in an apogee los
    INPUT:
       plotfilename - filename for figure
       l - Galactic longitude
       d - distance from the Sun
       ngrid - number of grid-points to calculate the los velocity distribution
               on
       saveDir - save pickles here
    OUTPUT:
       Figure in plotfilename
    HISTORY:
       2010-05-28 - Written - Bovy (NYU)
    """
    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    basesavefilename= os.path.join(saveDir,'apogee_')
    thissavefilename= basesavefilename+'%.2f_%.2f.sav' % (d,l)
    if os.path.exists(thissavefilename):
        print("Restoring apogee los-velocity distribution at d,l = %.2f,%.2f" % (d,l))
        savefile= open(thissavefilename,'rb')
        vlosd= pickle.load(savefile)
        axivlosd= pickle.load(savefile)
        savefile.close()
    else:
        print("Calculating apogee los-velocity distribution at d,l = %.2f,%.2f" % (d,l))
        potparams= (0.9,0.01,25.*_degtorad,.8,None)
        vlosd= predictVlos(vloslinspace,
                           l=l,
                           d=d,
                           distCoord='sun',
                           pot='bar',beta=0.,
                           potparams=potparams)
        vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
        potparams= (0.9,0.00,25.*_degtorad,.8,None)
        axivlosd= predictVlos(vloslinspace,
                              l=l,
                              d=d,
                              t=0.01,
                              distCoord='sun',
                              pot='bar',beta=0.,
                              potparams=potparams)
        axivlosd= axivlosd/(sc.nansum(axivlosd)*(vloss[1]-vloss[0]))
        savefile= open(thissavefilename,'wb')
        pickle.dump(vlosd,savefile)
        pickle.dump(axivlosd,savefile)
        savefile.close()
    #Plot
    plot.bovy_print()
    plot.bovy_plot(vloss,vlosd,'k',
                   xlabel=r'$v_{\mathrm{los}} / v_0$',zorder=3)
    plot.bovy_plot(vloss,axivlosd,ls='-',color='0.5',
                   overplot=True,zorder=2)
    thisax= pyplot.gca()
    thisax.set_xlim(vloslinspace[0],vloslinspace[1])
    thisax.set_ylim(0.,sc.amax(sc.concatenate((axivlosd,vlosd)))*1.1)
    plot.bovy_text(r'$d = %.2f R_0$' % d + '\n'+r'$l = %.0f^\circ$' %(l/m.pi*180.),
                       top_right=True)
    plot.bovy_end_print(plotfilename)
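
Note: the normalization above turns the sampled distribution into a proper probability density; dividing by nansum times the bin width makes it integrate to one while treating NaN samples as missing density. A minimal numpy sketch of the same pattern (numpy.nansum is the modern home of the nansum alias used in these examples; the toy distribution below is made up):

import numpy as np

# Hypothetical sampled los-velocity distribution with a few missing (NaN) values.
vloss = np.linspace(-0.9, 0.9, 201)
vlosd = np.exp(-0.5 * (vloss / 0.2) ** 2)
vlosd[::50] = np.nan

dv = vloss[1] - vloss[0]
# Same normalization as above: NaNs contribute nothing to the total.
vlosd = vlosd / (np.nansum(vlosd) * dv)
print(np.nansum(vlosd) * dv)  # ~1.0
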
Example #28
def veldist_1d_convolve(plotfilename,phi=_DEFAULTPHI,R=_DEFAULTR,
                        ngrid=201,saveDir='../bar/1dvar/'):
    """
    NAME:
       veldist_1d_convolve
    PURPOSE:
       make a plot showing the influence of the distance uncertainties
    INPUT:
       plotfilename - filename for figure
       phi - Galactocentric azimuth
       R - Galactocentric radius
       ngrid - number of grid-points to calculate the los velocity distribution
               on
       saveDir - save pickles here
    OUTPUT:
       Figure in plotfilename
    HISTORY:
       2010-05-15 - Written - Bovy (NYU)
    """
    convolves= [0.,0.2,0.3]

    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    vlosds= []
    basesavefilename= os.path.join(saveDir,'convolve_')
    for distsig in convolves:
        thissavefilename= basesavefilename+'%.1f.sav' % distsig
        if os.path.exists(thissavefilename):
            print "Restoring los-velocity distribution at distance uncertainties %.1f" % distsig
            savefile= open(thissavefilename,'r')
            vlosd= pickle.load(savefile)
            savefile.close()
        else:
            print "Calculating los-velocity distribution at distance uncertainties %.1f" % distsig
            potparams= (0.9,0.01,25.*_degtorad,.8,None)
            if distsig == 0.:
                vlosd= predictVlos(vloslinspace,
                                   l=phi,
                                   d=R,
                                   distCoord='GCGC',
                                   pot='bar',beta=0.,
                                   potparams=potparams)
            else:
                vlosd= predictVlosConvolve(vloslinspace,
                                           l=phi,
                                           d=R,
                                           distCoord='GCGC',
                                           pot='bar',beta=0.,
                                           potparams=potparams,
                                           convolve=distsig)
            vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
            savefile= open(thissavefilename,'w')
            pickle.dump(vlosd,savefile)
            savefile.close()
        vlosds.append(vlosd)
    #Plot
    plot.bovy_print()
    plot.bovy_plot(vloss,vlosds[0],'k-',zorder=3,
                   xrange=[vloslinspace[0],vloslinspace[1]],
                   yrange=[0.,sc.nanmax(sc.array(vlosds).flatten())*1.1],
                   xlabel=r'$v_{\mathrm{los}} / v_0$')
    plot.bovy_plot(vloss,vlosds[1],ls='-',color='0.75',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[2],ls='-',color='0.6',
                   overplot=True,zorder=2,lw=2.)
    #plot.bovy_plot(vloss,vlosds[3],ls='-',color='0.45',
    #               overplot=True,zorder=2,lw=2.)
    plot.bovy_text(r'$\mathrm{distance\ uncertainties}$',title=True)
    plot.bovy_text(0.5,.65,r'$\sigma_d = 0$'+'\n'+r'$\sigma_d = 20 \%$'+'\n'+r'$\sigma_d = 30 \%$')
    plot.bovy_end_print(plotfilename)
Example #29
    def entropy(x):
        """compute entropy of a discrete random variable"""

        return -sp.nansum(x * sp.log(x))
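
Note: for a distribution with empty bins, x * log(x) evaluates to 0 * -inf = nan at x = 0, and nansum silently drops that term, which matches the correct limit of 0. A minimal self-contained sketch of the same trick using numpy (sp here is the old scipy alias; numpy.nansum behaves the same way):

import numpy as np

def entropy(x):
    """Entropy of a discrete distribution; nansum drops the 0*log(0) -> nan terms."""
    x = np.asarray(x, dtype=float)
    with np.errstate(divide='ignore', invalid='ignore'):
        return -np.nansum(x * np.log(x))

print(entropy([0.5, 0.5, 0.0]))  # ln(2) ~ 0.693; the empty bin contributes nothing
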
Example #30
def veldist_1d_df(plotfilename,phi=_DEFAULTPHI,R=_DEFAULTR,
                  ngrid=201,saveDir='../bar/1dvar/'):
    """
    NAME:
       veldist_1d_df
    PURPOSE:
       make a plot showing the influence of the DF
    INPUT:
       plotfilename - filename for figure
       phi - Galactocentric azimuth
       R - Galactocentric radius
       ngrid - number of grid-points to calculate the los velocity distribution
               on
       saveDir - save pickles here
    OUTPUT:
       Figure in plotfilename
    HISTORY:
       2010-05-15 - Written - Bovy (NYU)
    """
    dftypes= ['dehnen','dehnen','dehnen','dehnen','shu']
    scalelengths= [1./3.,1./3.,1./4.,4./10.,1./3]
    sigscales= [1.,2./3.,3./4.,12./10.,1.]
    
    vloslinspace= (-.9,.9,ngrid)
    vloss= sc.linspace(*vloslinspace)

    vlosds= []
    basesavefilename= os.path.join(saveDir,'df_')
    ndfs= len(dftypes)
    for ii in range(ndfs):
        thissavefilename= basesavefilename+dftypes[ii]+'_%.3f_%.3f.sav' % (scalelengths[ii],sigscales[ii])
        if os.path.exists(thissavefilename):
            print "Restoring los-velocity distribution at df: "+dftypes[ii]+' %.3f and %.3f' % (scalelengths[ii],sigscales[ii])
            savefile= open(thissavefilename,'r')
            vlosd= pickle.load(savefile)
            savefile.close()
        else:
            print "Calculating los-velocity distribution at df: "+dftypes[ii]+' %.3f and %.3f' % (scalelengths[ii],sigscales[ii])
            potparams= (0.9,0.01,25.*_degtorad,.8,None)
            dftype= dftypes[ii]
            dfparams= (scalelengths[ii],sigscales[ii],0.2)
            vlosd= predictVlos(vloslinspace,
                               l=phi,
                               d=R,
                               distCoord='GCGC',
                               pot='bar',beta=0.,
                               potparams=potparams,
                               dftype=dftype,dfparams=dfparams)
            vlosd= vlosd/(sc.nansum(vlosd)*(vloss[1]-vloss[0]))
            savefile= open(thissavefilename,'w')
            pickle.dump(vlosd,savefile)
            savefile.close()
        vlosds.append(vlosd)
    #Plot
    plot.bovy_print()
    plot.bovy_plot(vloss,vlosds[0],'k-',zorder=3,
                   xrange=[vloslinspace[0],vloslinspace[1]],
                   yrange=[0.,sc.amax(sc.array(vlosds).flatten())*1.1],
                   xlabel=r'$v_{\mathrm{los}} / v_0$')
    plot.bovy_plot(vloss,vlosds[1],ls='-',color='0.75',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[2],ls='-',color='0.60',
                   overplot=True,zorder=2,lw=2.)
    plot.bovy_plot(vloss,vlosds[3],ls='-',color='0.45',
                   overplot=True,zorder=2,lw=1.5)
    plot.bovy_plot(vloss,vlosds[4],ls='-',color='0.3',
                   overplot=True,zorder=2,lw=1.5)
    plot.bovy_text(r'$\mathrm{distribution\ function}$',title=True)
    plot.bovy_text(0.53,.3,r'$R_s = 0.25 R_0$'+'\n'
                   +r'$R_{\sigma} = 2 R_s$'+'\n'
                   +r'$\mathrm{fiducial}$'+'\n'
                   +r'$\mathrm{Shu\ DF}$'+'\n'
                   +r'$R_s = 0.4 R_0$',size=10.)
    plot.bovy_end_print(plotfilename)
Example #31
File: analysis.py Project: firasm/CEST
def fit_px_cest(scn_to_analyse, xval, yval, fitrounds = 1):

    scn = sarpy.Scan(scn_to_analyse)
    pdata_num = 0 

    # Defining parameters
    freq_list = scn.method.CEST_FreqListPPM

    ppm_limit_min = -50
    ppm_limit_max = 50
    exclude_ppm = 200
    normalize_to_ppm = 66.6

    possibleNormalizations = [i for i, x in enumerate(freq_list) if numpy.abs(x - normalize_to_ppm) <1E-4]
    normalizeTo = possibleNormalizations[-1]

    # Get only the frequencies within the ppm_limit
    ppm_filtered = [f for f in freq_list if ppm_limit_max > f > ppm_limit_min]

    # Exclude the dummy frequencies (at exclude_ppm) and sort
    ppm_filtered = sorted([n for n in ppm_filtered if n != exclude_ppm])

    # Get the index of the good frequencies relative to the original list 
    ppm_filtered_ind = [freq_list.index(c) for c in ppm_filtered]  

    # get the freqs that'll be used for water fit
    water_fit_freqs = [f for f in ppm_filtered if (numpy.abs(f)< 3.)]
    water_fit_freqs_ind = sorted([ppm_filtered.index(c) for c in water_fit_freqs])

    # Create some empty arrays
    water_shifts = numpy.zeros(shape=(1))  + numpy.nan
    new_shifted = numpy.zeros(shape=(1)) + numpy.nan

    newstruct = numpy.zeros((1), dtype=[('offset', 'float64'),
       ('A1', 'float64'),('w1', 'float64'),('p1', 'float64'),
       ('A2', 'float64'),('w2', 'float64'),('p2', 'float64'),
       ('A3', 'float64'),('w3', 'float64'),('p3', 'float64'),
       ('A4', 'float64'),('w4', 'float64'),('p4', 'float64'),
       ('water_A', 'float64'),('water_w', 'float64'),('water_p', 'float64')])

    # Fit counter: tracks the number of rounds the data has been fit
    fitcount = 0

    while fitcount < fitrounds:
        # Get the data and normalize it to index of normalize_to_ppm
        tmp = scn.pdata[0].data[xval,yval,:][ppm_filtered_ind] / scn.pdata[0].data[xval,yval,normalizeTo]           
   
        # First do the water fit and shift the data so water is at 0  
        shiftParams = fit_water_peak(tmp[water_fit_freqs_ind],water_fit_freqs,allParams=True)
        shift = shiftParams[3]

        # Interpolating the Y-data so that it gets shifted to the acquired offsets!
        if numpy.isfinite(shift):
            s_shifted_back = scipy.interp(ppm_filtered, numpy.array(ppm_filtered) + shift/2, tmp)
            new_shifted = s_shifted_back
        else:
            print(shift)

        if fitcount>0: # Use parameters from last fit 
            testParams = h_convertBetweenStructArrays(newstruct,toType='array')

        else: # Get initial starting parameters
            testParams = get_neighbours_starting()
            testParams = h_convertBetweenStructArrays(testParams,toType = 'array')

        fit_params,cov,infodict,mesg,ier = scipy.optimize.leastsq(
                                                    h_residual_Zspectrum_N,
                                                    testParams,
                                                    args=(new_shifted, ppm_filtered), 
                                                    full_output = True,
                                                    maxfev = 900,
                                                    ftol =1E-9)
        newstruct['offset'] = fit_params[0]
        newstruct['A1'] = fit_params[1]
        newstruct['w1'] = fit_params[2]
        newstruct['p1'] = fit_params[3]
        newstruct['A2'] = fit_params[4]
        newstruct['w2'] = fit_params[5]
        newstruct['p2'] = fit_params[6]
        newstruct['A3'] = fit_params[7]
        newstruct['w3'] = fit_params[8]
        newstruct['p3'] = fit_params[9]
        newstruct['A4'] = fit_params[10]
        newstruct['w4'] = fit_params[11]
        newstruct['p4'] = fit_params[12]
        newstruct['water_A'] = fit_params[13]
        newstruct['water_w'] = fit_params[14]
        newstruct['water_p'] = fit_params[15]
      
        fitcount+=1
    freqs = numpy.arange(-10,10,0.1)
    fit_quality = scipy.nansum(numpy.abs(new_shifted - h_zspectrum_N(fit_params,ppm_filtered)))

    return {'fit_params':newstruct, 
            'data': [ppm_filtered, new_shifted],
            'fitdata': [freqs, h_zspectrum_N(newstruct, freqs)],
            'fit_quality': fit_quality}
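
Note: fit_quality above is simply the L1 norm of the residual between the shifted data and the fitted Z-spectrum; using nansum means offsets that failed to interpolate (NaN) do not poison the score. A toy version of the metric, assuming made-up data and model arrays (lower is better):

import numpy as np

def l1_fit_quality(data, model):
    """Sum of absolute residuals, skipping NaN samples."""
    return np.nansum(np.abs(np.asarray(data, float) - np.asarray(model, float)))

print(l1_fit_quality([1.0, np.nan, 3.0], [1.1, 2.0, 2.8]))  # ~0.3; the NaN sample is ignored
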
Example #32
def calculate_avg():  #DONE

	global data_avg
	global data_std
	global nb_rows
	global nb_cols
	
	#calculate raw average
	#---------------------
	data_avg = np.zeros((nb_rows,nb_cols))
	if len(args.xvgfilenames) > 1:
		data_std = np.zeros((nb_rows,nb_cols-1))
	data_avg[:,0] = first_col
	for col_index in range(1, nb_cols):
		col_name = columns_names[col_index-1]
		#initialise average with first file
		filename = args.xvgfilenames[0]
		tmp_col_nb = files_columns[filename]["leg2col"][col_name]
		tmp_col_avg = files_columns[filename]["data"][:,tmp_col_nb:tmp_col_nb+1] * files_columns[filename]["weight"] * len(args.xvgfilenames) / float(weight_sum)
				
		#add columns of following files
		for f_index in range(1,len(args.xvgfilenames)):
			filename = args.xvgfilenames[f_index]
			tmp_col_nb = files_columns[filename]["leg2col"][col_name]
			tmp_col_avg = np.concatenate([tmp_col_avg,files_columns[filename]["data"][:,tmp_col_nb:tmp_col_nb+1] * files_columns[filename]["weight"] * len(args.xvgfilenames) / float(weight_sum)], axis = 1)	
				
		if len(args.xvgfilenames) > 1:

			#calculate weighted average taking into account "nan"
			#----------------------------------------------------
			data_avg[:,col_index] =  scipy.stats.nanmean(tmp_col_avg, axis = 1)
						
			#calculate unbiased weighted std dev taking into account "nan"
			#-------------------------------------------------------------
			#initialise average with first file
			filename = args.xvgfilenames[0]
			tmp_col_nb = files_columns[filename]["leg2col"][col_name]
			tmp_col_std = files_columns[filename]["weight"] * (files_columns[filename]["data"][:,tmp_col_nb:tmp_col_nb+1] - data_avg[:,col_index:col_index+1])**2
			tmp_weigh2_sum = files_columns[filename]["weight"]**2
			
			#add columns of following files
			for f_index in range(1,len(args.xvgfilenames)):
				filename = args.xvgfilenames[f_index]
				tmp_col_nb = files_columns[filename]["leg2col"][col_name]
				tmp_col_std = np.concatenate([tmp_col_std, files_columns[filename]["weight"] * (files_columns[filename]["data"][:,tmp_col_nb:tmp_col_nb+1] - data_avg[:,col_index:col_index+1])**2], axis = 1)	
				tmp_weigh2_sum += files_columns[filename]["weight"]**2
						
			#calculate unbiased standard deviation as defined on wikipedia: https://en.wikipedia.org/wiki/Weighted_variance#Weighted_sample_variance
			tmp_col_std = np.sqrt(weight_sum / float(weight_sum**2 - tmp_weigh2_sum) * scipy.nansum(tmp_col_std, axis = 1))
			data_std[:,col_index-1] = tmp_col_std

		else:
			data_avg[:,col_index] = tmp_col_avg[:,0]
			
	#update by smoothing
	#-------------------
	if args.nb_smoothing > 1:
		nb_rows = nb_rows - args.nb_smoothing + 1
		tmp_data_avg_smoothed = np.zeros((nb_rows,nb_cols))
		tmp_data_std_smoothed = np.zeros((nb_rows,nb_cols-1))
		tmp_data_avg_smoothed[:,0] = np.transpose(rolling_avg(np.transpose(data_avg[:,0]))[0])

		for col_index in range(1, nb_cols):
			tmp_avg, tmp_std =  rolling_avg(np.transpose(data_avg[:,col_index]))
			tmp_data_avg_smoothed[:,col_index] = np.transpose(tmp_avg)
			
			#if one file the std correspond to the fluctuation around the smooth value
			if len(args.xvgfilenames) == 1:
				tmp_data_std_smoothed[:,col_index-1] = np.transpose(tmp_std)
			#if several files the std correspond to the smoothing of the std obtained when calculating the files average
			else:
				tmp_data_std_smoothed[:,col_index-1] = np.transpose(rolling_avg(np.transpose(data_std[:,col_index-1]))[0])
		
		data_avg = tmp_data_avg_smoothed
		data_std = tmp_data_std_smoothed
	
	#update by skipping
	#------------------
	if args.nb_skipping > 1 :
		rows_to_keep = [r for r in range(0,nb_rows) if r%args.nb_skipping ==0]
		nb_rows = len(rows_to_keep)
		data_avg = data_avg[rows_to_keep,:]
		if len(args.xvgfilenames) > 1:
			data_std = data_std[rows_to_keep,:]
	
	#replace nan values if necessary
	#-------------------------------
	if args.nan2num != "no":
		data_avg[np.isnan(data_avg)] = args.nan2num
		if len(args.xvgfilenames) > 1:
			data_std[np.isnan(data_std)] = args.nan2num
	
	return
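
Note: the std-dev step above implements the unbiased weighted sample variance, s^2 = V1 / (V1^2 - V2) * sum_i w_i (x_i - mu)^2 with V1 = sum_i w_i and V2 = sum_i w_i^2, as referenced to Wikipedia in the comment. A compact sketch of the same formula on a single column (reducing to one column is my simplification):

import numpy as np

def weighted_std_unbiased(x, w):
    """Unbiased weighted sample standard deviation (reliability weights)."""
    x, w = np.asarray(x, float), np.asarray(w, float)
    v1, v2 = w.sum(), (w ** 2).sum()
    mu = np.nansum(w * x) / v1
    return np.sqrt(v1 / (v1 ** 2 - v2) * np.nansum(w * (x - mu) ** 2))

# With equal weights this reduces to the ordinary ddof=1 estimator:
print(weighted_std_unbiased([1.0, 2.0, 3.0], [1.0, 1.0, 1.0]))  # 1.0
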
Example #33
def lowerBound(x1, x2, w1, w2, y, tau):
    xw = SP.outer(x1, w1)
    return SP.nansum(tau * (y**2 + SP.outer(x2, w2) - 2 * xw * y))
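
Note: lowerBound evaluates an expected squared error tau * E[(y - xw)^2] from first moments (x1, w1) and second moments (x2, w2) of a rank-1 factorization, and nansum lets NaN entries of y (missing observations) drop out of the bound. A toy check under assumed inputs: when y equals the outer product exactly and the second moments are the squared first moments, every finite term cancels:

import numpy as np

x1, w1 = np.array([1.0, 2.0]), np.array([0.5, 1.0, 1.5])
x2, w2 = x1 ** 2, w1 ** 2          # zero-variance "second moments"
y = np.outer(x1, w1)
y[0, 1] = np.nan                   # a missing observation
tau = 1.0
bound = np.nansum(tau * (y ** 2 + np.outer(x2, w2) - 2 * np.outer(x1, w1) * y))
print(bound)  # 0.0: each finite term is (xw)^2 + (xw)^2 - 2(xw)^2 = 0; the NaN is skipped
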
Example #34
def apogee_figures(
    plotfilename,
    savefilename=None,
    bar_angle=25.0,
    dt=None,
    l=None,
    rolr=None,
    bar_strength=None,
    slope=None,
    vlosgrid=201,
    dgrid=101,
    conditional=False,
    dmax=10.0 / 8.0,
):
    """
    NAME:
       apogee_figures
    PURPOSE:
       make a vlos-d plot for APOGEE
    INPUT:
       plotfilename - name of the file the figure will be saved to
       savefilename - name of the file the velocity distributions will
                      be saved to
       bar_angle= angle between the GC-Sun line and the bar major axis
       dt= - time to integrate for (in bar-periods)
       l= - Galactic longitude
       rolr= radius of the outer Lindblad resonance
       bar_strength= strength of the bar
       slope= slope of the rotation curve (power-law index)
       conditional= if True, normalize each velocity distribution independently
    OUTPUT:
       saves velocity distributions to pickle save file and produces plot
    HISTORY:
       2011-03-19 - Written - Bovy (NYU)
    """
    # Grid
    vlosgrid = _VLOSGRID
    dgrid = _DGRID
    vloslinspace = (-0.9, 0.9, vlosgrid)
    vloss = sc.linspace(*vloslinspace)
    dlinspace = (0.0001, dmax, dgrid)
    ds = sc.linspace(*dlinspace)

    # Set up parameters
    potparams = (rolr, bar_strength, bar_angle * _degtorad, 0.8, None)

    if os.path.exists(savefilename):
        savefile = open(savefilename, "rb")
        vlosds = pickle.load(savefile)
        dd = pickle.load(savefile)
        savefile.close()
    else:
        vlosds = []
        dd = 0

    while dd < dgrid:
        print "Working on %i / %i ..." % (dd + 1, dgrid)
        # Calculate vlos for this distance
        if dt is None:
            vlosd = predictVlos(
                vloslinspace, l=(l * _degtorad), d=ds[dd], distCoord="Sun", pot="bar", beta=slope, potparams=potparams
            )
        else:
            vlosd = predictVlos(
                vloslinspace,
                l=(l * _degtorad),
                d=ds[dd],
                distCoord="Sun",
                pot="bar",
                beta=slope,
                potparams=potparams,
                t=dt,
            )
        vlosds.append(vlosd)
        dd += 1
        # Save
        savefile = open(savefilename, "wb")
        pickle.dump(vlosds, savefile)
        pickle.dump(dd, savefile)
        savefile.close()

    # Plot
    if conditional:
        newvlosds = []
        for vlosd in vlosds:
            newvlosds.append(vlosd / (sc.nansum(vlosd) * (vloss[1] - vloss[0])))
        vlosds = newvlosds
    bovy_plot.bovy_print()
    bovy_plot.bovy_dens2d(
        sc.array(vlosds).T,
        origin="lower",
        cmap="gist_yarg",
        aspect=0.66,
        xlabel=r"$d\ /\ R_0$",
        ylabel=r"$(v_{\mathrm{los}} - \vec{v}_c \cdot \vec{d})\ /\ v_0$",
        yrange=sc.array([vloslinspace[0], vloslinspace[1]]),
        xrange=sc.array([dlinspace[0], dlinspace[1]]),
        contours=True,
        cntrmass=True,
        cntrcolors=["w", "w", "w", "w", "w", "k", "k", "k", "k", "k"],
    )
    if bar_strength == 0.0:
        bovy_plot.bovy_text(r"$l = %i^\circ$" % int(l), top_right=True)
    else:
        bovy_plot.bovy_text(
            r"$l = %i^\circ$" % int(l)
            + "\n"
            + r"$R_{\mathrm{OLR}} = %3.1f$" % rolr
            + "\n"
            + r"$\alpha = %5.3f$" % bar_strength
            + "\n"
            + r"$\phi_{\mathrm{bar}} = %i^\circ$" % int(bar_angle),
            top_right=True,
        )
    bovy_plot.bovy_end_print(plotfilename)
Example #35
    def processdata(self):
        """ This will perform the data processing and create the ACF estimates
        for both the data and noise.
        Inputs:
        timevec - A numpy array of times in seconds where the integration will begin.
        inttime - The integration time in seconds.
        lagfunc - A function that will make the desired lag products.
        Outputs:
        DataLags: A dictionary with keys 'ACF', 'Pow', 'Pulses', 'Time' and
        'AddedNoiseACF' that holds the numpy arrays of the data.
        NoiseLags: A dictionary with keys 'ACF', 'Pow', 'Pulses' and 'Time' that
        holds the numpy arrays of the noise.
        """
        timevec = self.simparams['Timevec'] + self.timeoffset
        inttime = self.simparams['Tint']
        # Get array sizes

        NNs = int(self.simparams['NNs'])
        range_gates = self.simparams['Rangegates']
        N_rg = len(range_gates)  # take the size
        pulse = self.simparams['Pulse']
        Pulselen = len(pulse)
        N_samps = N_rg + Pulselen - 1
        simdtype = self.simparams['dtype']
        Ntime = len(timevec)

        if 'outangles' in self.simparams.keys():
            Nbeams = len(self.simparams['outangles'])
            inttime = inttime
        else:
            Nbeams = len(self.simparams['angles'])

        # Choose type of processing
        if self.simparams['Pulsetype'].lower() == 'barker':
            lagfunc = BarkerLag
            Nlag = 1
        else:
            lagfunc = CenteredLagProduct
            Nlag = Pulselen
        # initialize output arrays
        outdata = sp.zeros((Ntime, Nbeams, N_rg, Nlag), dtype=simdtype)
        outaddednoise = sp.zeros((Ntime, Nbeams, N_rg, Nlag), dtype=simdtype)
        outnoise = sp.zeros((Ntime, Nbeams, NNs - Pulselen + 1, Nlag),
                            dtype=simdtype)
        pulses = sp.zeros((Ntime, Nbeams))
        pulsesN = sp.zeros((Ntime, Nbeams))
        timemat = sp.zeros((Ntime, 2))
        Ksysvec = self.sensdict['Ksys']
        # set up arrays that hold the location of pulses that are to be processed together
        infoname = self.datadir / 'INFO.h5'
        # Just going to assume that the info file is in the directory
        infodict = h52dict(str(infoname))
        flist = infodict['Files']
        file_list = [str(self.datadir / i) for i in flist]
        pulsen_list = infodict['Pulses']
        beamn_list = infodict['Beams']
        time_list = infodict['Time']
        file_loclist = [
            ifn * sp.ones(len(ifl)) for ifn, ifl in enumerate(beamn_list)
        ]
        if 'NoiseTime' in infodict.keys():
            sridata = True
            tnoiselist = infodict['NoiseTime']
            nfile_loclist = [
                ifn * sp.ones(len(ifl)) for ifn, ifl in enumerate(tnoiselist)
            ]
        else:
            sridata = False

        pulsen = sp.hstack(pulsen_list).astype(int)  # pulse number
        beamn = sp.hstack(beamn_list).astype(int)  # beam numbers
        ptimevec = sp.hstack(time_list).astype(float)  # time of each pulse
        file_loc = sp.hstack(file_loclist).astype(int)  # location in the file
        if sridata:
            ntimevec = sp.vstack(tnoiselist).astype(float)
            nfile_loc = sp.hstack(nfile_loclist).astype(int)
            outnoise = sp.zeros((Ntime, Nbeams, NNs - Pulselen + 1, Nlag),
                                dtype=simdtype)

        # run the time loop
        print("Forming ACF estimates")

        # For each time go through and read only the necessary files
        for itn, it in enumerate(timevec):
            update_progress(
                float(itn) / Ntime, "Time {0:d} of {1:d}".format(itn, Ntime))
            # do the bookkeeping to determine locations of data within the files
            cur_tlim = (it, it + inttime)
            curcases = sp.logical_and(ptimevec >= cur_tlim[0],
                                      ptimevec < cur_tlim[1])
            # SRI data Hack
            if sridata:
                curcases_n = sp.logical_and(ntimevec[:, 0] >= cur_tlim[0],
                                            ntimevec[:, 0] < cur_tlim[1])
                curfileloc_n = nfile_loc[curcases_n]
                curfiles_n = set(curfileloc_n)
            if not sp.any(curcases):
                update_progress(
                    float(itn) / Ntime,
                    "No pulses for time {0:d} of {1:d}, lagdata adjusted accordinly"
                    .format(itn, Ntime))
                outdata = outdata[:itn]
                outnoise = outnoise[:itn]
                pulses = pulses[:itn]
                pulsesN = pulsesN[:itn]
                timemat = timemat[:itn]
                continue
            pulseset = set(pulsen[curcases])
            poslist = [sp.where(pulsen == item)[0] for item in pulseset]
            try:
                pos_all = sp.hstack(poslist)
                curfileloc = file_loc[pos_all]
            except:
                pdb.set_trace()
            # Find the needed files and beam numbers
            curfiles = set(curfileloc)
            beamlocs = beamn[pos_all]
            timemat[itn, 0] = ptimevec[pos_all].min()
            timemat[itn, 1] = ptimevec[pos_all].max()
            # curdata pulls out all data from all of the beams and positions
            curdata = sp.zeros((len(pos_all), N_samps), dtype=simdtype)
            curaddednoise = sp.zeros((len(pos_all), N_samps), dtype=simdtype)
            curnoise = sp.zeros((len(pos_all), NNs), dtype=simdtype)
            # Open files and get required data
            # XXX come up with a way to open new files without rereading data that is already in memory
            for ifn in curfiles:
                curfileit = [
                    sp.where(pulsen_list[ifn] == item)[0] for item in pulseset
                ]
                curfileitvec = sp.hstack(curfileit)
                ifile = file_list[ifn]
                curh5data = h52dict(ifile)
                file_arlocs = sp.where(curfileloc == ifn)[0]
                curdata[file_arlocs] = curh5data['RawData'][curfileitvec]

                curaddednoise[file_arlocs] = curh5data['AddedNoise'].astype(
                    simdtype)[curfileitvec]
                # Read in noise data when you don't have ACFs
                if not sridata:
                    curnoise[file_arlocs] = curh5data['NoiseData'].astype(
                        simdtype)[curfileitvec]
            #SRI data
            if sridata:
                curnoise = sp.zeros(
                    (len(curfileloc_n), Nbeams, NNs - Pulselen + 1, Pulselen),
                    dtype=simdtype)
                for ifn in curfiles_n:
                    curfileit_n = sp.where(
                        sp.logical_and(tnoiselist[ifn][:, 0] >= cur_tlim[0],
                                       tnoiselist[ifn][:, 0] < cur_tlim[1]))[0]
                    ifile = file_list[ifn]
                    curh5data_n = h52dict(ifile)
                    file_arlocs = sp.where(curfileloc_n == ifn)[0]
                    curnoise[file_arlocs] = curh5data_n['NoiseDataACF'][
                        curfileit_n]

            # differentiate between phased arrays and dish antennas
            if self.sensdict['Name'].lower() in ['risr', 'pfisr', 'risr-n']:
                # After data is read in form lags for each beam
                for ibeam in range(Nbeams):
                    update_progress(
                        float(itn) / Ntime + float(ibeam) / Ntime / Nbeams,
                        "Beam {0:d} of {1:d}".format(ibeam, Nbeams))
                    beamlocstmp = sp.where(beamlocs == ibeam)[0]
                    pulses[itn, ibeam] = len(beamlocstmp)

                    outdata[itn,
                            ibeam] = lagfunc(curdata[beamlocstmp].copy(),
                                             numtype=self.simparams['dtype'],
                                             pulse=pulse,
                                             lagtype=self.simparams['lagtype'])
                    if sridata:
                        pulsesN[itn, ibeam] = len(curnoise)
                        outnoise[itn, ibeam] = sp.nansum(curnoise[:, ibeam],
                                                         axis=0)
                    else:
                        pulsesN[itn, ibeam] = len(beamlocstmp)
                        outnoise[itn, ibeam] = lagfunc(
                            curnoise[beamlocstmp].copy(),
                            numtype=self.simparams['dtype'],
                            pulse=pulse,
                            lagtype=self.simparams['lagtype'])
                    outaddednoise[itn, ibeam] = lagfunc(
                        curaddednoise[beamlocstmp].copy(),
                        numtype=self.simparams['dtype'],
                        pulse=pulse,
                        lagtype=self.simparams['lagtype'])
            else:
                for ibeam, ibeamlist in enumerate(self.simparams['outangles']):
                    update_progress(
                        float(itn) / Ntime + float(ibeam) / Ntime / Nbeams,
                        "Beam {0:d} of {1:d}".format(ibeam, Nbeams))
                    beamlocstmp = sp.where(sp.in1d(beamlocs, ibeamlist))[0]
                    curbeams = beamlocs[beamlocstmp]
                    ksysmat = Ksysvec[curbeams]
                    ksysmean = Ksysvec[ibeamlist[0]]
                    inputdata = curdata[beamlocstmp].copy()
                    noisedata = curnoise[beamlocstmp].copy()
                    noisedataadd = curaddednoise[beamlocstmp].copy()
                    ksysmult = ksysmean / sp.tile(ksysmat[:, sp.newaxis],
                                                  (1, inputdata.shape[1]))
                    ksysmultn = ksysmean / sp.tile(ksysmat[:, sp.newaxis],
                                                   (1, noisedata.shape[1]))
                    ksysmultna = ksysmean / sp.tile(ksysmat[:, sp.newaxis],
                                                    (1, noisedataadd.shape[1]))
                    pulses[itn, ibeam] = len(beamlocstmp)
                    pulsesN[itn, ibeam] = len(beamlocstmp)
                    outdata[itn,
                            ibeam] = lagfunc(inputdata * ksysmult,
                                             numtype=self.simparams['dtype'],
                                             pulse=pulse,
                                             lagtype=self.simparams['lagtype'])
                    outnoise[itn, ibeam] = lagfunc(
                        noisedata * ksysmultn,
                        numtype=self.simparams['dtype'],
                        pulse=pulse,
                        lagtype=self.simparams['lagtype'])
                    outaddednoise[itn, ibeam] = lagfunc(
                        noisedataadd * ksysmultna,
                        numtype=self.simparams['dtype'],
                        pulse=pulse,
                        lagtype=self.simparams['lagtype'])
        # Create output dictionaries and output data
        DataLags = {
            'ACF': outdata,
            'Pow': outdata[:, :, :, 0].real,
            'Pulses': pulses,
            'Time': timemat,
            'AddedNoiseACF': outaddednoise
        }
        NoiseLags = {
            'ACF': outnoise,
            'Pow': outnoise[:, :, :, 0].real,
            'Pulses': pulsesN,
            'Time': timemat
        }
        return (DataLags, NoiseLags)
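
Note: in the SRI branch above the noise ACFs arrive pre-formed, so the per-beam estimate is just sp.nansum(curnoise[:, ibeam], axis=0): records are summed over the pulse axis and any record that is all NaN (e.g. a dropped integration) contributes nothing. A minimal illustration with a made-up (n_records, n_lags) array:

import numpy as np

noise_acfs = np.array([[1.0, 0.5],
                       [np.nan, np.nan],   # dropped record
                       [3.0, 1.5]])
print(np.nansum(noise_acfs, axis=0))  # [4. 2.]: the NaN record is skipped
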
Example #36
def bovy_dens2d(X,**kwargs):
    """
    NAME:

       bovy_dens2d

    PURPOSE:

       plot a 2d density with optional contours

    INPUT:

       first argument is the density

       matplotlib.pyplot.imshow keywords (see http://matplotlib.sourceforge.net/api/axes_api.html#matplotlib.axes.Axes.imshow)

       xlabel - (raw string!) x-axis label, LaTeX math mode, no $s needed

       ylabel - (raw string!) y-axis label, LaTeX math mode, no $s needed

       xrange

       yrange

       noaxes - don't plot any axes

       overplot - if True, overplot

       colorbar - if True, add colorbar

       shrink= colorbar argument: shrink the colorbar by the factor (optional)

       conditional - normalize each column separately (for probability densities, i.e., cntrmass=True)

       Contours:
       
       justcontours - if True, only draw contours

       contours - if True, draw contours (10 by default)

       levels - contour-levels

       cntrmass - if True, the density is a probability and the levels are probability masses contained within the contour

       cntrcolors - colors for contours (single color or array)

       cntrlabel - label the contours

       cntrlw, cntrls - linewidths and linestyles for contour

       cntrlabelsize, cntrlabelcolors,cntrinline - contour arguments

       cntrSmooth - use ndimage.gaussian_filter to smooth before contouring

       onedhists - if True, make one-d histograms on the sides

       onedhistcolor - histogram color

       retAxes= return all Axes instances

       retCont= return the contour instance

    OUTPUT:

       plot to output device, Axes instances depending on input

    HISTORY:

       2010-03-09 - Written - Bovy (NYU)

    """
    overplot= kwargs.pop('overplot',False)
    if not overplot:
        pyplot.figure()
    xlabel= kwargs.pop('xlabel',None)
    ylabel= kwargs.pop('ylabel',None)
    zlabel= kwargs.pop('zlabel',None)
    if 'extent' in kwargs:
        extent= kwargs.pop('extent')
    else:
        xlimits= kwargs.pop('xrange',[0,X.shape[1]])
        ylimits= kwargs.pop('yrange',[0,X.shape[0]])
        extent= xlimits+ylimits
    if not 'aspect' in kwargs:
        kwargs['aspect']= (xlimits[1]-xlimits[0])/float(ylimits[1]-ylimits[0])
    noaxes= kwargs.pop('noaxes',False)
    justcontours= kwargs.pop('justcontours',False)
    if ('contours' in kwargs and kwargs['contours']) or \
            'levels' in kwargs or justcontours or \
            ('cntrmass' in kwargs and kwargs['cntrmass']):
        contours= True
    else:
        contours= False
    kwargs.pop('contours',None)
    if 'levels' in kwargs:
        levels= kwargs['levels']
        kwargs.pop('levels')
    elif contours:
        if 'cntrmass' in kwargs and kwargs['cntrmass']:
            levels= sc.linspace(0.,1.,_DEFAULTNCNTR)
        elif True in sc.isnan(sc.array(X)):
            levels= sc.linspace(sc.nanmin(X),sc.nanmax(X),_DEFAULTNCNTR)
        else:
            levels= sc.linspace(sc.amin(X),sc.amax(X),_DEFAULTNCNTR)
    cntrmass= kwargs.pop('cntrmass',False)
    conditional= kwargs.pop('conditional',False)
    cntrcolors= kwargs.pop('cntrcolors','k')
    cntrlabel= kwargs.pop('cntrlabel',False)
    cntrlw= kwargs.pop('cntrlw',None)
    cntrls= kwargs.pop('cntrls',None)
    cntrSmooth= kwargs.pop('cntrSmooth',None)
    cntrlabelsize= kwargs.pop('cntrlabelsize',None)
    cntrlabelcolors= kwargs.pop('cntrlabelcolors',None)
    cntrinline= kwargs.pop('cntrinline',None)
    retCumImage= kwargs.pop('retCumImage',False)
    cb= kwargs.pop('colorbar',False)
    shrink= kwargs.pop('shrink',None)
    onedhists= kwargs.pop('onedhists',False)
    onedhistcolor= kwargs.pop('onedhistcolor','k')
    retAxes= kwargs.pop('retAxes',False)
    retCont= kwargs.pop('retCont',False)
    if onedhists:
        if overplot: fig= pyplot.gcf()
        else: fig= pyplot.figure()
        nullfmt   = NullFormatter()         # no labels
        # definitions for the axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left+width
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        axScatter = pyplot.axes(rect_scatter)
        axHistx = pyplot.axes(rect_histx)
        axHisty = pyplot.axes(rect_histy)
        # no labels
        axHistx.xaxis.set_major_formatter(nullfmt)
        axHistx.yaxis.set_major_formatter(nullfmt)
        axHisty.xaxis.set_major_formatter(nullfmt)
        axHisty.yaxis.set_major_formatter(nullfmt)
        fig.sca(axScatter)
    ax=pyplot.gca()
    ax.set_autoscale_on(False)
    if conditional:
        plotthis= X/sc.tile(sc.sum(X,axis=0),(X.shape[1],1))
    else:
        plotthis= X
    if not justcontours:
        out= pyplot.imshow(plotthis,extent=extent,**kwargs)
    if not overplot:
        pyplot.axis(extent)
        _add_axislabels(xlabel,ylabel)
        _add_ticks()
    #Add colorbar
    if cb and not justcontours:
        if shrink is None:
            shrink= sc.amin([float(kwargs.pop('aspect',1.))*0.87,1.])
        CB1= pyplot.colorbar(out,shrink=shrink)
        if not zlabel is None:
            if zlabel[0] != '$':
                thiszlabel=r'$'+zlabel+'$'
            else:
                thiszlabel=zlabel
            CB1.set_label(thiszlabel)
    if contours or retCumImage:
        aspect= kwargs.get('aspect',None)
        origin= kwargs.get('origin',None)
        if cntrmass:
            #Sum from the top down!
            plotthis[sc.isnan(plotthis)]= 0.
            sortindx= sc.argsort(plotthis.flatten())[::-1]
            cumul= sc.cumsum(sc.sort(plotthis.flatten())[::-1])/sc.sum(plotthis.flatten())
            cntrThis= sc.zeros(sc.prod(plotthis.shape))
            cntrThis[sortindx]= cumul
            cntrThis= sc.reshape(cntrThis,plotthis.shape)
        else:
            cntrThis= plotthis
        if contours:
            if not cntrSmooth is None:
                cntrThis= ndimage.gaussian_filter(cntrThis,cntrSmooth,
                                                  mode='nearest')
            cont= pyplot.contour(cntrThis,levels,colors=cntrcolors,
                                 linewidths=cntrlw,extent=extent,aspect=aspect,
                                 linestyles=cntrls,origin=origin)
            if cntrlabel:
                pyplot.clabel(cont,fontsize=cntrlabelsize,
                              colors=cntrlabelcolors,
                              inline=cntrinline)
    if noaxes:
        ax.set_axis_off()
    #Add onedhists
    if not onedhists:
        if retCumImage:
            return cntrThis
        elif retAxes:
            return pyplot.gca()
        elif retCont:
            return cont
        elif justcontours:
            return cntrThis
        else:
            return out
    histx= sc.nansum(X.T,axis=1)*m.fabs(ylimits[1]-ylimits[0])/X.shape[1] #nansum bc nan is *no dens value*
    histy= sc.nansum(X.T,axis=0)*m.fabs(xlimits[1]-xlimits[0])/X.shape[0]
    histx[sc.isnan(histx)]= 0.
    histy[sc.isnan(histy)]= 0.
    dx= (extent[1]-extent[0])/float(len(histx))
    axHistx.plot(sc.linspace(extent[0]+dx,extent[1]-dx,len(histx)),histx,
                 drawstyle='steps-mid',color=onedhistcolor)
    dy= (extent[3]-extent[2])/float(len(histy))
    axHisty.plot(histy,sc.linspace(extent[2]+dy,extent[3]-dy,len(histy)),
                 drawstyle='steps-mid',color=onedhistcolor)
    axHistx.set_xlim( axScatter.get_xlim() )
    axHisty.set_ylim( axScatter.get_ylim() )
    axHistx.set_ylim( 0, 1.2*sc.amax(histx))
    axHisty.set_xlim( 0, 1.2*sc.amax(histy))
    if retCumImage:
        return cntrThis
    elif retAxes:
        return (axScatter,axHistx,axHisty)
    elif justcontours:
        return cntrThis
    else:
        return out
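
Note: the cntrmass block above ("Sum from the top down!") converts a density image into a cumulative-mass image: each pixel gets the total probability mass held by pixels at least as dense, so contouring at level p outlines the highest-density region containing mass p. A standalone sketch of the transform:

import numpy as np

def cumulative_mass_image(X):
    """Map each pixel to the mass contained in pixels at least as dense."""
    X = np.nan_to_num(np.asarray(X, float))      # NaN = no density
    sortindx = np.argsort(X.ravel())[::-1]       # densest first
    cumul = np.cumsum(np.sort(X.ravel())[::-1]) / X.sum()
    out = np.zeros(X.size)
    out[sortindx] = cumul
    return out.reshape(X.shape)

dens = np.array([[4.0, 1.0],
                 [2.0, 1.0]])
print(cumulative_mass_image(dens))  # [[0.5, 1.0], [0.75, 0.875]]
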
Example #37
File: bovy_plot.py Project: ritabanc/galpy
def bovy_dens2d(X,**kwargs):
    """
    NAME:

       bovy_dens2d

    PURPOSE:

       plot a 2d density with optional contours

    INPUT:

       first argument is the density

       matplotlib.pyplot.imshow keywords (see http://matplotlib.sourceforge.net/api/axes_api.html#matplotlib.axes.Axes.imshow)

       xlabel - (raw string!) x-axis label, LaTeX math mode, no $s needed

       ylabel - (raw string!) y-axis label, LaTeX math mode, no $s needed

       xrange

       yrange

       noaxes - don't plot any axes

       overplot - if True, overplot

       colorbar - if True, add colorbar

       shrink= colorbar argument: shrink the colorbar by the factor (optional)

       Contours:
       
       contours - if True, draw contours (10 by default)

       levels - contour-levels

       cntrmass - if True, the density is a probability and the levels 
                  are probability masses contained within the contour

       cntrcolors - colors for contours (single color or array)

       cntrlabel - label the contours

       cntrlw, cntrls - linewidths and linestyles for contour

       cntrlabelsize, cntrlabelcolors,cntrinline - contour arguments

       onedhists - if True, make one-d histograms on the sides

       onedhistcolor - histogram color

       retAxes= return all Axes instances

    OUTPUT:

    HISTORY:

       2010-03-09 - Written - Bovy (NYU)

    """
    if kwargs.has_key('overplot'):
        overplot= kwargs['overplot']
        kwargs.pop('overplot')
    else:
        overplot= False
    if not overplot:
        pyplot.figure()
    if kwargs.has_key('xlabel'):
        xlabel= kwargs['xlabel']
        kwargs.pop('xlabel')
    else:
        xlabel=None
    if kwargs.has_key('ylabel'):
        ylabel= kwargs['ylabel']
        kwargs.pop('ylabel')
    else:
        ylabel=None
    if kwargs.has_key('zlabel'):
        zlabel= kwargs['zlabel']
        kwargs.pop('zlabel')
    else:
        zlabel=None   
    if kwargs.has_key('extent'):
        extent= kwargs['extent']
        kwargs.pop('extent')
    else:
        if kwargs.has_key('xrange'):
            xlimits=list(kwargs['xrange'])
            kwargs.pop('xrange')
        else:
            xlimits=[0,X.shape[1]]
        if kwargs.has_key('yrange'):
            ylimits=list(kwargs['yrange'])
            kwargs.pop('yrange')
        else:
            ylimits=[0,X.shape[0]]
        extent= xlimits+ylimits
    if not kwargs.has_key('aspect'):
        kwargs['aspect']= (xlimits[1]-xlimits[0])/float(ylimits[1]-ylimits[0])
    if kwargs.has_key('noaxes'):
        noaxes= kwargs['noaxes']
        kwargs.pop('noaxes')
    else:
        noaxes= False
    if (kwargs.has_key('contours') and kwargs['contours']) or \
            kwargs.has_key('levels') or \
            (kwargs.has_key('cntrmass') and kwargs['cntrmass']):
        contours= True
    else:
        contours= False
    if kwargs.has_key('contours'): kwargs.pop('contours')
    if kwargs.has_key('levels'):
        levels= kwargs['levels']
        kwargs.pop('levels')
    elif contours:
        if kwargs.has_key('cntrmass') and kwargs['cntrmass']:
            levels= sc.linspace(0.,1.,_DEFAULTNCNTR)
        elif True in sc.isnan(sc.array(X)):
            levels= sc.linspace(sc.nanmin(X),sc.nanmax(X),_DEFAULTNCNTR)
        else:
            levels= sc.linspace(sc.amin(X),sc.amax(X),_DEFAULTNCNTR)
    if kwargs.has_key('cntrmass') and kwargs['cntrmass']:
        cntrmass= True
        kwargs.pop('cntrmass')
    else:
        cntrmass= False
        if kwargs.has_key('cntrmass'): kwargs.pop('cntrmass')
    if kwargs.has_key('cntrcolors'):
        cntrcolors= kwargs['cntrcolors']
        kwargs.pop('cntrcolors')
    elif contours:
        cntrcolors='k'
    if kwargs.has_key('cntrlabel') and kwargs['cntrlabel']:
        cntrlabel= True
        kwargs.pop('cntrlabel')
    else:
        cntrlabel= False
        if kwargs.has_key('cntrlabel'): kwargs.pop('cntrlabel')
    if kwargs.has_key('cntrlw'):
        cntrlw= kwargs['cntrlw']
        kwargs.pop('cntrlw')
    elif contours:
        cntrlw= None
    if kwargs.has_key('cntrls'):
        cntrls= kwargs['cntrls']
        kwargs.pop('cntrls')
    elif contours:
        cntrls= None
    if kwargs.has_key('cntrlabelsize'):
        cntrlabelsize= kwargs['cntrlabelsize']
        kwargs.pop('cntrlabelsize')
    elif contours:
        cntrlabelsize= None
    if kwargs.has_key('cntrlabelcolors'):
        cntrlabelcolors= kwargs['cntrlabelcolors']
        kwargs.pop('cntrlabelcolors')
    elif contours:
        cntrlabelcolors= None
    if kwargs.has_key('cntrinline'):
        cntrinline= kwargs['cntrinline']
        kwargs.pop('cntrinline')
    elif contours:
        cntrinline= None
    if kwargs.has_key('retCumImage'):
        retCumImage= kwargs['retCumImage']
        kwargs.pop('retCumImage')
    else:
        retCumImage= False
    if kwargs.has_key('colorbar'):
        cb= kwargs['colorbar']
        kwargs.pop('colorbar')
    else:
        cb= False
    if kwargs.has_key('shrink'):
        shrink= kwargs['shrink']
        kwargs.pop('shrink')
    else:
        shrink= None
    if kwargs.has_key('onedhists'):
        onedhists= kwargs['onedhists']
        kwargs.pop('onedhists')
    else:
        onedhists= False
    if kwargs.has_key('onedhistcolor'):
        onedhistcolor= kwargs['onedhistcolor']
        kwargs.pop('onedhistcolor')
    else:
        onedhistcolor= 'k'
    if kwargs.has_key('retAxes'):
        retAxes= kwargs['retAxes']
        kwargs.pop('retAxes')
    else:
        retAxes= False
    if onedhists:
        if overplot: fig= pyplot.gcf()
        else: fig= pyplot.figure()
        nullfmt   = NullFormatter()         # no labels
        # definitions for the axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left+width
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        axScatter = pyplot.axes(rect_scatter)
        axHistx = pyplot.axes(rect_histx)
        axHisty = pyplot.axes(rect_histy)
        # no labels
        axHistx.xaxis.set_major_formatter(nullfmt)
        axHistx.yaxis.set_major_formatter(nullfmt)
        axHisty.xaxis.set_major_formatter(nullfmt)
        axHisty.yaxis.set_major_formatter(nullfmt)
        fig.sca(axScatter)
    ax=pyplot.gca()
    ax.set_autoscale_on(False)
    out= pyplot.imshow(X,extent=extent,**kwargs)
    pyplot.axis(extent)
    _add_axislabels(xlabel,ylabel)
    _add_ticks()
    #Add colorbar
    if cb:
        if shrink is None:
            if kwargs.has_key('aspect'):
                shrink= sc.amin([float(kwargs['aspect'])*0.87,1.])
            else:
                shrink= 0.87
        CB1= pyplot.colorbar(out,shrink=shrink)
        if not zlabel is None:
            if zlabel[0] != '$':
                thiszlabel=r'$'+zlabel+'$'
            else:
                thiszlabel=zlabel
            CB1.set_label(thiszlabel)
    if contours or retCumImage:
        if kwargs.has_key('aspect'):
            aspect= kwargs['aspect']
        else:
            aspect= None
        if kwargs.has_key('origin'):
            origin= kwargs['origin']
        else:
            origin= None
        if cntrmass:
            #Sum from the top down!
            X[sc.isnan(X)]= 0.
            sortindx= sc.argsort(X.flatten())[::-1]
            cumul= sc.cumsum(sc.sort(X.flatten())[::-1])/sc.sum(X.flatten())
            cntrThis= sc.zeros(sc.prod(X.shape))
            cntrThis[sortindx]= cumul
            cntrThis= sc.reshape(cntrThis,X.shape)
        else:
            cntrThis= X
        if contours:
            cont= pyplot.contour(cntrThis,levels,colors=cntrcolors,
                                 linewidths=cntrlw,extent=extent,aspect=aspect,
                                 linestyles=cntrls,origin=origin)
            if cntrlabel:
                pyplot.clabel(cont,fontsize=cntrlabelsize,
                              colors=cntrlabelcolors,
                              inline=cntrinline)
    if noaxes:
        ax.set_axis_off()
    #Add onedhists
    if not onedhists:
        if retCumImage:
            return cntrThis
        elif retAxes:
            return pyplot.gca()
        else:
            return out
    histx= sc.nansum(X.T,axis=1)*m.fabs(ylimits[1]-ylimits[0])/X.shape[1] #nansum bc nan is *no dens value*
    histy= sc.nansum(X.T,axis=0)*m.fabs(xlimits[1]-xlimits[0])/X.shape[0]
    histx[sc.isnan(histx)]= 0.
    histy[sc.isnan(histy)]= 0.
    dx= (extent[1]-extent[0])/float(len(histx))
    axHistx.plot(sc.linspace(extent[0]+dx,extent[1]-dx,len(histx)),histx,
                 drawstyle='steps-mid',color=onedhistcolor)
    dy= (extent[3]-extent[2])/float(len(histy))
    axHisty.plot(histy,sc.linspace(extent[2]+dy,extent[3]-dy,len(histy)),
                 drawstyle='steps-mid',color=onedhistcolor)
    axHistx.set_xlim( axScatter.get_xlim() )
    axHisty.set_ylim( axScatter.get_ylim() )
    axHistx.set_ylim( 0, 1.2*sc.amax(histx))
    axHisty.set_xlim( 0, 1.2*sc.amax(histy))
    if retCumImage:
        return cntrThis
    elif retAxes:
        return (axScatter,axHistx,axHisty)
    else:
        return out
Example #38
def bovy_dens2d(X, **kwargs):
    """
    NAME:

       bovy_dens2d

    PURPOSE:

       plot a 2d density with optional contours

    INPUT:

       first argument is the density

       matplotlib.pyplot.imshow keywords (see http://matplotlib.sourceforge.net/api/axes_api.html#matplotlib.axes.Axes.imshow)

       xlabel - (raw string!) x-axis label, LaTeX math mode, no $s needed

       ylabel - (raw string!) y-axis label, LaTeX math mode, no $s needed

       xrange

       yrange

       noaxes - don't plot any axes

       overplot - if True, overplot

       colorbar - if True, add colorbar

       shrink= colorbar argument: shrink the colorbar by the factor (optional)

       conditional - normalize each column separately (for probability densities, i.e., cntrmass=True)

       gcf=True does not start a new figure (but does change the ranges and labels)

       Contours:
       
       justcontours - if True, only draw contours

       contours - if True, draw contours (10 by default)

       levels - contour-levels

       cntrmass - if True, the density is a probability and the levels are probability masses contained within the contour

       cntrcolors - colors for contours (single color or array)

       cntrlabel - label the contours

       cntrlw, cntrls - linewidths and linestyles for contour

       cntrlabelsize, cntrlabelcolors,cntrinline - contour arguments

       cntrSmooth - use ndimage.gaussian_filter to smooth before contouring

       onedhists - if True, make one-d histograms on the sides

       onedhistcolor - histogram color

       retAxes= return all Axes instances

       retCont= return the contour instance

    OUTPUT:

       plot to output device, Axes instances depending on input

    HISTORY:

       2010-03-09 - Written - Bovy (NYU)

    """
    overplot = kwargs.pop('overplot', False)
    gcf = kwargs.pop('gcf', False)
    if not overplot and not gcf:
        pyplot.figure()
    xlabel = kwargs.pop('xlabel', None)
    ylabel = kwargs.pop('ylabel', None)
    zlabel = kwargs.pop('zlabel', None)
    if 'extent' in kwargs:
        extent = kwargs.pop('extent')
    else:
        xlimits = kwargs.pop('xrange', [0, X.shape[1]])
        ylimits = kwargs.pop('yrange', [0, X.shape[0]])
        extent = xlimits + ylimits
    if not 'aspect' in kwargs:
        kwargs['aspect'] = (xlimits[1] - xlimits[0]) / float(ylimits[1] -
                                                             ylimits[0])
    noaxes = kwargs.pop('noaxes', False)
    justcontours = kwargs.pop('justcontours', False)
    if ('contours' in kwargs and kwargs['contours']) or \
            'levels' in kwargs or justcontours or \
            ('cntrmass' in kwargs and kwargs['cntrmass']):
        contours = True
    else:
        contours = False
    kwargs.pop('contours', None)
    if 'levels' in kwargs:
        levels = kwargs['levels']
        kwargs.pop('levels')
    elif contours:
        if 'cntrmass' in kwargs and kwargs['cntrmass']:
            levels = sc.linspace(0., 1., _DEFAULTNCNTR)
        elif True in sc.isnan(sc.array(X)):
            levels = sc.linspace(sc.nanmin(X), sc.nanmax(X), _DEFAULTNCNTR)
        else:
            levels = sc.linspace(sc.amin(X), sc.amax(X), _DEFAULTNCNTR)
    cntrmass = kwargs.pop('cntrmass', False)
    conditional = kwargs.pop('conditional', False)
    cntrcolors = kwargs.pop('cntrcolors', 'k')
    cntrlabel = kwargs.pop('cntrlabel', False)
    cntrlw = kwargs.pop('cntrlw', None)
    cntrls = kwargs.pop('cntrls', None)
    cntrSmooth = kwargs.pop('cntrSmooth', None)
    cntrlabelsize = kwargs.pop('cntrlabelsize', None)
    cntrlabelcolors = kwargs.pop('cntrlabelcolors', None)
    cntrinline = kwargs.pop('cntrinline', None)
    retCumImage = kwargs.pop('retCumImage', False)
    cb = kwargs.pop('colorbar', False)
    shrink = kwargs.pop('shrink', None)
    onedhists = kwargs.pop('onedhists', False)
    onedhistcolor = kwargs.pop('onedhistcolor', 'k')
    retAxes = kwargs.pop('retAxes', False)
    retCont = kwargs.pop('retCont', False)
    if onedhists:
        if overplot or gcf: fig = pyplot.gcf()
        else: fig = pyplot.figure()
        nullfmt = NullFormatter()  # no labels
        # definitions for the axes
        left, width = 0.1, 0.65
        bottom, height = 0.1, 0.65
        bottom_h = left_h = left + width
        rect_scatter = [left, bottom, width, height]
        rect_histx = [left, bottom_h, width, 0.2]
        rect_histy = [left_h, bottom, 0.2, height]
        axScatter = pyplot.axes(rect_scatter)
        axHistx = pyplot.axes(rect_histx)
        axHisty = pyplot.axes(rect_histy)
        # no labels
        axHistx.xaxis.set_major_formatter(nullfmt)
        axHistx.yaxis.set_major_formatter(nullfmt)
        axHisty.xaxis.set_major_formatter(nullfmt)
        axHisty.yaxis.set_major_formatter(nullfmt)
        fig.sca(axScatter)
    ax = pyplot.gca()
    ax.set_autoscale_on(False)
    if conditional:
        plotthis = X / sc.tile(sc.sum(X, axis=0), (X.shape[1], 1))
    else:
        plotthis = X
    if not justcontours:
        out = pyplot.imshow(plotthis, extent=extent, **kwargs)
    if not overplot:
        pyplot.axis(extent)
        _add_axislabels(xlabel, ylabel)
        _add_ticks()
    #Add colorbar
    if cb and not justcontours:
        if shrink is None:
            shrink = sc.amin([float(kwargs.pop('aspect', 1.)) * 0.87, 1.])
        CB1 = pyplot.colorbar(out, shrink=shrink)
        if not zlabel is None:
            if zlabel[0] != '$':
                thiszlabel = r'$' + zlabel + '$'
            else:
                thiszlabel = zlabel
            CB1.set_label(thiszlabel)
    if contours or retCumImage:
        aspect = kwargs.get('aspect', None)
        origin = kwargs.get('origin', None)
        if cntrmass:
            #Sum from the top down!
            plotthis[sc.isnan(plotthis)] = 0.
            sortindx = sc.argsort(plotthis.flatten())[::-1]
            cumul = sc.cumsum(sc.sort(plotthis.flatten())[::-1]) / sc.sum(
                plotthis.flatten())
            cntrThis = sc.zeros(sc.prod(plotthis.shape))
            cntrThis[sortindx] = cumul
            cntrThis = sc.reshape(cntrThis, plotthis.shape)
        else:
            cntrThis = plotthis
        if contours:
            if not cntrSmooth is None:
                cntrThis = ndimage.gaussian_filter(cntrThis,
                                                   cntrSmooth,
                                                   mode='nearest')
            cont = pyplot.contour(cntrThis,
                                  levels,
                                  colors=cntrcolors,
                                  linewidths=cntrlw,
                                  extent=extent,
                                  aspect=aspect,
                                  linestyles=cntrls,
                                  origin=origin)
            if cntrlabel:
                pyplot.clabel(cont,
                              fontsize=cntrlabelsize,
                              colors=cntrlabelcolors,
                              inline=cntrinline)
    if noaxes:
        ax.set_axis_off()
    #Add onedhists
    if not onedhists:
        if retCumImage:
            return cntrThis
        elif retAxes:
            return pyplot.gca()
        elif retCont:
            return cont
        elif justcontours:
            return cntrThis
        else:
            return out
    histx = sc.nansum(X.T, axis=1) * m.fabs(ylimits[1] - ylimits[0]) / X.shape[
        1]  #nansum bc nan is *no dens value*
    histy = sc.nansum(X.T,
                      axis=0) * m.fabs(xlimits[1] - xlimits[0]) / X.shape[0]
    histx[sc.isnan(histx)] = 0.
    histy[sc.isnan(histy)] = 0.
    dx = (extent[1] - extent[0]) / float(len(histx))
    axHistx.plot(sc.linspace(extent[0] + dx, extent[1] - dx, len(histx)),
                 histx,
                 drawstyle='steps-mid',
                 color=onedhistcolor)
    dy = (extent[3] - extent[2]) / float(len(histy))
    axHisty.plot(histy,
                 sc.linspace(extent[2] + dy, extent[3] - dy, len(histy)),
                 drawstyle='steps-mid',
                 color=onedhistcolor)
    axHistx.set_xlim(axScatter.get_xlim())
    axHisty.set_ylim(axScatter.get_ylim())
    axHistx.set_ylim(0, 1.2 * sc.amax(histx))
    axHisty.set_xlim(0, 1.2 * sc.amax(histy))
    if retCumImage:
        return cntrThis
    elif retAxes:
        return (axScatter, axHistx, axHisty)
    elif justcontours:
        return cntrThis
    else:
        return out
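
Note: the one-d side histograms in the bovy_dens2d variants are marginals of the density image: nansum collapses one axis (NaN cells count as no density, per the inline comment) and the result is scaled by the extent of the integrated-out axis over the number of bins. A small demonstration of the nansum marginalization, with a toy image and made-up limits:

import numpy as np

X = np.array([[1.0, 2.0, np.nan],
              [3.0, 4.0, 5.0]])          # rows = y, cols = x; NaN = no density
xlimits, ylimits = [0.0, 3.0], [0.0, 2.0]
# Mirrors the histx/histy expressions above.
histx = np.nansum(X.T, axis=1) * abs(ylimits[1] - ylimits[0]) / X.shape[1]
histy = np.nansum(X.T, axis=0) * abs(xlimits[1] - xlimits[0]) / X.shape[0]
print(histx)  # column sums [4. 6. 5.] rescaled
print(histy)  # row sums [3. 12.] rescaled
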
Example #39
    def processdata(self):
        """ This will perform the data processing and create the ACF estimates
        for both the data and noise.
        Inputs:
        timevec - A numpy array of times in seconds where the integration will begin.
        inttime - The integration time in seconds.
        lagfunc - A function that will make the desired lag products.
        Outputs:
        DataLags: A dictionary with keys 'ACF', 'Pow', 'Pulses', 'Time' and
        'AddedNoiseACF' that holds the numpy arrays of the data.
        NoiseLags: A dictionary with keys 'ACF', 'Pow', 'Pulses' and 'Time' that
        holds the numpy arrays of the noise.
        """
        timevec = self.simparams['Timevec'] + self.timeoffset
        inttime = self.simparams['Tint']
        # Get array sizes
        NNs = int(self.simparams['NNs'])
        range_gates = self.simparams['Rangegates']
        N_rg = len(range_gates)  # number of range gates
        pulse = self.simparams['Pulse']
        Pulselen = len(pulse)
        N_samps = N_rg + Pulselen - 1
        simdtype = self.simparams['dtype']
        Ntime = len(timevec)

        if 'outangles' in self.simparams:
            Nbeams = len(self.simparams['outangles'])
        else:
            Nbeams = len(self.simparams['angles'])


        # Choose type of processing
        if self.simparams['Pulsetype'].lower() == 'barker':
            lagfunc=BarkerLag
            Nlag=1
        else:
            lagfunc=CenteredLagProduct
            Nlag=Pulselen
        # initialize output arrays
        outdata = sp.zeros((Ntime,Nbeams,N_rg,Nlag),dtype=simdtype)
        outaddednoise = sp.zeros((Ntime,Nbeams,N_rg,Nlag),dtype=simdtype)
        outnoise = sp.zeros((Ntime,Nbeams,NNs-Pulselen+1,Nlag),dtype=simdtype)
        pulses = sp.zeros((Ntime,Nbeams))
        pulsesN = sp.zeros((Ntime,Nbeams))
        timemat = sp.zeros((Ntime,2))
        Ksysvec = self.sensdict['Ksys']
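        # Ksys is the per-beam system constant; the dish-antenna branch below
        # uses it to scale each beam to a common gain before forming lags.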
        # set up arrays that hold the location of pulses that are to be processed together
        infoname = self.datadir / 'INFO.h5'
        # Just going to assume that the info file is in the directory
        infodict = h52dict(str(infoname))
        flist =  infodict['Files']
        file_list = [str(self.datadir/i) for i in flist]
        pulsen_list = infodict['Pulses']
        beamn_list = infodict['Beams']
        time_list = infodict['Time']
        file_loclist = [ifn*sp.ones(len(ifl)) for ifn,ifl in enumerate(beamn_list)]
        if 'NoiseTime' in infodict:
            sridata = True
            tnoiselist = infodict['NoiseTime']
            nfile_loclist = [ifn*sp.ones(len(ifl)) for ifn, ifl in enumerate(tnoiselist)]
        else:
            sridata=False

        pulsen = sp.hstack(pulsen_list).astype(int)  # pulse number
        beamn = sp.hstack(beamn_list).astype(int)  # beam numbers
        ptimevec = sp.hstack(time_list).astype(float)  # time of each pulse
        file_loc = sp.hstack(file_loclist).astype(int)  # index of the file holding each pulse
        if sridata:
            ntimevec = sp.vstack(tnoiselist).astype(float)
            nfile_loc = sp.hstack(nfile_loclist).astype(int)
            outnoise = sp.zeros((Ntime,Nbeams,NNs-Pulselen+1,Nlag),dtype=simdtype)
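            # SRI-format files ship pre-formed noise ACFs on their own time grid
            # (read from 'NoiseDataACF' below), so the noise output is keyed to
            # those records rather than built from raw noise samples.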

        # run the time loop
        print("Forming ACF estimates")

        # For each time, go through and read only the necessary files
        for itn,it in enumerate(timevec):
            print("\tTime {0:d} of {1:d}".format(itn,Ntime))
            # do the bookkeeping to determine locations of data within the files
            cur_tlim = (it,it+inttime)
            curcases = sp.logical_and(ptimevec>=cur_tlim[0],ptimevec<cur_tlim[1])
            # SRI data hack: select the noise records that fall in this window
            if sridata:
                curcases_n=sp.logical_and(ntimevec[:,0]>=cur_tlim[0],ntimevec[:,0]<cur_tlim[1])
                curfileloc_n = nfile_loc[curcases_n]
                curfiles_n = set(curfileloc_n)
            if not sp.any(curcases):
                print("\tNo pulses for time {0:d} of {1:d}, lag data adjusted accordingly".format(itn, Ntime))
                # No pulses fell in this window; trim the output arrays and move on.
                outdata = outdata[:itn]
                outnoise = outnoise[:itn]
                pulses = pulses[:itn]
                pulsesN = pulsesN[:itn]
                timemat = timemat[:itn]
                continue
            pulseset = set(pulsen[curcases])
            poslist = [sp.where(pulsen == item)[0] for item in pulseset]
            pos_all = sp.hstack(poslist)
            curfileloc = file_loc[pos_all]
            # Find the needed files and beam numbers
            curfiles = set(curfileloc)
            beamlocs = beamn[pos_all]
            timemat[itn,0] = ptimevec[pos_all].min()
            timemat[itn,1]=ptimevec[pos_all].max()
            # curdata pulls out all of the data from all of the beams and positions
            curdata = sp.zeros((len(pos_all),N_samps),dtype = simdtype)
            curaddednoise = sp.zeros((len(pos_all),N_samps),dtype = simdtype)
            curnoise = sp.zeros((len(pos_all),NNs),dtype = simdtype)
            # Open files and get required data
            # XXX come up with a way to open new files without rereading data that is already in memory
            for ifn in curfiles:
                curfileit = [sp.where(pulsen_list[ifn] == item)[0] for item in pulseset]
                curfileitvec = sp.hstack(curfileit)
                ifile = file_list[ifn]
                curh5data = h52dict(ifile)
                file_arlocs = sp.where(curfileloc==ifn)[0]
                curdata[file_arlocs] = curh5data['RawData'][curfileitvec]


                curaddednoise[file_arlocs] = curh5data['AddedNoise'].astype(simdtype)[curfileitvec]
                # Read in noise data when you don't have pre-formed noise ACFs
                if not sridata:
                    curnoise[file_arlocs] = curh5data['NoiseData'].astype(simdtype)[curfileitvec]
            #SRI data
            if sridata:
                curnoise = sp.zeros((len(curfileloc_n),Nbeams,NNs-Pulselen+1,Pulselen),dtype = simdtype)
                for ifn in curfiles_n:
                    curfileit_n = sp.where(sp.logical_and(tnoiselist[ifn][:,0]>=cur_tlim[0],tnoiselist[ifn][:,0]<cur_tlim[1]))[0]
                    ifile=file_list[ifn]
                    curh5data_n = h52dict(ifile)
                    file_arlocs = sp.where(curfileloc_n==ifn)[0]
                    curnoise[file_arlocs] = curh5data_n['NoiseDataACF'][curfileit_n]

            # differentiate between phased arrays and dish antennas
            if self.sensdict['Name'].lower() in ['risr','pfisr','risr-n']:
                # After data is read in form lags for each beam
                for ibeam in range(Nbeams):
                    print("\t\tBeam {0:d} of {0:d}".format(ibeam,Nbeams))
                    beamlocstmp = sp.where(beamlocs==ibeam)[0]
                    pulses[itn,ibeam] = len(beamlocstmp)

                    outdata[itn,ibeam] = lagfunc(curdata[beamlocstmp].copy(),
                        numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
                    if sridata:
                        pulsesN[itn,ibeam] = len(curnoise)
                        outnoise[itn,ibeam] = sp.nansum(curnoise[:,ibeam],axis=0)
                    else:
                        pulsesN[itn,ibeam] = len(beamlocstmp)
                        outnoise[itn,ibeam] = lagfunc(curnoise[beamlocstmp].copy(),
                            numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
                    outaddednoise[itn,ibeam] = lagfunc(curaddednoise[beamlocstmp].copy(),
                        numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
            else:
                for ibeam,ibeamlist in enumerate(self.simparams['outangles']):
                    print("\t\tBeam {0:d} of {1:d}".format(ibeam,Nbeams))
                    beamlocstmp = sp.where(sp.in1d(beamlocs,ibeamlist))[0]
                    curbeams = beamlocs[beamlocstmp]
                    ksysmat = Ksysvec[curbeams]
                    ksysmean = Ksysvec[ibeamlist[0]]
                    inputdata = curdata[beamlocstmp].copy()
                    noisedata = curnoise[beamlocstmp].copy()
                    noisedataadd = curaddednoise[beamlocstmp].copy()
                    ksysmult = ksysmean / sp.tile(ksysmat[:, sp.newaxis], (1, inputdata.shape[1]))
                    ksysmultn = ksysmean / sp.tile(ksysmat[:, sp.newaxis], (1, noisedata.shape[1]))
                    ksysmultna = ksysmean / sp.tile(ksysmat[:, sp.newaxis], (1, noisedataadd.shape[1]))
                    pulses[itn,ibeam] = len(beamlocstmp)
                    pulsesN[itn,ibeam] = len(beamlocstmp)
                    outdata[itn,ibeam] = lagfunc(inputdata *ksysmult,
                        numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
                    outnoise[itn,ibeam] = lagfunc(noisedata*ksysmultn,
                        numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
                    outaddednoise[itn,ibeam] = lagfunc(noisedataadd*ksysmultna,
                        numtype=self.simparams['dtype'], pulse=pulse,lagtype=self.simparams['lagtype'])
        # Create output dictionaries and output data
        DataLags = {'ACF':outdata,'Pow':outdata[:,:,:,0].real,'Pulses':pulses,
                    'Time':timemat,'AddedNoiseACF':outaddednoise}
        NoiseLags = {'ACF':outnoise,'Pow':outnoise[:,:,:,0].real,'Pulses':pulsesN,'Time':timemat}
        return (DataLags, NoiseLags)
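
A hedged sketch of the kind of estimator the CenteredLagProduct call above computes: per-pulse lag products x[n]*conj(x[n+lag]) summed over pulses, with sample offsets chosen so each range gate's lags stay centered on that gate. The function name, signature, and centering convention here are assumptions for illustration, not the library's actual implementation.

import numpy as np

def centered_lag_product(rawbeams, numlags):
    """rawbeams: (Npulses, Nsamps) complex voltages for one beam.
    Returns an (N_rg, numlags) array of lag products summed over pulses,
    where N_rg = Nsamps - numlags + 1. Assumed sketch, not library code."""
    (npulses, nsamps) = rawbeams.shape
    n_rg = nsamps - numlags + 1
    acf = np.zeros((n_rg, numlags), dtype=complex)
    for ilag in range(numlags):
        # Shift the sample window so the lag pair stays centered on the gate.
        lead = (numlags - ilag) // 2
        early = rawbeams[:, lead:lead + n_rg]
        late = rawbeams[:, lead + ilag:lead + ilag + n_rg]
        acf[:, ilag] = np.sum(early * np.conj(late), axis=0)
    return acf

# Toy usage: 10 pulses of 50 complex samples, 7 lags -> (44, 7) ACF estimate.
rng = np.random.default_rng(0)
x = rng.standard_normal((10, 50)) + 1j * rng.standard_normal((10, 50))
print(centered_lag_product(x, 7).shape)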