Example #1
def bin_run(run, bin_conf_name, jackreg_col=None):
    """
    Do the binning and write out a file
    """
    import fitsio

    data = read_collated(run)

    binner=Binner(bin_conf_name)
    res = binner.bin(data, jackreg_col=jackreg_col)

    if jackreg_col is None:
        fname=get_binned_file(run, bin_conf_name)
        d=get_binned_dir(run, bin_conf_name)
    else:
        fname=get_jack_file(run, bin_conf_name)
        d=get_jack_dir(run, bin_conf_name)

    if not os.path.exists(d):
        print("Making dir:",d)
        os.makedirs(d)
    
    print("writing:",fname)
    fitsio.write(fname, res, clobber=True)

    for i in range(binner.get_nbin()):
        plt=binner.plot_radec(data,i)
        epsname=fname.replace('.fits','-%02d-radec.eps' % i)
        pngname=epsname.replace('.eps','.png')
        print(pngname)
        plt.write_img(1000,1000,pngname)

    command='cd %s; im2html -p *.png > radec.html' % d
    print("making html file")
    os.system(command)
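A call sketch for the helper above; the run and binning-config names are hypothetical, and the surrounding project helpers (read_collated, Binner, get_binned_file, im2html, ...) are assumed to be importable alongside it:

# Hypothetical names; jackreg_col switches the output to the jackknife-region files.
bin_run('run-001', 'bin-lambda-z')
bin_run('run-001', 'bin-lambda-z', jackreg_col='jack_region')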
Example #2
    def cc_restore_stars(self, algorithm):
        """
        Restores the cookie cut regions back to the compressed image with the
        ORIGINAL pixel values. 
        
        @type self: SuperBit_Compression
        @type image: Numpy Array
        @rtype: None
        """
        regions = []
        index = 0
        y_max = self.original_image.shape[0]
        x_max = self.original_image.shape[1]
        for star in self.cookies:
            cookie = star[0]
            x = star[1]
            y = star[2]
            a_size = star[3]
            b_size = star[4]

            left_x, right_x, up_y, down_y = self.square_cookie(
                a_size, b_size, x, y)

            if algorithm.lower() == "hcomp":
                self.h_compress[up_y:down_y, left_x:right_x] = cookie
            elif algorithm.lower() == "bs":
                self.compressed_image[up_y:down_y, left_x:right_x] = cookie
            index += 1

        if algorithm.lower() == "hcomp":
            fitsio.write("hcomp_" + self.image_name, self.h_compress, \
            header=self.header, clobber=True)
        elif algorithm.lower() == "bs":
            fitsio.write("bs_" + self.image_name, self.compressed_image, \
            header=self.header, clobber=True)
Example #3
def merge_partial_maps(filenames,outfile,**kwargs):
    filenames = np.atleast_1d(filenames)

    header = fitsio.read_header(filenames[0],ext=kwargs.get('ext',1))
    nside = header['NSIDE']
    data = ugali.utils.fileio.load_files(filenames,**kwargs)
    pix = data['PIXEL']

    ndupes = len(pix) - len(np.unique(pix))
    if ndupes > 0:
        msg = '%i duplicate pixels during load.'%(ndupes)
        raise Exception(msg)

    extname = 'DISTANCE_MODULUS'
    distance = ugali.utils.fileio.load_files(filenames,ext=extname)[extname]
    unique_distance = np.unique(distance)
    # Check if distance moduli are the same...
    if np.any(distance[:len(unique_distance)] != unique_distance):
        msg = "Non-matching distance modulus:"
        msg += '\n'+str(distance[:len(unique_distance)])
        msg += '\n'+str(unique_distance)
        raise Exception(msg)

    write_partial_map(outfile,data=data,nside=nside,clobber=True)
    fitsio.write(outfile,{extname:unique_distance},extname=extname)
Example #4
def update_cat(incat, oucat, weight):
    weights = np.load(weight)
    #print(weights)
    cat = ft.read(incat)
    cat['WEIGHT_SYSTOT'] = 1./weights
    ft.write(oucat, cat) # write
    print('read %s\nwrite %s'%(incat, oucat))
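A minimal usage sketch with made-up file names; the .npy file is assumed to hold one weight per catalog row, and the input catalog to already carry a WEIGHT_SYSTOT column:

update_cat(incat='galaxies_in.fits',
           oucat='galaxies_out.fits',
           weight='systot_weights.npy')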
Example #5
def get_pulse_dist(outdir, ind, batch_size=None, ret=False, noise=True):
    if ind % 5000 == 0:
        print("{}".format(ind))
    if batch_size is None:
        batch_size = np.random.randint(1, 5, size=1)
    seed = (int(time.perf_counter() * 1000) * ind) % (4294967295)  # 2**32 - 1
    np.random.seed(seed=seed)
    f_0 = np.random.uniform(0., 512., size=batch_size)[..., np.newaxis,
                                                       np.newaxis]  #in ms
    amp = np.random.uniform(0.25, 2.5, size=batch_size)[..., np.newaxis,
                                                        np.newaxis]
    width = np.random.uniform(1., 20., size=batch_size)[..., np.newaxis,
                                                        np.newaxis]
    slope = np.random.uniform(-15, 15, size=batch_size)[..., np.newaxis,
                                                        np.newaxis]
    t = np.arange(16)[np.newaxis, ..., np.newaxis]
    f = np.arange(512)[np.newaxis, np.newaxis, ...]
    f0_all = f_0 + slope * t
    pulse = np.sum(amp * np.exp(-0.5 * (f - f0_all)**2 / width**2.), axis=0)
    if noise:
        noise_level = 0.2
        pulse += noise_level * np.random.random(pulse.shape)
    if outdir is not None:
        fitsio.write(outdir + "img_" + str(ind) + '.fits', pulse)
        scipy.misc.imsave(outdir + "pulse_" + str(ind) + '.png', pulse)
    if ret:
        return pulse
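Calling the simulator above without touching disk (a sketch): passing outdir=None skips the FITS/PNG output, and ret=True returns the simulated pulse array directly.

pulse = get_pulse_dist(outdir=None, ind=0, batch_size=3, ret=True)
print(pulse.shape)  # (16, 512): 16 time samples by 512 frequency channels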
Example #6
def imstat(dataMap, outfn='stats'):
    from astropy.stats import sigma_clip
    from scipy.stats import mode, scoreatpercentile
    array_stats = bokutil.array_stats
    fnlen = len(os.path.basename(dataMap['files'][0]))
    st = np.zeros(len(dataMap['flatSequence']),
                  dtype=[('file', 'S%d' % fnlen), ('expTime', 'f4'),
                         ('median', '16f4'),
                         ('mean', '16f4'), ('mode', '16f4'), ('iqr25', '16f4'),
                         ('iqr75', '16f4'), ('iqr10', '16f4'),
                         ('iqr90', '16f4')])
    for _i, i in enumerate(dataMap['flatSequence']):
        expTime = dataMap['expTime'][i]
        fn = os.path.basename(dataMap['files'][i])
        fits = fitsio.FITS(dataMap['files'][i])
        print('%s %4.1f  ' % (fn, expTime), end='')
        st['file'][_i] = fn
        st['expTime'][_i] = expTime
        for j, extn in enumerate(['IM%d' % n for n in range(1, 17)]):
            modeVal, pix = array_stats(fits[extn].read()[dataMap['statsPix']],
                                       method='mode',
                                       retArray=True)
            st['mode'][_i, j] = modeVal
            st['mean'][_i, j] = pix.mean()
            st['median'][_i, j] = np.ma.median(pix)
            st['iqr25'][_i, j] = scoreatpercentile(pix, 25)
            st['iqr75'][_i, j] = scoreatpercentile(pix, 75)
            st['iqr10'][_i, j] = scoreatpercentile(pix, 10)
            st['iqr90'][_i, j] = scoreatpercentile(pix, 90)
            print('%5d ' % (modeVal), end='')
        print()
    fitsio.write(outfn + '.fits', st, clobber=True)
Example #7
def set_astro_refine(coadd_run, srclist):
    df=desdb.files.DESFiles()

    outdir=df.dir('astro_refine_fits',coadd_run=coadd_run)
    if not os.path.exists(outdir):
        try:
            print("making dir:",outdir)
            os.makedirs(outdir)
        except OSError:
            pass

    for s in srclist:
        head_file=s['astro_refine']
        fits_file=df.url('astro_refine_fits',
                         coadd_run=coadd_run,
                         expname=s['expname'],
                         ccd=s['ccd'])
        s['wcs_file'] = fits_file

        if not os.path.exists(fits_file):

            print("reading:",head_file)
            hdata = fitsio.read_scamp_head(head_file)

            print("writing:",fits_file)
            fitsio.write(fits_file, None, header=hdata, clobber=True)
Example #8
def run_metacal(*, tilename, output_meds_dir, bands, seed):
    """Run metacal on a tile.

    Parameters
    ----------
    tilename : str
        The DES coadd tile on which to run metacal.
    output_meds_dir : str
        The output DESDATA/MEDS_DIR for the simulation data products.
    bands : str
        The bands on which to run metacal.
    seed : int
        The seed for the global RNG.
    """
    meds_files = [
        get_meds_file_path(meds_dir=output_meds_dir,
                           medsconf=MEDSCONF,
                           tilename=tilename,
                           band=band) for band in bands
    ]
    with NGMixMEDS(meds_files[0]) as m:
        cat = m.get_cat()
    logger.info(' meds files %s', meds_files)

    n_chunks = joblib.externals.loky.cpu_count()
    n_obj_per_chunk = cat.size // n_chunks
    if n_obj_per_chunk * n_chunks < cat.size:
        n_obj_per_chunk += 1
    assert n_obj_per_chunk * n_chunks >= cat.size
    logger.info(' running metacal for %d objects in %d chunks', cat.size,
                n_chunks)

    seeds = np.random.RandomState(seed=seed).randint(1, 2**30, size=n_chunks)

    jobs = []
    for chunk in range(n_chunks):
        start = chunk * n_obj_per_chunk
        end = min(start + n_obj_per_chunk, cat.size)
        jobs.append(
            joblib.delayed(_run_mcal_one_chunk)(meds_files, start, end,
                                                seeds[chunk]))

    with joblib.Parallel(n_jobs=n_chunks,
                         backend='loky',
                         verbose=50,
                         max_nbytes=None) as p:
        outputs = p(jobs)

    assert not all([o is None for o in outputs]), ("All metacal fits failed!")

    output = eu.numpy_util.combine_arrlist(
        [o for o in outputs if o is not None])
    logger.info(' %d of %d metacal fits worked!', output.size, cat.size)

    mcal_pth = get_mcal_file_path(meds_dir=output_meds_dir,
                                  medsconf=MEDSCONF,
                                  tilename=tilename)
    logger.info(' metacal output: "%s"', mcal_pth)
    make_dirs_for_file(mcal_pth)
    fitsio.write(mcal_pth, output, clobber=True)
Example #9
def saveClass(q,s,r,ra,dec,g,a,filename):
    """
    NAME:
       saveClass
    PURPOSE:
       save the classifications
    INPUT:
       q, s, r, ra, dec, g, a
       filename - name of the file that the output will be saved to
    OUTPUT:
       (none)
    HISTORY:
       2011-01-30 - Written - Bovy (NYU)
    """
    #Create recarray
    ndata= len(q)
    out= numpy.recarray((ndata,),
                        dtype=[('ra','f8'),
                               ('dec','f8'),
                               ('gamma','f8'),
                               ('loga','f8'),
                               ('logpx_qso','f8'),
                               ('logpx_star','f8'),
                               ('logpx_rrlyrae','f8')])
    out.logpx_qso= q
    out.logpx_star= s
    out.logpx_rrlyrae= r
    out.ra= ra
    out.dec= dec
    out.gamma= g
    out.loga= a
    #Now write to fits
    fitsio.write(filename,out,clobber=True)
    return None
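A usage sketch with synthetic inputs; all values are made up, and fitsio is assumed to be imported at module level as in the function body:

import numpy

n = 1000
q = numpy.random.uniform(-10., 0., n)    # hypothetical log p(x|QSO)
s = numpy.random.uniform(-10., 0., n)    # hypothetical log p(x|star)
r = numpy.random.uniform(-10., 0., n)    # hypothetical log p(x|RR Lyrae)
ra = numpy.random.uniform(0., 360., n)
dec = numpy.random.uniform(-10., 10., n)
g = numpy.random.uniform(0., 1., n)      # gamma
a = numpy.random.uniform(-2., 0., n)     # log a
saveClass(q, s, r, ra, dec, g, a, 'classifications.fits')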
Example #10
    def save(self, filename):
        """Save to a FITS file

        :Parameters:
            - `filename`: the name of the FITS file
        """
        fitsio.write(filename, self.mask, header=self.header, clobber=True)
Example #11
    def write_fit(self):
        import fitsio


        npars=self['npars']
        output=zeros(1, dtype=[('n','f8'),
                               ('hlr','f8'),
                               ('arate','f8'),
                               ('chi2per','f8'),
                               ('dof','f8'),
                               ('pars','f8',npars),
                               ('pars_norm','f8',npars),
                               ('pars_err','f8',npars),
                               ('pars_cov','f8',(npars,npars))])


        output['n'] = self['n']
        output['hlr'] = self['hlr']
        output['arate'] = self.res['arate']
        output['chi2per'] = self.res['chi2per']
        output['dof'] = self.res['dof']
        output['pars'][0,:] = self.res['pars']
        output['pars_norm'][0,:] = self.res['pars_norm']
        output['pars_err'][0,:] = self.res['pars_err']
        output['pars_cov'][0,:,:] = self.res['pars_cov']

        print(self.fits_name)
        fitsio.write(self.fits_name, output, clobber=True)
Example #12
    def write_membership(self,filename):
        """
        Write a catalog file of the likelihood region including
        membership properties.

        Parameters:
        -----------
        filename : output filename
        
        Returns:
        --------
        None
        """
        # Column names
        name_objid = self.config['catalog']['objid_field']
        name_mag_1 = self.config['catalog']['mag_1_field']
        name_mag_2 = self.config['catalog']['mag_2_field']
        name_mag_err_1 = self.config['catalog']['mag_err_1_field']
        name_mag_err_2 = self.config['catalog']['mag_err_2_field']

        # Coordinate conversion
        #ra,dec = gal2cel(self.catalog.lon,self.catalog.lat)
        glon,glat = self.catalog.glon_glat
        ra,dec    = self.catalog.ra_dec

        # Angular and isochrone separations
        sep = angsep(self.source.lon,self.source.lat,
                     self.catalog.lon,self.catalog.lat)
        isosep = self.isochrone.separation(self.catalog.mag_1,self.catalog.mag_2)

        # If size becomes an issue we can make everything float32
        data = odict()
        data[name_objid]     = self.catalog.objid
        data['GLON']         = glon
        data['GLAT']         = glat
        data['RA']           = ra
        data['DEC']          = dec
        data[name_mag_1]     = self.catalog.mag_1
        data[name_mag_err_1] = self.catalog.mag_err_1
        data[name_mag_2]     = self.catalog.mag_2
        data[name_mag_err_2] = self.catalog.mag_err_2
        data['COLOR']        = self.catalog.color
        data['ANGSEP']       = sep.astype(np.float32)
        data['ISOSEP']       = isosep.astype(np.float32)
        data['PROB']         = self.p.astype(np.float32)
     
        # HIERARCH allows header keywords longer than 8 characters
        header = []
        for param,value in self.source.params.items():
            card = dict(name='HIERARCH %s'%param.upper(),
                        value=value.value,
                        comment=param)
            header.append(card)
        card = dict(name='HIERARCH %s'%'TS',value=self.ts(),
                    comment='test statistic')
        header.append(card)
        card = dict(name='HIERARCH %s'%'TIMESTAMP',value=time.asctime(),
                    comment='creation time')
        header.append(card)
        fitsio.write(filename,data,header=header,clobber=True)
Example #13
def gen_write_fits(file_name, col_names, columns):
    """Write some columns to an output FITS file with the given column names.

    :param file_name:   The name of the file to write to.
    :param col_names:   A list of columns names for the given columns.
    :param columns:     A list of numpy arrays with the data to write.
    """
    import numpy

    try:
        import fitsio
        data = numpy.empty(len(columns[0]),
                           dtype=[(name, 'f8') for name in col_names])
        for (name, col) in zip(col_names, columns):
            data[name] = col
        fitsio.write(file_name, data, clobber=True)
    except ImportError:
        try:
            import astropy.io.fits as pyfits
        except:
            import pyfits

        cols = pyfits.ColDefs([
            pyfits.Column(name=name, format='D', array=col)
            for (name, col) in zip(col_names, columns)
        ])

        # Depending on the version of pyfits, one of these should work:
        try:
            tbhdu = pyfits.BinTableHDU.from_columns(cols)
        except:
            tbhdu = pyfits.new_table(cols)
        tbhdu.writeto(file_name, clobber=True)
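A minimal usage sketch for gen_write_fits with made-up column data:

import numpy

ra = numpy.linspace(10.0, 11.0, 100)
dec = numpy.linspace(-5.0, -4.0, 100)
gen_write_fits('coords.fits', ['ra', 'dec'], [ra, dec])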
Example #14
  def create_random_cat_finalise(label=''):
    """
    This function removes duplicate randoms from the results of create_random_cat() and writes a fits file with the random catalog.
    """
    import os

    def unique(a):
      order = np.lexsort(a.T)
      a = a[order]
      diff = np.diff(a, axis=0)
      ui = np.ones(len(a), 'bool')
      ui[1:] = (diff != 0).any(axis=1) 
      return a[ui],ui

    a=np.vstack((np.load(label+'ra.npy'),np.load(label+'dec.npy'))).T
    u,i=unique(a)
    a=a[i]

    ran=np.empty(len(a), dtype=[('ra','f8')]+[('dec','f8')])
    ran['ra']=a[:,0].T
    ran['dec']=a[:,1].T

    os.remove(label+'ra.npy')
    os.remove(label+'dec.npy')

    fio.write(label+'random.fits.gz',ran,clobber=True)
    
    return
Example #15
def process_file(header, newfilename=None):
    fac = 8
    shape = int(header['NAXIS1']) // fac, int(header['NAXIS2']) // fac
    header['NAXIS1'] = shape[0]
    header['NAXIS2'] = shape[1]
    header['CRPIX1'] = shape[0] / 2 + 0.5
    header['CRPIX2'] = shape[1] / 2 + 0.5
    header['CD1_1'] = float(header['CD1_1']) * fac
    header['CD2_2'] = float(header['CD2_2']) * fac
    header['IMTYPE'] = 'ebv'
    header.pop('FILTER', None)
    y, x = numpy.array(numpy.indices(shape), dtype='f8').reshape(2, -1)
    x += 0.5
    y += 0.5
    world = wcs_tangent.pix2ang_hdr(numpy.array((x, y)),
                                    header,
                                    zero_offset=True)
    ebv, junk = sfdmap.ebv(world[0], world[1])
    ebv = ebv.reshape(shape)
    print(newfilename, world.max(axis=1), world.min(axis=1))
    try:
        os.unlink(newfilename)
    except OSError:
        pass
    try:
        os.makedirs(os.path.dirname(newfilename))
    except OSError:
        pass
    fitsio.write(newfilename, ebv, header=header)
Example #16
  def footprint_area(cat,ngal=1,mask=None,nside=4096,nest=True,label=''):
    import healpy as hp
    import matplotlib
    matplotlib.use ('agg')
    import matplotlib.pyplot as plt
    # plt.style.use('/home/troxel/SVA1/SVA1StyleSheet.mplstyle')
    from matplotlib.colors import LogNorm
    import pylab

    mask=CatalogMethods.check_mask(cat.coadd,mask)

    if not hasattr(cat, 'pix'):
      cat.pix=CatalogMethods.radec_to_hpix(cat.ra,cat.dec,nside=nside,nest=True)
    area=hp.nside2pixarea(nside)*(180./np.pi)**2
    print('pixel area (arcmin^2)', area*60**2)
    mask1=np.bincount(cat.pix[mask])>ngal
    print('footprint area (deg^2)', np.sum(mask1)*area)

    pix=np.arange(len(mask1))[mask1]
    print(pix)
    tmp=np.zeros((12*nside**2), dtype=[('hpix','int')])
    tmp['hpix'][pix.astype(int)]=1
    print(tmp['hpix'][pix.astype(int)])
    fio.write('footprint_hpix'+label+'.fits.gz',tmp,clobber=True)

    tmp2=np.zeros(12*nside**2)
    tmp2[pix.astype(int)]=1
    hp.cartview(tmp2,nest=True)
    plt.savefig('footprint_hpix'+label+'.png')
    plt.close()

    return 
Example #17
def write_fits(postdata, bins, outfile, raref, decref, weights=None):
    """
    write posterior data as fits file
    """
    xmin = postdata[:,0].min() - 0.1*(postdata[:,0].max() - postdata[:,0].min())
    xmax = postdata[:,0].max() + 0.1*(postdata[:,0].max() - postdata[:,0].min())

    ymin = postdata[:,1].min() - 0.1*(postdata[:,1].max() - postdata[:,1].min())
    ymax = postdata[:,1].max() + 0.1*(postdata[:,1].max() - postdata[:,1].min())

    H, X, Y = np.histogram2d(postdata[:,0].flatten(), postdata[:,1].flatten(), \
                             bins=bins, range=[[xmin, xmax], [ymin, ymax]], \
                             weights=weights)
    H = H.astype(np.float32)

    dx = X[1] - X[0]
    dy = Y[1] - Y[0]
    filename = "%s_post.fits" % (outfile)

    header   = OrderedDict()
    header["CTYPE1"]  = "RA---TAN"
    header["CTYPE2"]  = "DEC--TAN"
    header["CUNIT1"]  = "deg"
    header["CUNIT2"]  = "deg"
    header["CRVAL1"]  = raref
    header["CDELT1"]  = dx
    header["CRVAL2"]  = decref
    header["CDELT2"]  = dy
    header["WCSAXES"] = 2
    header["CRPIX1"]  = (-X[0])/dx
    header["CRPIX2"]  = (-Y[0])/dy
    header["EQUINOX"] = 2000

    fitsio.write(filename, H, header=header)
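A call sketch with synthetic posterior samples; the source name and reference coordinates are made up, and the module-level imports used by the function (numpy as np, fitsio, OrderedDict) are assumed:

import numpy as np

# 2-column array of posterior (RA, Dec) samples in degrees.
postdata = np.random.normal(loc=[150.1, 2.2], scale=0.01, size=(10000, 2))
write_fits(postdata, bins=64, outfile='mysource', raref=150.1, decref=2.2)
# -> writes mysource_post.fits with a TAN WCS built from the histogram grid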
Example #18
 def test_load_append_table_memory_chunk_fits(self):
     print('\n*** test_load_append_table_memory_chunk_fits ***\n')
     data = create_test_data()
     for i in range(4):
         data = np.concatenate((data, data))
     fitsio.write(self.fitsfile, data, clobber=True)
     self.assertTrue(os.path.exists(self.fitsfile))
     # memsize
     self.con.drop_table(self.tablename)
     command = "load_table %s --tablename %s --memsize %s --chunksize %s" % (
         self.fitsfile, self.tablename, self.memsize, self.chunk * 10)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 16)
     # appending
     command = "append_table %s --tablename %s --memsize %s --chunksize %s" % (
         self.fitsfile, self.tablename, self.memsize, self.chunk * 200)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 2 * 16)
     # end
     os.remove(self.fitsfile)
     self.con.drop_table(self.tablename)
Example #19
 def _write_checkpoint(self, tm):
     print('checkpointing at', tm, 'seconds', file=stderr)
     print(self.checkpoint_file, file=stderr)
     fitsio.write(self.checkpoint_file,
                  self.data,
                  clobber=True)
     self.checkpointed=True
Example #20
 def _consumeRead(self, path, cmd, header=None):
     #  /home/data/wincharis/H2RG-C17206-ASIC-104/UpTheRamp/20160712210126/H2RG_R01_M01_N01.fits
     dirName, fileName = os.path.split(path)
     cmd.diag('text="checking %s"' % (fileName))
     match = re.match(r'^H2RG_R0*(\d+)_M0*(\d+)_N0*(\d+)\.fits', fileName)
     if match is None:
         cmd.warn("failed to split up filename: %s" % (fileName))
         return
     rampN, groupN, readN = [int(m) for m in match.group(1,2,3)]
     cmd.diag('text="new read %d %d %d"' % (rampN, groupN, readN))
     if readN == 1:
         if header is not None:
             cmd.diag('text="getting header"')
             subaruHdr = header
         else:
             subaruHdr = pyfits.Header()
         cards = [dict(name='IDLPATH', value=dirName)]
         for c in subaruHdr.cards:
             cards.append(dict(name=c.keyword, value=c.value, comment=c.comment))
         phdu = fitsio.FITSHDR(cards)
         fitsio.write(self.outfile, None, header=phdu, clobber=True)
         cmd.diag('text="new file %s"' % (self.outfile))
         
     inData, inHdr = fitsio.read(path, header=True)
     stackFile = fitsio.FITS(self.outfile, mode='rw')
     stackFile.write(inData, header=inHdr)
     stackFile[-1].write_checksum()
     stackFile.close()
     cmd.inform('readN=%d,%d,%d,%s' % (rampN,groupN,readN,self.outfile))
Example #21
    def saveAtm(self, filename, clobber=False):
        """
        Save the atmosphere to a fits file

        parameters
        ----------
        filename: string
            output filename
        clobber: bool, optional
            clobber existing output file.  Default is False
        """

        import fitsio

        hdr = fitsio.FITSHDR()
        hdr['PMB'] = self.pmb
        hdr['PWV'] = self.pwv
        hdr['O3'] = self.o3
        hdr['TAU'] = self.tau
        hdr['LAMNORM'] = self.lambdaNorm
        hdr['ALPHA'] = self.alpha
        hdr['ZENITH'] = self.zenith
        hdr['CO2MX'] = self.co2MX
        hdr['ELEV'] = self.elevation

        fitsio.write(filename, self.atm, header=hdr, clobber=clobber)
Example #22
    def _write_hpx_fits(pixlist):
        """from files that touch a pixel, write out objects in each pixel"""
        pixnum, files = pixlist
        # ADM only proceed if some files touch a pixel.
        if len(files) > 0:
            # ADM track if it's our first time through the files loop.
            first = True
            # ADM Read in files that touch a pixel.
            for file in files:
                filename = os.path.join(fitsdir, file)
                objs = fitsio.read(filename)
                # ADM only retain objects in the correct pixel.
                pix = radec2pix(nside, objs["RA"], objs["DEC"])
                if first:
                    done = objs[pix == pixnum]
                    first = False
                else:
                    done = np.hstack([done, objs[pix == pixnum]])
            # ADM construct the name of the output file.
            outfilename = 'healpix-{:05d}.fits'.format(pixnum)
            outfile = os.path.join(hpxdir, outfilename)
            # ADM write out the file.
            hdr = fitsio.FITSHDR()
            hdr['HPXNSIDE'] = nside
            hdr['HPXNEST'] = True
            fitsio.write(outfile, done, extname='URATHPX', header=hdr)

        return
Example #23
def save_file(filename, data, header, format, unitdict=None):
    basename = os.path.splitext(filename)[0]
    if format == 'fits':
        units = None
        # ADM derive the units from the data columns if possible.
        if unitdict is not None:
            # ADM some columns from external-match files might not have
            # ADM units, so pass an empty string for external columns.
            units = [unitdict[col] if col in unitdict.keys() else ""
                     for col in data.dtype.names]

        # ADM add the external match code version header dependency.
        dep = [int(key.split("DEPNAM")[-1]) for key in header.keys()
               if 'DEPNAM' in key]
        if len(dep) == 0:
            nextdep = 0
        else:
            nextdep = np.max(dep) + 1
        header["DEPNAM{:02d}".format(nextdep)] = 'match_external'
        header["DEPVER{:02d}".format(nextdep)] = git_version()

        filename = basename + '.fits'
        fitsio.write(filename, data, extname='MATCHED', header=header,
                     clobber=True, units=units)
    elif format == 'hdf5':
        filename = basename + '.hdf5'
        import h5py
        with h5py.File(filename, 'w') as ff:
            dset = ff.create_dataset('MATCHED', data=data)
            for key in header:
                dset.attrs[key] = header[key]
    else:
        raise ValueError("Unknown format")
Example #24
 def test_load_append_table_memory_fits(self):
     print('\n*** test_load_append_table_memory_fits ***\n')
     data = create_test_data()
     for i in range(4):
         data = np.concatenate((data, data))
     fitsio.write(self.fitsfile, data, clobber=True)
     self.assertTrue(os.path.exists(self.fitsfile))
     # memsize
     self.con.drop_table(self.tablename)
     self.assertTrue(self.con.load_table(
         self.fitsfile, name=self.tablename, memsize=self.memsize))
     cursor = self.con.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 16)
     # appending
     self.assertTrue(self.con.append_table(
         self.fitsfile, name=self.tablename, memsize=self.memsize))
     cursor = self.con.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 2 * 16)
     # end
     os.remove(self.fitsfile)
     self.con.drop_table(self.tablename)
Example #25
def main():
    args = get_args()

    shear_true = [args.shear, 0.00]
    rng = np.random.RandomState(args.seed)

    # let's just do R11 for simplicity and to speed up this example; typically
    # the off diagonal terms are negligible, and R11 and R22 are usually
    # consistent

    dlist = []
    for i in progress(args.ntrial, miniters=10):

        obs = make_sim_obs(
            rng=rng, noise=args.noise, shear=shear_true, show=args.show,
        )
        tdata = get_all_moments(obs=obs, rng=rng)
        dlist.append(tdata)

    data = np.hstack(dlist)

    print_shear(data, shear_true)

    if args.output is not None:
        import fitsio
        print('writing:', args.output)
        fitsio.write(args.output, data, clobber=True)
Example #26
    def _write_gaia_fits(infile):
        """read an input name for a csv file and write it to FITS"""
        outbase = os.path.basename(infile)
        outfilename = "{}.fits".format(outbase.split(".")[0])
        outfile = os.path.join(fitsdir, outfilename)
        fitstable = ascii.read(infile, format='csv')

        # ADM need to convert 5-string values to boolean.
        cols = np.array(fitstable.dtype.names)
        boolcols = cols[np.hstack(fitstable.dtype.descr)[1::2] == '<U5']
        for col in boolcols:
            fitstable[col] = fitstable[col] == 'true'

        # ADM only write out the columns we need for targeting.
        nobjs = len(fitstable)
        done = np.zeros(nobjs, dtype=ingaiadatamodel.dtype)
        for col in done.dtype.names:
            if col == 'REF_CAT':
                done[col] = 'G2'
            else:
                done[col] = fitstable[col.lower()]
        fitsio.write(outfile, done, extname='GAIAFITS')

        # ADM return the HEALPixels that this file touches.
        pix = set(radec2pix(nside, fitstable["ra"], fitstable["dec"]))
        return [pix, os.path.basename(outfile)]
Example #27
    def _get_gaia_matches(fnwdir):
        '''wrapper on match_gaia_to_primary() given a file name'''
        # ADM extract the output file name.
        fn = os.path.basename(fnwdir)
        outfile = '{}/{}'.format(outdir, fn.replace(".fits", ender))

        # ADM read in the objects.
        objs, hdr = io.read_tractor(fnwdir, header=True)

        # ADM match to Gaia sources.
        gaiainfo = match_gaia_to_primary(objs)
        log.info(
            'Done with Gaia match for {} primary objects...t = {:.1f}s'.format(
                len(objs),
                time() - start))

        # ADM remove the GAIA_RA, GAIA_DEC columns as they aren't
        # ADM in the imaging surveys data model.
        gaiainfo = pop_gaia_coords(gaiainfo)

        # ADM add the Gaia column information to the sweeps array.
        for col in gaiainfo.dtype.names:
            objs[col] = gaiainfo[col]

        fitsio.write(outfile, objs, extname='SWEEP', header=hdr, clobber=True)
        return True
Example #28
    def testTableWriteRead(self):
        """
        Test a basic table write, data and a header, then reading back in to
        check the values
        """

        fname=tempfile.mktemp(prefix='fitsio-TableWrite-',suffix='.fits')
        try:
            with fitsio.FITS(fname,'rw',clobber=True) as fits:

                try:
                    fits.write_table(self.data, header=self.keys, extname='mytable')
                    write_success=True
                except:
                    write_success=False

                self.assertTrue(write_success,"testing write does not raise an error")
                if not write_success:
                    self.skipTest("cannot test result if write failed")

                d = fits[1].read()
                self.compare_rec(self.data, d, "table read/write")

                h = fits[1].read_header()
                self.compare_headerlist_header(self.keys, h)

            # see if our convenience functions are working
            fitsio.write(fname, self.data2, 
                         extname="newext", 
                         header={'ra':335.2,'dec':-25.2})
            d = fitsio.read(fname, ext='newext')
            self.compare_rec(self.data2, d, "table data2")
            # now test read_column
            with fitsio.FITS(fname) as fits:

                for f in self.data.dtype.names:
                    d = fits[1].read_column(f)
                    self.compare_array(self.data[f], d, "table 1 single field read '%s'" % f)

                for f in self.data2.dtype.names:
                    d = fits['newext'].read_column(f)
                    self.compare_array(self.data2[f], d, "table 2 single field read '%s'" % f)

                # now list of columns
                for cols in [['u2scalar','f4vec','Sarr'],
                             ['f8scalar','u2arr','Sscalar']]:
                    d = fits[1].read(columns=cols)
                    for f in d.dtype.names: 
                        self.compare_array(self.data[f][:], d[f], "test column list %s" % f)


                    rows = [1,3]
                    d = fits[1].read(columns=cols, rows=rows)
                    for f in d.dtype.names: 
                        self.compare_array(self.data[f][rows], d[f], "test column list %s row subset" % f)

        finally:
            if os.path.exists(fname):
                #pass
                os.remove(fname)
Example #29
    def go(self):
        """
        make fake catalog files for make-meds-input, and call
        the code to make the meds scripts etc.

        """

        tiles=numpy.unique( self.data['tilename'] )
        tiles.sort()

        ntile=len(tiles)
        for i,tile in enumerate(tiles):
            print('-'*70)
            print('%s/%s' % (i+1, ntile))
            run = self.get_run(tile)
            out=self.make_fake_tile_cat(tile)

            # currently the same for all bands
            for band in ['g','r','i','z','Y']:
                fname=self.get_fake_tile_fname(tile, band)
                print()
                print(fname)
                fitsio.write(fname, out, clobber=True)

                self.make_scripts(run, band, fname)
Example #30
    def _make_truth_catalog(self):
        """Make the truth catalog."""
        # always done with first band
        band = self.bands[0]
        coadd_wcs = get_esutil_wcs(
            image_path=self.info[band]['image_path'],
            image_ext=self.info[band]['image_ext'])

        ra, dec, x, y = make_coadd_grid_radec(
            rng=self.gal_rng, coadd_wcs=coadd_wcs,
            return_xy=True, n_grid=self.gal_kws['n_grid'])

        truth_cat = np.zeros(
            len(ra), dtype=[
                ('number', 'i8'),
                ('ra', 'f8'),
                ('dec', 'f8'),
                ('x', 'f8'),
                ('y', 'f8')])
        truth_cat['number'] = np.arange(len(ra)).astype(np.int64) + 1
        truth_cat['ra'] = ra
        truth_cat['dec'] = dec
        truth_cat['x'] = x
        truth_cat['y'] = y

        truth_cat_path = get_truth_catalog_path(
            meds_dir=self.output_meds_dir,
            medsconf=MEDSCONF,
            tilename=self.tilename)

        make_dirs_for_file(truth_cat_path)
        fitsio.write(truth_cat_path, truth_cat, clobber=True)

        return truth_cat
Example #31
def match_ps1mds(matchRad=2.5):
    raise NotImplementedError
    pstiles = panstarrs_md_tiles(observed=True)
    for field, tiles in pstiles.items():
        stars = fitsio.read(ps1md_starfile(field))
        matches = match_objects(stars, tiles)
        fitsio.write('ps1%s_match.fits' % field, matches, clobber=True)
Example #32
    def makeReferenceMatchesFromFits(self, refLoader, clobber=False):
        """
        Make an absolute reference match catalog, saving to fits.

        Parameters
        ----------
        refLoader: `object`
           Object which has refLoader.getFgcmReferenceStarsHealpix
        clobber: `bool`, optional
           Clobber existing absref catalog?  Default is False.
        """

        import fitsio

        refFile = self.starConfig['starfileBase'] + '_refcat.fits'

        if not clobber:
            if os.path.isfile(refFile):
                self.fgcmLog.info("Found %s" % (refFile))
                return refFile

        self.makeReferenceMatches(refLoader)

        fitsio.write(refFile, self.referenceCat, clobber=True)

        return refFile
Example #33
def generate_jk_centers_from_mask(outfile, regionfile, nrand=1e5):

    with h5py.File(outfile, 'r') as f:
        mask = f['index/mask/hpix'][:]

    nside = 4096

    pmap = np.zeros(12 * nside**2)
    pmap[mask] = 1

    pmap = hu.DensityMap('nest', pmap)

    rand_ra, rand_dec = pmap.genrand(int(nrand), system='eq')

    centers = KMeans(n_clusters=1000,
                     random_state=0).fit(np.vstack([rand_ra, rand_dec]).T)

    centers = centers.cluster_centers_
    kdt = spatial.cKDTree(centers)
    dist, idx = kdt.query(centers, 2)
    centers_dist = np.zeros((1000, 3))
    centers_dist[:, :2] = centers
    centers_dist[:, 2] = dist[:, 1]

    fitsio.write(regionfile, centers_dist)
Example #34
    def create_random_cat_finalise(label=''):
        """
    This function removes duplicate randoms from the results of create_random_cat() and writes a fits file with the random catalog.
    """
        import os

        def unique(a):
            order = np.lexsort(a.T)
            a = a[order]
            diff = np.diff(a, axis=0)
            ui = np.ones(len(a), 'bool')
            ui[1:] = (diff != 0).any(axis=1)
            return a[ui], ui

        a = np.vstack(
            (np.load(label + 'ra.npy'), np.load(label + 'dec.npy'))).T
        u, i = unique(a)
        a = a[i]

        ran = np.empty(len(a), dtype=[('ra', 'f8')] + [('dec', 'f8')])
        ran['ra'] = a[:, 0].T
        ran['dec'] = a[:, 1].T

        os.remove(label + 'ra.npy')
        os.remove(label + 'dec.npy')

        fio.write(label + 'random.fits.gz', ran, clobber=True)

        return
Example #35
    def _write_urat_fits(infile):
        """read an input name for a csv file and write it to FITS"""
        outbase = os.path.basename(infile)
        outfilename = "{}.fits".format(outbase.split(".")[0])
        outfile = os.path.join(fitsdir, outfilename)
        # ADM astropy understands without specifying format='csv'.
        fitstable = ascii.read(infile)

        # ADM map the ascii-read csv to typical DESI quantities.
        nobjs = len(fitstable)
        done = np.zeros(nobjs, dtype=uratdatamodel.dtype)
        # ADM have to do this one-by-one, given the format.
        done["RA"] = fitstable['col1'] / 1000. / 3600.
        done["DEC"] = fitstable['col2'] / 1000. / 3600. - 90.
        done["PMRA"] = fitstable['col16'] / 10.
        done["PMDEC"] = fitstable['col17'] / 10.
        done["PM_ERROR"] = fitstable['col18'] / 10.
        done["APASS_G_MAG"] = fitstable['col36'] / 1000.
        done["APASS_R_MAG"] = fitstable['col37'] / 1000.
        done["APASS_I_MAG"] = fitstable['col38'] / 1000.
        done["APASS_G_MAG_ERROR"] = fitstable['col41'] / 1000.
        done["APASS_R_MAG_ERROR"] = fitstable['col42'] / 1000.
        done["APASS_I_MAG_ERROR"] = fitstable['col43'] / 1000.
        done["URAT_ID"] = fitstable['col46']

        fitsio.write(outfile, done, extname='URATFITS')

        # ADM return the HEALPixels that this file touches.
        pix = set(radec2pix(nside, done["RA"], done["DEC"]))
        return [pix, os.path.basename(outfile)]
Example #36
def combine_ccds(ccds, output):
    """ Combines CCD annotated files
  
    """     
    columns = ['camera', 'filter', 'fwhm', 'mjd_obs', 'exptime', 
                'ra', 'dec', 'ra0','ra1','ra2','ra3','dec0','dec1','dec2','dec3',
                'galdepth', 'ebv', 'airmass', 'ccdskycounts', 'pixscale_mean', 'ccdzpt']
   
    dtype = np.dtype([('camera', '<U7'),('filter', '<U1'), ('exptime', '>f4'), ('mjd_obs', '>f8'), 
                      ('airmass', '>f4'), ('fwhm', '>f4'), ('ra', '>f8'), ('dec', '>f8'), ('ccdzpt', '>f4'),
                      ('ccdskycounts', '>f4'), ('ra0', '>f8'), ('dec0', '>f8'), ('ra1', '>f8'),
                      ('dec1', '>f8'), ('ra2', '>f8'), ('dec2', '>f8'), ('ra3', '>f8'), ('dec3', '>f8'),
                      ('pixscale_mean', '>f4'), ('ebv', '>f4'), ('galdepth', '>f4')])
   
    # read each ccd file > fix its dtype > move on to the next
    ccds_data = []
    for ccd_i in ccds:
        
        print('working on .... %s'%ccd_i.split('/')[-1])
        data_in = ft.FITS(ccd_i)[1].read(columns=columns)

        data_out = fixdtype(data_in, dtype)
        
        in_diff = np.setdiff1d(dtype.descr, data_in.dtype.descr)
        out_diff = np.setdiff1d(dtype.descr, data_out.dtype.descr)
        print(f'number of ccds in this file: {data_in.size}')
        print(f'different dtypes (before): {in_diff}')
        print(f'different dtypes (after): {out_diff}')
        ccds_data.append(data_out)    
        
    ccds_data_c = np.concatenate(ccds_data)
    print(f'Total number of combined ccds : {ccds_data_c.size}')
    
    ft.write(output, ccds_data_c, clobber=True)
    print(f'wrote the combined ccd file: {output}')
Example #37
def write_astrometry_for_gfa_file(fn,
                                  out_dir,
                                  gnums=[0, 2, 3, 5, 7, 8],
                                  left=False,
                                  config_fn=None):
    hdr = fitsio.read_header(fn, ext=1)
    skyra = hdr['SKYRA']
    skydec = hdr['SKYDEC']
    expnum = hdr['EXPID']
    imgfns = []

    crpix_arg = '--crpix-center'

    for gnum in gnums:
        gname = 'GUIDE%i' % gnum
        g, x0, y0 = read_guide_image_file(fn, ext=gname)
        good = read_good_pix_maps(gname)
        imgfn = os.path.join(out_dir, 'gfa-%i-%s.fits' % (expnum, gname))
        g = g * good

        gh, gw = g.shape

        if left:
            g = g[:, :gw // 2]
            crpix_arg = '--crpix-x %.1f --crpix-y %.1f' % (gw + 0.5,
                                                           gh / 2 + 0.5)
        fitsio.write(imgfn, g, clobber=True)
        imgfns.append(imgfn)

    if out_dir == '':
        out_dir = '.'

    if config_fn is None:
        config_fn = commish_paths.an_config_filename

    # NOTE -- definitely want --tweak-order 1, otherwise (with
    # --tweak-order 0 or --no-tweak) we get a SQUARE CD matrix!!
    cmd = ''

    an_dir = commish_paths.an_path
    if an_dir is not None:
        cmd = ('PATH=%s/bin:${PATH} ' % an_dir) + cmd

    an_py_path = commish_paths.an_py_path
    if an_py_path is not None:
        cmd = ('PYTHONPATH=%s:${PYTHONPATH} ' % an_py_path) + cmd

    cmd += ((
        'solve-field --config %s --xscale 1.1' +
        ' --ra %f --dec %f --radius 2 --scale-low 0.18 --scale-high 0.24 --scale-units app --downsample 2'
        + ' --continue --tweak-order 1 --plot-scale 0.5 --objs 100' +
        ' --batch --dir %s %s ') %
            (config_fn, skyra, skydec, out_dir, crpix_arg))
    cmd += ' '.join(imgfns)
    print(cmd)
    os.system(cmd)

    wcsfns = [fn.replace('.fits', '.wcs') for fn in imgfns]
    wcsfns = [fn if os.path.exists(fn) else None for fn in wcsfns]
    return wcsfns, hdr
Example #39
def write_rgb():
    #g,r,z = [fitsio.read('detmap-%s.fits' % band) for band in 'grz']
    g,r,z = [fitsio.read('coadd-%s.fits' % band) for band in 'grz']

    plt.figure(figsize=(10,10))
    plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.95)

    plt.clf()
    for (im1,cc),scale in zip([(g,'b'),(r,'g'),(z,'r')],
                             [2.0, 1.2, 0.4]):
        im = im1 * scale
        im = im[im != 0]
        plt.hist(im.ravel(), histtype='step', color=cc,
                 range=[np.percentile(im, p) for p in (1,98)], bins=50)
    ps.savefig()
        
    #rgb = get_rgb_image(g,r,z, alpha=0.8, m=0.02)
    #rgb = get_rgb_image(g,r,z, alpha=16., m=0.005, m2=0.002,
    #rgb = get_rgb_image(g,r,z, alpha=32., m=0.01, m2=0.002,
    rgb = get_rgb_image(g,r,z, alpha=8., m=0.0, m2=0.0,
        scale_g = 2.,
        scale_r = 1.1,
        scale_z = 0.5,
        Q = 10)


    #for im in g,r,z:
    #    mn,mx = [np.percentile(im, p) for p in [20,99]]
    #    print 'mn,mx:', mn,mx
    
    plt.clf()
    plt.imshow(rgb, interpolation='nearest', origin='lower')
    ps.savefig()

    fitsio.write('rgb.fits', rgb)
Example #40
def plotMassFunction(im, pm, outbase, mmin=9, mmax=13, mstep=0.05):
    """
    Make a comparison plot between the input halo mass function and the
    mass function of the added (predicted) halos
    """
    plt.clf()

    nmbins = int( ( mmax - mmin ) / mstep )
    mbins = np.logspace( mmin, mmax, nmbins )
    mcen = ( mbins[:-1] + mbins[1:] ) /2
    
    plt.xscale( 'log', nonposx = 'clip' )
    plt.yscale( 'log', nonposy = 'clip' )
    
    ic, e, p = plt.hist( im, mbins, label='Original Halos', alpha=0.5, normed = True)
    pc, e, p = plt.hist( pm, mbins, label='Added Halos', alpha=0.5, normed = True)
    
    plt.legend()
    plt.xlabel( r'$M_{vir}$' )
    plt.ylabel( r'$\frac{dN}{dM}$' )
    #plt.tight_layout()
    plt.savefig( outbase+'_mfcn.png' )
    
    mdtype = np.dtype( [ ('mcen', float), ('imcounts', float), ('pmcounts', float) ] )
    mf = np.ndarray( len(mcen), dtype = mdtype )
    mf[ 'mcen' ] = mcen
    mf[ 'imcounts' ] = ic
    mf[ 'pmcounts' ] = pc

    fitsio.write( outbase+'_mfcn.fit', mf )
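A call sketch with synthetic halo masses (purely illustrative values); it assumes a matplotlib version that still accepts the normed= and nonposx=/nonposy= keywords used above:

import numpy as np

im = 10.0 ** np.random.uniform(9.5, 12.5, size=20000)   # "original" halo masses
pm = 10.0 ** np.random.uniform(9.5, 12.5, size=18000)   # "added" halo masses
plotMassFunction(im, pm, 'mfcn_test')   # writes mfcn_test_mfcn.png and mfcn_test_mfcn.fit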
Example #41
 def test_load_append_table_memory_fits(self):
     print('\n*** test_load_append_table_memory_fits ***\n')
     data = create_test_data()
     for i in range(4):
         data = np.concatenate((data, data))
     fitsio.write(self.fitsfile, data, clobber=True)
     self.assertTrue(os.path.exists(self.fitsfile))
     # memsize
     self.con.drop_table(self.tablename)
     self.assertTrue(
         self.con.load_table(self.fitsfile,
                             name=self.tablename,
                             memsize=self.memsize))
     cursor = self.con.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 16)
     # appending
     self.assertTrue(
         self.con.append_table(self.fitsfile,
                               name=self.tablename,
                               memsize=self.memsize))
     cursor = self.con.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows * 2 * 16)
     ## end
     os.remove(self.fitsfile)
     self.con.drop_table(self.tablename)
Example #42
def plotFeaturePDF(ift, pft, outbase, fmin=0.0, fmax=1.0, fstep=0.01):
    """
    Plot a comparison between the input feature distribution and the 
    feature distribution of the predicted halos
    """
    plt.clf()
    nfbins = int( ( fmax - fmin ) / fstep )
    fbins = np.logspace( fmin, fmax, nfbins )
    fcen = ( fbins[:-1] + fbins[1:] ) / 2

    plt.xscale( 'log', nonposx='clip' )
    plt.yscale( 'log', nonposy='clip' )
    
    ic, e, p = plt.hist( ift, fbins, label='Original Halos', alpha=0.5, normed=True )
    pc, e, p = plt.hist( pft, fbins, label='Added Halos', alpha=0.5, normed=True )

    plt.legend()
    plt.xlabel( r'$\delta$' )
    plt.savefig( outbase+'_fpdf.png' )

    fdtype = np.dtype( [ ('fcen', float), ('ifcounts', float), ('pfcounts', float) ] )
    fd = np.ndarray( len(fcen), dtype = fdtype )
    fd[ 'fcen' ] = fcen
    fd[ 'ifcounts' ] = ic
    fd[ 'pfcounts' ] = pc

    fitsio.write( outbase+'_fpdf.fit', fd )
Example #43
    def test_load_append_table_fits(self):
        print('\n*** test_load_append_table_fits ***\n')
        data = create_test_data()
        fitsio.write(self.fitsfile, data, clobber=True)
        self.assertTrue(os.path.exists(self.fitsfile))
        self.con.drop_table(os.path.splitext(self.fitsfile)[0].upper())
        # name from filename
        command = "load_table %s " % self.fitsfile
        self.con.onecmd(command)
        cursor = self.con2.cursor()
        temp = cursor.execute('select RA,DEC from %s' %
                              os.path.splitext(self.fitsfile)[0].upper())
        fetch = temp.fetchall()
        self.assertEqual(len(fetch), self.nrows)

        # appending
        command = "append_table %s " % self.fitsfile
        self.con.onecmd(command)
        cursor = self.con2.cursor()
        temp = cursor.execute('select RA,DEC from %s' %
                              os.path.splitext(self.fitsfile)[0].upper())
        fetch = temp.fetchall()
        self.assertEqual(len(fetch), self.nrows * 2)
        self.con.drop_table(os.path.splitext(self.fitsfile)[0].upper())
        os.remove(self.fitsfile)
def create_test_fits(filename=None, data=None):
    if filename is None:
        filename = BASENAME + '.fits'
    if data is None:
        data = create_test_data()
    fitsio.write(filename, data)
    return filename
Example #45
def gen_write_fits(file_name, col_names, columns):
    """Write some columns to an output FITS file with the given column names.

    :param file_name:   The name of the file to write to.
    :param col_names:   A list of columns names for the given columns.
    :param columns:     A list of numpy arrays with the data to write.
    """
    import numpy

    try:
        import fitsio
        data = numpy.empty(len(columns[0]), dtype=[ (name,'f8') for name in col_names ])
        for (name, col) in zip(col_names, columns):
            data[name] = col
        fitsio.write(file_name, data, clobber=True)
    except ImportError:
        try:
            import astropy.io.fits as pyfits
        except:
            import pyfits

        cols = pyfits.ColDefs([
            pyfits.Column(name=name, format='D', array=col)
            for (name, col) in zip(col_names, columns) ])

        # Depending on the version of pyfits, one of these should work:
        try:
            tbhdu = pyfits.BinTableHDU.from_columns(cols)
        except:
            tbhdu = pyfits.new_table(cols)
        tbhdu.writeto(file_name, clobber=True)
Example #46
def _partition_gaia(filename=None, dr='dr1'):
    """Partition a Gaia source FITS files

    Parameters
    ----------
    filename: string
        base name of partition file
    dr : string
        name of data release (default 'dr1')

    Comments
    --------
    Breaks file into RA and Dec bins. Creates or adds to Gaia partition files..
    """
    gaia = fitsio.read(filename, ext=1)
    ira = np.int32(gaia['ra'])
    idec = np.int32((gaia['dec'] + 90.))
    isp = np.nonzero(idec == 0)[0]
    ira[isp] = 0
    inp = np.nonzero(idec == 179)[0]
    ira[inp] = 0
    ifile = idec * 360 + ira
    uniqs, iuniqs = np.unique(ifile, return_index=True)
    for iuniq in iuniqs:
        cra = ira[iuniq]
        cdec = idec[iuniq]
        cgaia = np.nonzero((ira == cra) & (idec == cdec))[0]
        filename = _partition_filename(cra, cdec, dr=dr)
        if (os.path.isfile(filename)):
            fits = fitsio.FITS(filename, 'rw')
            fits[-1].append(gaia[cgaia])
            fits.close()
        else:
            fitsio.write(filename, gaia[cgaia])
    return
Example #47
def select_ndwfs_stars():
    ndwfsdir = '/global/scratch2/sd/imcgreer/ndwfs/DR3/matchedFITS/'
    dtype = [('number', 'i4'), ('autoMag', '3f4'), ('autoMagErr', '3f4'),
             ('ra', 'f8'), ('dec', 'f8'), ('rFWHM', 'f4'), ('rClass', 'f4')]
    starcat = []
    rcols = [
        'NUMBER', 'MAG_AUTO', 'MAGERR_AUTO', 'ALPHA_J2000', 'DELTA_J2000',
        'FWHM_IMAGE', 'CLASS_STAR'
    ]
    cols = ['MAG_AUTO', 'MAGERR_AUTO']
    for dec1 in range(32, 36):
        catfn = lambda b: 'NDWFS_%s_%d_%d_cat_m.fits.gz' % (b, dec1, dec1 + 1)
        rfits = fitsio.FITS(ndwfsdir + catfn('R'))
        bfits = fitsio.FITS(ndwfsdir + catfn('Bw'))
        ifits = fitsio.FITS(ndwfsdir + catfn('I'))
        w = rfits[1].where('FWHM_IMAGE < 7 && MAG_AUTO < 24.0 && FLAGS == 0')
        print(len(w))
        rcat = rfits[1].read(rows=w, columns=rcols)
        bcat = bfits[1].read(rows=w, columns=cols)
        icat = ifits[1].read(rows=w, columns=cols)
        stars = np.empty(len(w), dtype=dtype)
        stars['number'] = rcat['NUMBER']
        stars['ra'] = rcat['ALPHA_J2000']
        stars['dec'] = rcat['DELTA_J2000']
        stars['rFWHM'] = rcat['FWHM_IMAGE']
        stars['rClass'] = rcat['CLASS_STAR']
        for j, cat in enumerate([bcat, rcat, icat]):
            stars['autoMag'][:, j] = cat['MAG_AUTO']
            stars['autoMagErr'][:, j] = cat['MAGERR_AUTO']
        starcat.append(stars)
    starcat = np.concatenate(starcat)
    fitsio.write(ndwfs_starfile, starcat, clobber=True)
Example #48
    def _select_stars(self):
        import fitsio
        print("selecting stars")

        ntile=len(self.alldata)
        dt=[('tilename','S30'),
            ('ra','f8'),
            ('dec','f8'),
            ('nstars','i4')]

        stardata=numpy.zeros(ntile,dtype=dt)

        for i,dd in enumerate(self.alldata):
            cat=dd['data']

            w,=numpy.where(  (cat['flags']==0)
                           & (numpy.abs(cat['spread_model']) < 0.002)
                           & (cat['mag_auto'] < 19) & (cat['mag_auto'] > 16) )

            print("    ",dd['coadd_run'])
            print("        nstars:",w.size)
            dd['wstars'] = w

            stardata['tilename'][i] = dd['tilename']
            stardata['ra'][i] = numpy.median(cat['alphawin_j2000'][w])
            stardata['dec'][i] = numpy.median(cat['deltawin_j2000'][w])
            stardata['nstars'][i] = w.size

        self.stardata=stardata

        sdfile=get_stardata_file(self.release)
        print(sdfile)
        fitsio.write(sdfile,self.stardata,clobber=True)
Example #49
    def save_weights(cat, val, zbin, w, bins, mask0):

        for i in range(cat.sbins):
            if cat.cat == 'mcal':
                mask = bins[i][np.in1d(bins[i],
                                       np.where(w != 0)[0],
                                       assume_unique=False)]
            else:
                mask = (bins == i) & mask0 & (w != 0)
            pix = catalog.CatalogMethods.radec_to_hpix(cat.ra[mask],
                                                       cat.dec[mask],
                                                       nside=4096,
                                                       nest=False)
            upix = np.unique(pix)
            w0 = w[mask]
            w1 = np.bincount(pix, weights=w0)
            w2 = np.bincount(pix, weights=w0 * w0)
            mask = np.where(w1 != 0)[0]
            # upix = upix[mask]
            w1 = w1[mask] / np.bincount(pix)[mask]
            w2 = w2[mask] / np.bincount(pix)[mask]

            out = np.empty(len(upix),
                           dtype=[('pix', int)] + [('weight', 'f4')] +
                           [('weightsq', 'f4')])
            out['pix'] = upix
            out['weight'] = w1
            out['weightsq'] = w2
            fio.write('text/pzrw_' + cat.name + '_' + val + '_' +
                      str(zbin + 1) + '_' + str(i) + '.fits.gz',
                      out,
                      clobber=True)

        return
Example #50
    def save(self, filename):
        """Save to a FITS file

        :Parameters:
            - `filename`: the name of the FITS file
        """
        fitsio.write(filename, self.mask, header=self.header,
                     clobber=True)
Example #51
 def write(self, fname):
     """
     write to output file
     """
     files.makedir_fromfile(fname)
     data=self.get_data()
     print("writing:",fname)
      fitsio.write(fname, data, clobber=True)
Example #52
  def build_special_points_fits(): 
    """
    Combines parts of special points catalog into single fits catalog.
    """

    import fitsio as fio

    tmp=fio.FITS(config.wcsfile)[-1].read()
    a=np.sort(np.unique(tmp['expnum']))
    b=np.sort(np.unique(tmp['ccdnum']))-1
    store=np.empty((len(a)*(len(b)+1)),dtype=[('exposure',int)]+[('ccd',int)]+[('type',int)]+[('ra','f8')]+[('dec','f8')])
    for i in range(len(a)):
      store['exposure'][i*len(b):(i+1)*len(b)]=a[i]
      for j in range(len(b)):
        store['ccd'][i*len(b)+j]=b[j]

    for i in range(40):
      print(i)
      tmp=np.genfromtxt('y1a1_special_points_'+str(i)+'.txt',names=['index','exposure','ccd','racenter','deccenter','rall','decll','raul','decul','ralr','declr','raur','decur'])
      for j in range(len(tmp)):
        mask1=(store['exposure']==tmp['exposure'][j])
        if j%1000==0:
          print(j)
        mask=mask1&(store['ccd']==tmp['ccd'][j])
        if tmp['racenter'][j]!=999:
          store['type'][mask]=0
          store['ra'][mask]=tmp['racenter'][j]
          store['dec'][mask]=tmp['deccenter'][j]
        if tmp['rall'][j]!=999:
          store['type'][mask]=1
          store['ra'][mask]=tmp['rall'][j]
          store['dec'][mask]=tmp['decll'][j]
        if tmp['raul'][j]!=999:
          store['type'][mask]=2
          store['ra'][mask]=tmp['raul'][j]
          store['dec'][mask]=tmp['decul'][j]
        if tmp['ralr'][j]!=999:
          store['type'][mask]=3
          store['ra'][mask]=tmp['ralr'][j]
          store['dec'][mask]=tmp['declr'][j]
        if tmp['raur'][j]!=999:
          store['type'][mask]=4
          store['ra'][mask]=tmp['raur'][j]
          store['dec'][mask]=tmp['decur'][j]

    for i in range(len(a)):
      if i%1000==0:
        print(i)
      store['exposure'][len(a)*len(b)+i]=a[i]
      store['ccd'][len(a)*len(b)+i]=-1
      store['type'][len(a)*len(b)+i]=-1
      mask=(store['exposure']==store['exposure'][len(a)*len(b)+i])&(store['type']==0)&((store['ccd']==27)|(store['ccd']==34))
      store['ra'][len(a)*len(b)+i]=np.mean(store['ra'][mask])
      store['dec'][len(a)*len(b)+i]=np.mean(store['dec'][mask])

    fio.write(config.spointsfile,store,clobber=True)

    return
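The nested loop that seeds the (exposure, ccd) grid above can also be written with np.repeat and np.tile. The sketch below builds only the first len(a)*len(b) rows of the grid under the same dtype; it is an illustration of the layout, not a replacement for the full catalog builder.

import numpy as np

def build_grid(exposures, ccds):
    """
    One row per (exposure, ccd) combination, matching the first
    len(exposures)*len(ccds) rows the nested loop above fills
    (exposure varies slowest, ccd fastest).
    """
    dtype = [('exposure', int), ('ccd', int), ('type', int),
             ('ra', 'f8'), ('dec', 'f8')]
    grid = np.zeros(len(exposures) * len(ccds), dtype=dtype)
    grid['exposure'] = np.repeat(exposures, len(ccds))
    grid['ccd'] = np.tile(ccds, len(exposures))
    return grid

grid = build_grid(np.array([100, 101]), np.arange(62))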
Beispiel #53
0
  def save_cat(cat):

    for x in dir(cat):
      obj = getattr(cat,x)
      if isinstance(obj,np.ndarray):
        if len(obj)==len(cat.coadd):
          fio.write(x+'.fits.gz',obj,clobber=True)

    return
Beispiel #54
0
def calc_branch_sky_noise(**keys):
    """
    Calculate the sky noise in all images from the specified branch
    """
    import fitsio

    nsub=files.get_nsub(**keys)

    deep=keys.get('deep',False)
    if deep:
        outfile=files.get_deep_skynoise_file(**keys)

        fmin=-0.08
        fmax= 0.035
        binsize=0.001

    else:
        outfile=files.get_skynoise_file(**keys)
        fmin=-0.3
        fmax= 0.15
        binsize=0.005



    print("will write to:",outfile)
    d=files.get_skynoise_plot_dir(**keys)
    if not os.path.exists(d):
        os.makedirs(d)

    out=numpy.zeros(1,dtype=[('subid','f8'),
                             ('skysig','f8'),
                             ('skysig_err','f8')])

    if deep:
        im=files.read_deep_gal_image(**keys)
        plot_file=files.get_deep_skynoise_plot_file(**keys)
    else:
        im=files.read_gal_image(**keys)
        plot_file=files.get_skynoise_plot_file(**keys)

    gf = get_sky_noise(im,fmin,fmax,binsize)
    res=gf.get_result()
    plt=gf.make_plot(show=False)

    print("    writing:",plot_file)
    plt.write_eps(plot_file)

    pars=res['pars']
    perr=res['perr']

    print('    %.3g +/- %.3g' % (pars[1],perr[1]))
    out['subid']=keys['subid']
    out['skysig']=pars[1]
    out['skysig_err']=perr[1]

    print("writing:",outfile)
    fitsio.write(outfile, out, clobber=True)
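calc_branch_sky_noise delegates the actual estimate to get_sky_noise, which is not shown here. One common approach, sketched below purely as an assumption about what such a routine might do, is to histogram the pixel values within [fmin, fmax] and fit a Gaussian, taking the fitted width as the sky noise.

import numpy as np
from scipy.optimize import curve_fit

def fit_sky_sigma(image, fmin, fmax, binsize):
    """
    Histogram pixel values in [fmin, fmax] and fit a Gaussian; the
    fitted sigma is an estimate of the sky noise.  A sketch only --
    the real get_sky_noise used above may differ in detail.
    """
    vals = image.ravel()
    vals = vals[(vals > fmin) & (vals < fmax)]
    edges = np.arange(fmin, fmax + binsize, binsize)
    hist, edges = np.histogram(vals, bins=edges)
    centers = 0.5 * (edges[:-1] + edges[1:])

    def gauss(x, amp, sigma, mu):
        return amp * np.exp(-0.5 * ((x - mu) / sigma) ** 2)

    p0 = [hist.max(), vals.std(), np.median(vals)]
    pars, cov = curve_fit(gauss, centers, hist, p0=p0)
    perr = np.sqrt(np.diag(cov))
    return abs(pars[1]), perr[1]   # sigma and its uncertainty

# usage sketch on a fake noise-only image
image = np.random.normal(0.0, 0.05, size=(256, 256))
skysig, skysig_err = fit_sky_sigma(image, fmin=-0.3, fmax=0.15, binsize=0.005)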
Beispiel #55
0
 def writeFits(self,outPath,im_data,header,objName,band,im_type,first):
     if not first: fits2 = fitsio.FITS(outPath,'rw')
     header['BAND'] = band
     header['OBJECT'] = objName
     header['TYPE'] = im_type
     if first:
         fitsio.write(outPath,im_data,header=header)
     else:
         fits2.write(im_data,header=header)
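writeFits above relies on the standard fitsio pattern of creating the file on the first write and appending further image HDUs on later writes. A minimal standalone sketch of that pattern, with a made-up file name and random images:

import numpy as np
import fitsio

fname = 'cutouts.fits'   # hypothetical output path
imgs = [np.random.normal(size=(32, 32)) for _ in range(3)]

for k, im in enumerate(imgs):
    hdr = {'BAND': 'r', 'OBJECT': 'obj%d' % k, 'TYPE': 'image'}
    if k == 0:
        # first call creates (or clobbers) the file with the first image HDU
        fitsio.write(fname, im, header=hdr, clobber=True)
    else:
        # later calls append a new image extension to the existing file
        with fitsio.FITS(fname, 'rw') as fits:
            fits.write(im, header=hdr)

# each extension can then be read back by index
second = fitsio.read(fname, ext=1)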
Beispiel #56
0
def write_membership(loglike,filename):
    """
    Write a catalog file of the likelihood region including
    membership properties.

    Parameters:
    -----------
    loglike : input loglikelihood object
    filename : output filename
    
    Returns:
    --------
    None
    """

    ra,dec = gal2cel(loglike.catalog.lon,loglike.catalog.lat)
        
    name_objid = loglike.config['catalog']['objid_field']
    name_mag_1 = loglike.config['catalog']['mag_1_field']
    name_mag_2 = loglike.config['catalog']['mag_2_field']
    name_mag_err_1 = loglike.config['catalog']['mag_err_1_field']
    name_mag_err_2 = loglike.config['catalog']['mag_err_2_field']

    # Angular and isochrone separations
    sep = angsep(loglike.source.lon,loglike.source.lat,
                 loglike.catalog.lon,loglike.catalog.lat)
    isosep = loglike.isochrone.separation(loglike.catalog.mag_1,loglike.catalog.mag_2)

    data = odict()
    data[name_objid] = loglike.catalog.objid
    data['GLON'] = loglike.catalog.lon
    data['GLAT'] = loglike.catalog.lat
    data['RA']   = ra
    data['DEC']  = dec
    data[name_mag_1] = loglike.catalog.mag_1
    data[name_mag_err_1] = loglike.catalog.mag_err_1
    data[name_mag_2] = loglike.catalog.mag_2
    data[name_mag_err_2] = loglike.catalog.mag_err_2
    data['COLOR'] = loglike.catalog.color
    data['ANGSEP'] = sep
    data['ISOSEP'] = isosep
    data['PROB'] = loglike.p

    # HIERARCH allows header keywords longer than 8 characters
    header = []
    for param,value in loglike.source.params.items():
        card = dict(name='HIERARCH %s'%param.upper(),
                    value=value.value,
                    comment=param)
        header.append(card)
    card = dict(name='HIERARCH %s'%'TS',value=loglike.ts(),
                comment='test statistic')
    header.append(card)
    card = dict(name='HIERARCH %s'%'TIMESTAMP',value=time.asctime(),
                comment='creation time')
    header.append(card)
    fitsio.write(filename,data,header=header,clobber=True)
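The header above is passed as a list of card dicts so that fitsio writes HIERARCH keywords longer than eight characters. A minimal sketch of the same pattern with made-up parameter names and an invented output file:

import numpy as np
import fitsio

data = {'RA': np.array([10.1, 10.2]), 'DEC': np.array([-45.0, -45.1])}

# list of card dicts; the 'HIERARCH ' prefix lets keywords exceed 8 characters
header = [
    dict(name='HIERARCH DISTANCE_MODULUS', value=17.5, comment='m-M'),
    dict(name='HIERARCH TS', value=42.0, comment='test statistic'),
]

fitsio.write('members_sketch.fits', data, header=header, clobber=True)

h = fitsio.read_header('members_sketch.fits', ext=1)
print(h['DISTANCE_MODULUS'])   # the keyword reads back without the HIERARCH prefix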
Beispiel #57
0
    def write(self, outfile):
        """
        Save the likelihood results as a sparse HEALPix map.
        """
        data = odict()
        data['PIXEL']=self.roi.pixels_target
        # Full data output (too large for survey)
        if self.config['scan']['full_pdf']:
            data['LOG_LIKELIHOOD']=self.log_likelihood_sparse_array.T
            data['RICHNESS']=self.richness_sparse_array.T
            data['RICHNESS_LOWER']=self.richness_lower_sparse_array.T
            data['RICHNESS_UPPER']=self.richness_upper_sparse_array.T
            data['RICHNESS_LIMIT']=self.richness_upper_limit_sparse_array.T
            #data['STELLAR_MASS']=self.stellar_mass_sparse_array.T
            data['FRACTION_OBSERVABLE']=self.fraction_observable_sparse_array.T
        else:
            data['LOG_LIKELIHOOD']=self.log_likelihood_sparse_array.T
            data['RICHNESS']=self.richness_sparse_array.T
            data['FRACTION_OBSERVABLE']=self.fraction_observable_sparse_array.T

        # Convert to 32bit float
        for k in list(data.keys())[1:]:
            data[k] = data[k].astype('f4',copy=False)
            
        # Stellar mass can be calculated from STELLAR * RICHNESS
        header = odict()
        header['STELLAR']=round(self.stellar_mass_conversion,8)
        header['LKDNSIDE']=self.config['coords']['nside_likelihood']
        header['LKDPIX']=ang2pix(self.config['coords']['nside_likelihood'],
                                 self.roi.lon,self.roi.lat)
        header['NROI']=self.roi.inROI(self.loglike.catalog_roi.lon,
                                      self.loglike.catalog_roi.lat).sum()
        header['NANNULUS']=self.roi.inAnnulus(self.loglike.catalog_roi.lon,
                                              self.loglike.catalog_roi.lat).sum()
        header['NINSIDE']=self.roi.inInterior(self.loglike.catalog_roi.lon,
                                              self.loglike.catalog_roi.lat).sum()
        header['NTARGET']=self.roi.inTarget(self.loglike.catalog_roi.lon,
                                            self.loglike.catalog_roi.lat).sum()

        # Flatten if there is only a single distance modulus
        # ADW: Is this really what we want to do?
        if len(self.distance_modulus_array) == 1:
            for key in data:
                data[key] = data[key].flatten()

        logger.info("Writing %s..."%outfile)
        write_partial_map(outfile,data,
                          nside=self.config['coords']['nside_pixel'],
                          header=header,
                          clobber=True
                          )
        
        fitsio.write(outfile,
                     dict(DISTANCE_MODULUS=self.distance_modulus_array.astype('f4',copy=False)),
                     extname='DISTANCE_MODULUS',
                     clobber=False)
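A partial map written this way stores only the PIXEL column plus per-pixel values; expanding it back to a full-sky array is a one-line scatter. The sketch below assumes a 1-D column and uses healpy directly; ugali's own reader is the authoritative version and also handles the multi-dimensional case.

import numpy as np
import healpy as hp
import fitsio

def read_partial_map(fname, column, nside, fill=hp.UNSEEN):
    """
    Expand a partial (sparse) HEALPix map -- stored as PIXEL plus
    per-pixel columns, as written above -- into a full-sky array.
    """
    d = fitsio.read(fname)
    full = np.full(hp.nside2npix(nside), fill)
    full[d['PIXEL']] = d[column]
    return full

# usage sketch, assuming 'scan.fits' was written by the method above
# m = read_partial_map('scan.fits', 'LOG_LIKELIHOOD', nside=4096)
# dmod = fitsio.read('scan.fits', ext='DISTANCE_MODULUS')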
Beispiel #58
0
def inject(kicid, rng=6):
    # Download the data.
    client = kplr.API()
    kic = client.star(kicid)
    lcs = kic.get_light_curves(short_cadence=False)
    lc = lcs[np.random.randint(len(lcs))]

    # Read the data.
    data = lc.read()
    t = data["TIME"]
    f = data["SAP_FLUX"]
    fe = data["SAP_FLUX_ERR"]
    q = data["SAP_QUALITY"]

    # Remove missing points.
    m = np.isfinite(t) * np.isfinite(f) * np.isfinite(fe) * (q == 0)
    t, f, fe = t[m], f[m], fe[m]
    t -= t.min()

    # Build the transit system.
    s = transit.System(transit.Central(q1=np.random.rand(),
                                       q2=np.random.rand()))
    body = transit.Body(period=365.25, b=np.random.rand(), r=0.04,
                        t0=np.random.uniform(t.max()))
    s.add_body(body)

    # Compute the transit model.
    texp = kplr.EXPOSURE_TIMES[1] / 86400.0  # Long cadence exposure time.
    model = s.light_curve(t, texp=texp)
    f *= model

    # Trim the dataset to include data only near the transit.
    m = np.abs(t - body.t0) < rng
    t, f, fe = t[m], f[m], fe[m]
    t -= body.t0

    # Save the injection as a FITS light curve.
    dt = [("TIME", float), ("SAP_FLUX", float), ("SAP_FLUX_ERR", float)]
    data = np.array(list(zip(t, f, fe)), dtype=dt)
    hdr = dict(b=body.b, period=body.period, r=body.r, t0=0.0,
               q1=s.central.q1, q2=s.central.q2)
    fitsio.write("{0}-injection.fits".format(kicid), data, header=hdr,
                 clobber=True)

    # Plot the light curve.
    ppm = (f / np.median(f) - 1) * 1e6
    fig = pl.figure(figsize=(6, 6))
    ax = fig.add_subplot(111)
    ax.plot(t, ppm, ".k")
    ax.set_xlim(-rng, rng)
    ax.set_xlabel("time since transit [days]")
    ax.set_ylabel("relative flux [ppm]")
    ax.set_title("raw light curve")
    fig.subplots_adjust(left=0.2, bottom=0.2, top=0.9, right=0.9)
    fig.savefig("{0}-raw.pdf".format(kicid))
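Since inject() stores the true transit parameters in the FITS header, they can be recovered alongside the light curve when reading the file back. A minimal sketch, assuming a file produced by the function above:

import fitsio

def read_injection(kicid):
    """
    Read back a light curve written by inject() together with the
    injected transit parameters stored in its FITS header.
    """
    fname = "{0}-injection.fits".format(kicid)
    data, hdr = fitsio.read(fname, header=True)
    truth = dict(b=hdr["B"], period=hdr["PERIOD"], r=hdr["R"],
                 t0=hdr["T0"], q1=hdr["Q1"], q2=hdr["Q2"])
    return data["TIME"], data["SAP_FLUX"], data["SAP_FLUX_ERR"], truth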
Beispiel #59
0
 def test_load_table_fits(self):
     data = create_test_data()
     fitsio.write(self.fitsfile, data, clobber=True)
     self.assertTrue(os.path.exists(self.fitsfile))
     self.con.drop_table(os.path.splitext(self.fitsfile)[0].upper())
     # name from filename
     command = "load_table %s " % self.fitsfile
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % os.path.splitext(self.fitsfile)[0].upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows)
     
     # appending
     command = "append_table %s " % self.fitsfile
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % os.path.splitext(self.fitsfile)[0].upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows*2)
     self.con.drop_table(os.path.splitext(self.fitsfile)[0].upper())
     
     # name from tablename
     self.con.drop_table(self.tablename)
     command = "load_table %s --tablename %s" % (self.fitsfile, self.tablename)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows)
     # appending
     command = "append_table %s --tablename %s" % (self.fitsfile, self.tablename)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows*2)
     # chunksize
     self.con.drop_table(self.tablename)
     command = "load_table %s --tablename %s --chunksize %s" % (self.fitsfile, self.tablename, self.chunk)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows)
     ## appending
     command = "append_table %s --tablename %s --chunksize %s" % (self.fitsfile, self.tablename, self.chunk)
     self.con.onecmd(command)
     cursor = self.con2.cursor()
     temp = cursor.execute('select RA,DEC from %s' % self.tablename.upper())
     fetch = temp.fetchall()
     self.assertEqual(len(fetch), self.nrows*2)
     os.remove(self.fitsfile)
     self.con.drop_table(self.tablename)
Beispiel #60
0
    def writeCandidates(self,filename=None):
        if filename is None: filename = self.candfile

        threshold = self.config['search']['cand_threshold']
        select  = (self.assocs['CUT']==0)
        select &= (self.assocs['TS']>threshold)
        #select &= (self.assocs['ASSOC2']=='')

        self.candidates = self.assocs[select]
        logger.info("Writing %s..."%filename)
        fitsio.write(filename,self.candidates,clobber=True)