Example #1
def ebv(ra, dec, ebvmap=None):
    ra = np.atleast_1d(ra)
    dec = np.atleast_1d(dec)

    if not len(ra) == len(dec):
        msg = "Column lengths must match"
        raise Exception(msg)

    if ebvmap is None or ebvmap.lower() == 'sfd':
        # Download SFD map
        url = "http://lambda.gsfc.nasa.gov/data/foregrounds/SFD/lambda_sfd_ebv.fits"
        logger.info("Downloading %s..." % url)
        filename = tempfile.NamedTemporaryFile().name
        cmd = "wget %s -O %s" % (url, filename)
        subprocess.call(cmd, shell=True)
        ebvmap = healpy.read_map(filename)
        os.remove(filename)
    elif isinstance(ebvmap, str):
        logger.info("Loading %s..." % ebvmap)
        ebvmap = healpy.read_map(ebvmap)
    else:
        msg = "Unrecognized ebv: %s" % ebvmap
        raise Exception(msg)

    # The SFD map is in Galactic coordinates
    glon, glat = cel2gal(ra, dec)
    ebv = healpix.get_interp_val(ebvmap, glon, glat)
    return ebv
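A minimal usage sketch of ebv() as defined above (the coordinates and the local map filename are hypothetical; numpy and the module-level imports used by the function are assumed to be available):

import numpy as np

# Hypothetical coordinates in degrees; ebv() promotes scalars to 1-d arrays.
ra = np.array([53.92, 56.09])
dec = np.array([-54.05, -43.53])

# Passing a local FITS map path skips the SFD download branch above.
reddening = ebv(ra, dec, ebvmap='lambda_sfd_ebv.fits')
print(reddening)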
Example #2
def run(self):
    if 'pixelize' in self.opts.run:
        # Pixelize the raw catalog data
        logger.info("Running 'pixelize'...")
        rawdir = self.config['data']['dirname']
        rawfiles = sorted(glob.glob(os.path.join(rawdir,'*.fits')))
        x = ugali.preprocess.pixelize.pixelizeCatalog(rawfiles,self.config)
    if 'density' in self.opts.run:
        # Calculate the stellar density
        logger.info("Running 'density'...")
        x = ugali.preprocess.pixelize.pixelizeDensity(self.config,nside=2**9,force=self.opts.force)
    if 'maglims' in self.opts.run:
        # Calculate magnitude limits
        logger.info("Running 'maglims'...")
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(force=self.opts.force)
    if 'simple' in self.opts.run:
        # Calculate simple magnitude limits
        logger.info("Running 'simple'...")
        #ugali.preprocess.maglims.simple_maglims(self.config,force=self.opts.force)
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(simple=True,force=self.opts.force)
    if 'split' in self.opts.run:
        logger.info("Running 'split'...")
        ugali.preprocess.maglims.simple_split(self.config,'split',force=self.opts.force)
Example #3
    def writeCandidates(self, filename=None):
        if filename is None: filename = self.candfile

        threshold = self.config['search']['cand_threshold']
        select = (self.assocs['CUT'] == 0)
        select &= (self.assocs['TS'] > threshold)
        #select &= (self.assocs['ASSOC2']=='')

        self.candidates = self.assocs[select]
        # ADW: View as a recarray or selection doesn't work.
        # Why? I don't know, and I'm slightly terrified...
        hdu = pyfits.new_table(self.candidates.view(np.recarray))
        logger.info("Writing %s..." % filename)
        hdu.writeto(filename, clobber=True)

        # Dump to txt file
        if which('fdump'):
            txtfile = filename.replace('.fits', '.txt')
            columns = ['NAME', 'TS', 'GLON', 'GLAT', 'DISTANCE', 'MASS']
            cmd = 'fdump %(infile)s %(outfile)s columns="%(columns)s" rows="-" prhead="no" showcol="yes" clobber="yes" pagewidth="256" fldsep=" " showrow="no"' % (
                dict(infile=filename,
                     outfile=txtfile,
                     columns=','.join(columns)))
            print(cmd)
            subprocess.call(cmd, shell=True)
Example #4
    def pdf_mmd(self, lon, lat, mag_1, mag_2, distance_modulus, mask, delta_mag=0.03, steps=1000):
        """
        Compute the signal color probability (u_color) for catalog objects using the precomputed signal MMD.
        """
        logger.info('Running MMD pdf')
 
        roi = mask.roi
        mmd = self.signalMMD(mask,distance_modulus,delta_mag=delta_mag,mass_steps=steps)
        
        # This is fragile, store this information somewhere else...
        nedges = int(np.rint((roi.bins_mag[-1]-roi.bins_mag[0])/delta_mag))+1
        edges_mag,delta_mag = np.linspace(roi.bins_mag[0],roi.bins_mag[-1],nedges,retstep=True)
                                    
        idx_mag_1 = np.searchsorted(edges_mag,mag_1)
        idx_mag_2 = np.searchsorted(edges_mag,mag_2)
 
        if np.any(idx_mag_1 > nedges) or np.any(idx_mag_1 == 0):
            msg = "Magnitude out of range..."
            raise Exception(msg)
        if np.any(idx_mag_2 > nedges) or np.any(idx_mag_2 == 0):
            msg = "Magnitude out of range..."
            raise Exception(msg)
 
        idx = mask.roi.indexROI(lon,lat)
        u_color = mmd[(mask.mask_roi_digi[idx],idx_mag_1,idx_mag_2)]
 
        # Remove the bin size to convert the pdf to units of mag^-2
        u_color /= delta_mag**2
 
        return u_color
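The out-of-range checks above rely on np.searchsorted returning 0 for values below the first magnitude edge and len(edges) for values above the last. A standalone sketch of that convention (the edge values here are illustrative):

import numpy as np

edges = np.linspace(16.0, 24.0, 81)            # 81 edges -> 80 bins of 0.1 mag
mags = np.array([15.5, 16.05, 23.95, 24.5])
idx = np.searchsorted(edges, mags)
print(idx)          # [ 0  1 80 81]: 0 and len(edges) flag out-of-range magnitudes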
Example #5
    def observableFractionMMD(self, mask, distance_modulus, mass_min=0.1):
        # This can be done faster...
        logger.info('Calculating observable fraction from MMD')

        mmd = self.signalMMD(mask,distance_modulus)
        obs_frac = mmd.sum(axis=-1).sum(axis=-1)[mask.mask_roi_digi[mask.roi.pixel_interior_cut]]
        return obs_frac
Example #6
def run(self):
    # The three mask options are (semi-)mutually exclusive
    if np.in1d(['maglims', 'simple', 'split'], self.opts.run).sum() > 1:
        raise Exception("Too many 'mask' run options.")

    if 'pixelize' in self.opts.run:
        # Pixelize the raw catalog data
        logger.info("Running 'pixelize'...")
        rawdir = self.config['data']['dirname']
        rawfiles = sorted(glob.glob(os.path.join(rawdir, '*.fits')))
        x = ugali.preprocess.pixelize.pixelizeCatalog(rawfiles, self.config)
    if 'density' in self.opts.run:
        # Calculate the stellar density
        logger.info("Running 'density'...")
        x = ugali.preprocess.pixelize.pixelizeDensity(self.config,
                                                      nside=512,
                                                      force=self.opts.force)
    if 'maglims' in self.opts.run:
        # Calculate magnitude limits
        logger.info("Running 'maglims'...")
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(force=self.opts.force)
    if 'simple' in self.opts.run:
        # Calculate simple magnitude limits
        logger.info("Running 'simple'...")
        #ugali.preprocess.maglims.simple_maglims(self.config,force=self.opts.force)
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(simple=True, force=self.opts.force)
    if 'split' in self.opts.run:
        # Split up a pre-existing maglim map
        logger.info("Running 'split'...")
        ugali.preprocess.maglims.split(self.config,
                                       'split',
                                       force=self.opts.force)
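The guard at the top of this run() uses np.in1d to ensure at most one of the mask-producing steps is requested; a minimal standalone version of that check (the requested list is hypothetical):

import numpy as np

requested = ['pixelize', 'simple']              # hypothetical contents of self.opts.run
mask_steps = ['maglims', 'simple', 'split']
if np.in1d(mask_steps, requested).sum() > 1:
    raise Exception("Too many 'mask' run options.")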
Example #7
def pixelizeDensity(config, nside=None, force=False):
    if nside is None: 
        nside = config['coords']['nside_likelihood']
    coordsys = config['coords']['coordsys'].upper()
    filenames = config.getFilenames()
    infiles = filenames[~filenames['catalog'].mask]
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    for ii,f in enumerate(infiles.data):
        infile = f['catalog']
        pix = f['pix']
        logger.info('(%i/%i) %s'%(ii+1, len(infiles), infile))

        outfile = config['data']['density']%pix
        if os.path.exists(outfile) and not force: 
            logger.info("Found %s; skipping..."%outfile)
            continue
            
        outdir = mkdir(os.path.dirname(outfile))
        pixels, density = stellarDensity(infile,nside,
                                         lon_field=lon_field,lat_field=lat_field)

        data = dict(PIXEL=pixels,DENSITY=density)
        healpix.write_partial_map(outfile,data,nside=nside,coord=coordsys[0])
Example #8
    def _setup_cmd(self, mode='cloud-in-cells'):
        """
        The purpose here is to create a more finely binned
        background CMD to sample from.
        """
        # Only setup once...
        if hasattr(self, 'bkg_lambda'): return

        logger.info("Setup color...")
        # In the limit theta->0: 2*pi*(1-cos(theta)) -> pi*theta**2
        # (Remember to convert from sr to deg^2)
        #solid_angle_roi = sr2deg(2*np.pi*(1-np.cos(np.radians(self.roi_radius))))
        solid_angle_roi = self.roi.area_pixel * len(self.roi.pixels)

        # Large CMD bins cause problems when simulating
        config = Config(self.config)
        config['color']['n_bins'] *= 5  #10
        config['mag']['n_bins'] *= 1  #2
        #config['mask']['minimum_solid_angle'] = 0
        roi = ugali.analysis.loglike.createROI(config, self.roi.lon,
                                               self.roi.lat)
        mask = ugali.analysis.loglike.createMask(config, roi)

        self.bkg_centers_color = roi.centers_color
        self.bkg_centers_mag = roi.centers_mag

        # Background CMD has units: [objs / deg^2 / mag^2]
        cmd_background = mask.backgroundCMD(self.catalog, mode)

        self.bkg_lambda = cmd_background * solid_angle_roi * roi.delta_color * roi.delta_mag
        np.sum(self.bkg_lambda)

        # Clean up
        del config, roi, mask
Example #9
 def run(self):
     logger.info("Testing pipeline...")
     if 'test' in self.opts.run:
         logger.info("  This should run.")
     if 'foo' in self.opts.run:
         logger.error("  This should NOT run")
         raise Exception
Example #10
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog into chunks by healpix pixel.
    
    Parameters:
    -----------
    infiles : List of input files
    config  : Configuration file
    force   : Overwrite existing files (deprecated)
    
    Returns:
    --------
    None
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()

    for i, filename in enumerate(infiles):
        logger.info('(%i/%i) %s' % (i + 1, len(infiles), filename))
        data = fitsio.read(filename)
        logger.info("%i objects found" % len(data))
        if not len(data): continue

        glon, glat = cel2gal(data['RA'], data['DEC'])
        cat_pix = ang2pix(nside_catalog, glon, glat)
        pix_pix = ang2pix(nside_pixel, glon, glat)
        cat_pix_name = 'PIX%i' % nside_catalog
        pix_pix_name = 'PIX%i' % nside_pixel

        data = mlab.rec_append_fields(
            data,
            names=['GLON', 'GLAT', cat_pix_name, pix_pix_name],
            arrs=[glon, glat, cat_pix, pix_pix],
            dtypes=['f4', 'f4', int, int])

        for pix in np.unique(cat_pix):
            logger.debug("Processing pixel %s" % pix)

            arr = data[cat_pix == pix]
            outfile = filenames.data['catalog'][pix]

            if not os.path.exists(outfile):
                logger.debug("Creating %s" % outfile)
                out = fitsio.FITS(outfile, mode='rw')
                out.write(arr)
                hdr = ugali.utils.healpix.header_odict(nside=nside_catalog,
                                                       coord='G')
                for key in ['PIXTYPE', 'ORDERING', 'NSIDE', 'COORDSYS']:
                    out[1].write_key(*list(hdr[key].values()))
                out[1].write_key('PIX',
                                 pix,
                                 comment='HEALPIX pixel for this file')
            else:
                out = fitsio.FITS(outfile, mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s" % outfile)
            out.close()
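The pixel assignment in pixelizeCatalog goes through an ang2pix helper; a minimal sketch of the equivalent call made directly with healpy (coordinates in degrees, RING ordering by default; whether ugali uses RING or NESTED is not shown in this snippet):

import numpy as np
import healpy as hp

nside_catalog = 8
glon = np.array([10.0, 220.5])      # hypothetical Galactic longitudes (deg)
glat = np.array([-35.2, 41.0])      # hypothetical Galactic latitudes (deg)

# lonlat=True lets healpy take lon/lat in degrees instead of co-latitude/longitude in radians.
cat_pix = hp.ang2pix(nside_catalog, glon, glat, lonlat=True)
print(cat_pix)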
Example #11
    def run(self, field=None, simple=False, force=False):
        """
        Loop through pixels containing catalog objects and calculate
        the magnitude limit. This gets a bit convoluted due to all
        the different pixel resolutions...
        """
        if field is None: fields = [1, 2]
        else: fields = [field]
        for filenames in self.filenames.compress(
                ~self.filenames.mask['catalog']).data:
            infile = filenames['catalog']
            for f in fields:
                outfile = filenames['mask_%i' % f]
                if os.path.exists(outfile) and not force:
                    logger.info("Found %s; skipping..." % outfile)
                    continue

                pixels, maglims = self.calculate(infile, f, simple)
                logger.info("Creating %s" % outfile)
                outdir = mkdir(os.path.dirname(outfile))
                data = odict()
                data['PIXEL'] = pixels
                data['MAGLIM'] = maglims.astype('f4')
                ugali.utils.healpix.write_partial_map(outfile, data,
                                                      self.nside_pixel)
Example #13
    def download(self, pixel, outdir=None, force=False):
        if outdir is None: outdir = './'
        else: mkdir(outdir)
        sqldir = mkdir(os.path.join(outdir, 'sql'))
        self._setup_casjobs()

        basename = self.basename + "_%04d" % pixel['name']
        sqlname = os.path.join(sqldir, basename + '.sql')
        dbname = basename + '_output'
        taskname = basename
        outfile = os.path.join(outdir, basename + ".fits")
        if os.path.exists(outfile) and not force:
            logger.warning("Found %s; skipping..." % (outfile))
            return

        logger.info(
            "\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)"
            % (pixel))
        logger.info("Working on " + sqlname)

        self.generate_query(pixel['ra_min'], pixel['ra_max'], pixel['dec_min'],
                            pixel['dec_max'], sqlname, dbname)

        try:
            self.query(self.release, taskname, sqlname)
        except subprocess.CalledProcessError as e:
            logger.error(e.output)
            self.drop(dbname)
            raise e
Example #14
def run(self):
    if 'pixelize' in self.opts.run:
        # Pixelize the raw catalog data
        logger.info("Running 'pixelize'...")
        rawdir = self.config['data']['dirname']
        rawfiles = sorted(glob.glob(os.path.join(rawdir, '*.fits')))
        x = ugali.preprocess.pixelize.pixelizeCatalog(rawfiles, self.config)
    if 'density' in self.opts.run:
        # Calculate the stellar density
        logger.info("Running 'density'...")
        x = ugali.preprocess.pixelize.pixelizeDensity(self.config,
                                                      nside=2**9,
                                                      force=self.opts.force)
    if 'maglims' in self.opts.run:
        # Calculate magnitude limits
        logger.info("Running 'maglims'...")
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(force=self.opts.force)
    if 'simple' in self.opts.run:
        # Calculate simple magnitude limits
        logger.info("Running 'simple'...")
        #ugali.preprocess.maglims.simple_maglims(self.config,force=self.opts.force)
        maglims = ugali.preprocess.maglims.Maglims(self.config)
        x = maglims.run(simple=True, force=self.opts.force)
    if 'split' in self.opts.run:
        logger.info("Running 'split'...")
        ugali.preprocess.maglims.simple_split(self.config,
                                              'split',
                                              force=self.opts.force)
Example #15
def pixelizeDensity(config, nside=None, force=False):
    if nside is None:
        nside = config['coords']['nside_likelihood']
    coordsys = config['coords']['coordsys'].upper()
    filenames = config.getFilenames()
    infiles = filenames[~filenames['catalog'].mask]
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    for ii, f in enumerate(infiles.data):
        infile = f['catalog']
        pix = f['pix']
        logger.info('(%i/%i) %s' % (ii + 1, len(infiles), infile))

        outfile = config['data']['density'] % pix
        if os.path.exists(outfile) and not force:
            logger.info("Found %s; skipping..." % outfile)
            continue

        outdir = mkdir(os.path.dirname(outfile))
        pixels, density = stellarDensity(infile,
                                         nside,
                                         lon_field=lon_field,
                                         lat_field=lat_field)

        data = dict(PIXEL=pixels, DENSITY=density)
        healpix.write_partial_map(outfile,
                                  data,
                                  nside=nside,
                                  coord=coordsys[0])
Example #17
    def calc_backgroundMMD(self):
        #ADW: At some point we may want to make the background level a fit parameter.
        logger.info('Calculating background MMD ...')
        self.mmd_background = self.mask.backgroundMMD(self.catalog_roi)
        #self.mmd_background = self.mask.backgroundMMD(self.catalog_roi,mode='histogram')
        #self.mmd_background = self.mask.backgroundMMD(self.catalog_roi,mode='uniform')
        # Background density (deg^-2 mag^-2) and background probability for each object
        logger.info('Calculating background probabilities ...')
        b_density = ugali.utils.binning.take2D(self.mmd_background,
                                               self.catalog.mag_2,
                                               self.catalog.mag_1,
                                               self.roi.bins_mag,
                                               self.roi.bins_mag)

        # ADW: I don't think this 'area_pixel' or 'delta_mag' factors are necessary,
        # so long as it is also removed from u_spatial and u_color
        #self._b = b_density * self.roi.area_pixel * self.delta_mag**2
        self._b = b_density

        if self.spatial_only:
            # ADW: This assumes a flat mask...
            #solid_angle_annulus = (self.mask.mask_1.mask_annulus_sparse > 0).sum()*self.roi.area_pixel
            solid_angle_annulus = (
                (self.mask.mask_1.mask_annulus_sparse > 0) *
                self.mask.frac_annulus_sparse).sum() * self.roi.area_pixel
            b_density = self.roi.inAnnulus(
                self.catalog_roi.lon,
                self.catalog_roi.lat).sum() / solid_angle_annulus
            self._b = np.array([b_density * self.roi.area_pixel])
Example #18
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog up into a set of healpix files.
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()
    
    for ii,infile in enumerate(infiles):
        logger.info('(%i/%i) %s'%(ii+1, len(infiles), infile))
        f = pyfits.open(infile)
        data = f[1].data
        header = f[1].header
        logger.info("%i objects found"%len(data))
        if not len(data): continue
        glon,glat = cel2gal(data['RA'],data['DEC'])
        catalog_pix = ang2pix(nside_catalog,glon,glat,coord='GAL')
        pixel_pix = ang2pix(nside_pixel,glon,glat,coord='GAL')
        names = [n.upper() for n in data.columns.names]
        ra_idx = names.index('RA'); dec_idx = names.index('DEC')
        idx = ra_idx if ra_idx > dec_idx else dec_idx
        catalog_pix_name = 'PIX%i'%nside_catalog
        pixel_pix_name = 'PIX%i'%nside_pixel

        coldefs = pyfits.ColDefs(
            [pyfits.Column(name='GLON',format='1D',array=glon),
             pyfits.Column(name='GLAT',format='1D',array=glat),
             pyfits.Column(name=catalog_pix_name,format='1J',array=catalog_pix),
             pyfits.Column(name=pixel_pix_name  ,format='1J',array=pixel_pix)]
        )
        hdu = pyfits.new_table(data.columns[:idx+1]+coldefs+data.columns[idx+1:])
        table = hdu.data

        for pix in numpy.unique(catalog_pix):
            logger.debug("Processing pixel %s"%pix)
            outfile = filenames.data['catalog'][pix]
            if not os.path.exists(outfile):
                logger.debug("Creating %s"%outfile)
                names = [n.upper() for n in table.columns.names]
                formats = table.columns.formats
                columns = [pyfits.Column(n,f) for n,f in zip(names,formats)]
                out = pyfits.HDUList([pyfits.PrimaryHDU(),pyfits.new_table(columns)])
                out[1].header['NSIDE'] = nside_catalog
                out[1].header['PIX'] = pix
                out.writeto(outfile)
            hdulist = pyfits.open(outfile,mode='update')
            t1 = hdulist[1].data
            # Could we speed up with sorting and indexing?
            t2 = table[ table[catalog_pix_name] == pix ]
            nrows1 = t1.shape[0]
            nrows2 = t2.shape[0]
            nrows = nrows1 + nrows2
            out = pyfits.new_table(t1.columns, nrows=nrows)
            for name in t1.columns.names:
                out.data.field(name)[nrows1:]=t2.field(name)
            hdulist[1] = out
            logger.debug("Writing %s"%outfile)
            hdulist.flush()
            hdulist.close()
Example #19
    def download(self, pixel, outdir=None, force=False):
        import pyfits 

        if outdir is None: outdir = './'
        else:              mkdir(outdir)
        sqldir = mkdir(os.path.join(outdir,'sql'))
        self._setup_desdbi()

        basename = self.basename + "_%04d"%pixel['name']
        sqlname = os.path.join(sqldir,basename+'.sql')
        taskname = basename
        outfile = os.path.join(outdir,basename+".fits")
        # ADW: There should be a 'force' option here
        if os.path.exists(outfile) and not force:
            logger.warning("Found %s; skipping..."%(outfile))
            return

        logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)"%(pixel))
        logger.info("Working on "+sqlname)
         
        self.generate_query(pixel['ra_min'],pixel['ra_max'],pixel['dec_min'],pixel['dec_max'],sqlname,outfile)
        ret = self.query(self.release,taskname,sqlname)
        if ret != 0:
            msg = "Download failed to complete."
            raise Exception(msg)
        return outfile
Example #20
    def download(self, pixel, outdir=None, force=False):
        if outdir is None: outdir = './'
        else:              mkdir(outdir)
        sqldir = mkdir(os.path.join(outdir,'sql'))
        self._setup_casjobs()

        basename = self.basename + "_%04d"%pixel['name']
        sqlname = os.path.join(sqldir,basename+'.sql')
        dbname = basename+'_output'
        taskname = basename
        outfile = os.path.join(outdir,basename+".fits")
        if os.path.exists(outfile) and not force:
            logger.warning("Found %s; skipping..."%(outfile))
            return

        logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)"%(pixel))
        logger.info("Working on "+sqlname)
         
        self.generate_query(pixel['ra_min'],pixel['ra_max'],pixel['dec_min'],pixel['dec_max'],sqlname,dbname)

        try:
            self.query(self.release,taskname,sqlname)
        except subprocess.CalledProcessError as e:
            logger.error(e.output)
            self.drop(dbname)
            raise e
Example #21
    def _setup_cmd(self,mode='cloud-in-cells'):
        """
        The purpose here is to create a more finely binned
        background CMD to sample from.
        """
        # Only setup once...
        if hasattr(self,'bkg_lambda'): return

        logger.info("Setup color...")
        # In the limit theta->0: 2*pi*(1-cos(theta)) -> pi*theta**2
        # (Remember to convert from sr to deg^2) 
        #solid_angle_roi = sr2deg(2*np.pi*(1-np.cos(np.radians(self.roi_radius))))
        solid_angle_roi = self.roi.area_pixel*len(self.roi.pixels)

        # Large CMD bins cause problems when simulating
        config = Config(self.config) 
        config['color']['n_bins'] *= 5 #10
        config['mag']['n_bins']   *= 1 #2
        #config['mask']['minimum_solid_angle'] = 0
        roi = ugali.analysis.loglike.createROI(config,self.roi.lon,self.roi.lat)
        mask = ugali.analysis.loglike.createMask(config,roi)

        self.bkg_centers_color  = roi.centers_color
        self.bkg_centers_mag    = roi.centers_mag

        # Background CMD has units: [objs / deg^2 / mag^2]
        cmd_background = mask.backgroundCMD(self.catalog,mode)
        
        self.bkg_lambda=cmd_background*solid_angle_roi*roi.delta_color*roi.delta_mag
        np.sum(self.bkg_lambda)

        # Clean up 
        del config, roi, mask
Example #22
    def download(self, pixel, outdir=None, force=False):
        import pyfits

        if outdir is None: outdir = './'
        else: mkdir(outdir)
        sqldir = mkdir(os.path.join(outdir, 'sql'))
        self._setup_desdbi()

        basename = self.basename + "_%04d" % pixel['name']
        sqlname = os.path.join(sqldir, basename + '.sql')
        taskname = basename
        outfile = os.path.join(outdir, basename + ".fits")
        # ADW: There should be a 'force' option here
        if os.path.exists(outfile) and not force:
            logger.warning("Found %s; skipping..." % (outfile))
            return

        logger.info(
            "\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)"
            % (pixel))
        logger.info("Working on " + sqlname)

        self.generate_query(pixel['ra_min'], pixel['ra_max'], pixel['dec_min'],
                            pixel['dec_max'], sqlname, outfile)
        ret = self.query(self.release, taskname, sqlname)
        if ret != 0:
            msg = "Download failed to complete."
            raise Exception(msg)
        return outfile
Example #23
    def __init__(self, config, loglike):  # What it should be...
        """
        Object to efficiently search over a grid of ROI positions.

        Parameters:
        -----------
        config  : Configuration object or filename.
        loglike : Log-likelihood object

        Returns:
        --------
        grid    : GridSearch instance
        """

        self.config = Config(config)
        self.loglike = loglike
        self.source = self.loglike.source
        self.roi = self.loglike.roi
        self.mask = self.loglike.mask

        logger.info(str(self.loglike))

        self.stellar_mass_conversion = self.source.stellar_mass()
        self.distance_modulus_array = np.asarray(
            self.config['scan']['distance_modulus_array'])
        self.extension_array = np.asarray(self.config['scan'].get(
            'extension_array', [self.source.extension]))
Example #24
def inFootprint(config, pixels, nside=None):
    """
    Open each valid filename for the set of pixels and determine the set 
    of subpixels with valid data.

    Parameters
    ----------
    config : config
        Configuration (file or object)
    pixels : array or int
        List of pixels to create footprint for
    nside  : int, optional
        Healpix nside
        
    Returns
    -------
    inside : array
        Boolean array of whether pixel is in footprint
    """
    logger.info("Calculating survey footprint...")

    config = Config(config)
    nside_catalog = config['coords']['nside_catalog']
    nside_likelihood = config['coords']['nside_likelihood']
    nside_pixel = config['coords']['nside_pixel']

    if np.isscalar(pixels): pixels = np.array([pixels])
    if nside is None: nside = nside_likelihood

    filenames = config.getFilenames()
    catalog_pixels = filenames['pix'].compressed()

    inside = np.zeros(len(pixels), dtype=bool)
    if not nside_catalog:
        catalog_pix = [0]
    else:
        catalog_pix = superpixel(pixels, nside, nside_catalog)
        catalog_pix = np.intersect1d(catalog_pix, catalog_pixels)

    fnames = filenames[catalog_pix]

    # Load the first mask
    logger.debug("Loading %s" % fnames['mask_1'])
    _nside, subpix1, val1 = read_partial_map(fnames['mask_1'],
                                             'MAGLIM',
                                             fullsky=False,
                                             multiproc=8)
    # Load the second mask
    logger.debug("Loading %s" % fnames['mask_2'])
    _nside, subpix2, val2 = read_partial_map(fnames['mask_2'],
                                             'MAGLIM',
                                             fullsky=False,
                                             multiproc=8)
    # Run the subpixels
    subpix = np.intersect1d(subpix1, subpix2)
    superpix = np.unique(superpixel(subpix, nside_pixel, nside))
    inside |= np.in1d(pixels, superpix)

    return inside
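superpixel() itself is not shown here; a sketch of the degradation it presumably performs, mapping fine-resolution pixel indices to their enclosing pixels at a coarser nside by round-tripping through pixel centers with healpy:

import numpy as np
import healpy as hp

def superpixel_sketch(subpix, nside_subpix, nside_superpix):
    """Map RING-ordered pixels at nside_subpix to the enclosing pixels at nside_superpix."""
    theta, phi = hp.pix2ang(nside_subpix, subpix)
    return hp.ang2pix(nside_superpix, theta, phi)

subpix = np.arange(hp.nside2npix(64))                 # every pixel at nside=64
parents = superpixel_sketch(subpix, 64, 8)
print(np.unique(parents).size)                        # 768 pixels at nside=8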
Example #25
    def write(self, outfile):
        """
        Save the likelihood results as a sparse HEALPix map.
        """
        data = odict()
        data['PIXEL'] = self.roi.pixels_target
        # Full data output (too large for survey)
        if self.config['scan']['full_pdf']:
            data['LOG_LIKELIHOOD'] = self.log_likelihood_sparse_array.T
            data['RICHNESS'] = self.richness_sparse_array.T
            data['RICHNESS_LOWER'] = self.richness_lower_sparse_array.T
            data['RICHNESS_UPPER'] = self.richness_upper_sparse_array.T
            data['RICHNESS_LIMIT'] = self.richness_upper_limit_sparse_array.T
            #data['STELLAR_MASS']=self.stellar_mass_sparse_array.T
            data['FRACTION_OBSERVABLE'] = self.fraction_observable_sparse_array.T
        else:
            data['LOG_LIKELIHOOD'] = self.log_likelihood_sparse_array.T
            data['RICHNESS'] = self.richness_sparse_array.T
            data['FRACTION_OBSERVABLE'] = self.fraction_observable_sparse_array.T

        # Convert to 32bit float
        for k in list(data.keys())[1:]:
            data[k] = data[k].astype('f4', copy=False)

        # Stellar mass can be calculated from STELLAR * RICHNESS
        header = odict()
        header['STELLAR'] = round(self.stellar_mass_conversion, 8)
        header['LKDNSIDE'] = self.config['coords']['nside_likelihood']
        header['LKDPIX'] = ang2pix(self.config['coords']['nside_likelihood'],
                                   self.roi.lon, self.roi.lat)
        header['NROI'] = self.roi.inROI(self.loglike.catalog_roi.lon,
                                        self.loglike.catalog_roi.lat).sum()
        header['NANNULUS'] = self.roi.inAnnulus(
            self.loglike.catalog_roi.lon, self.loglike.catalog_roi.lat).sum()
        header['NINSIDE'] = self.roi.inInterior(
            self.loglike.catalog_roi.lon, self.loglike.catalog_roi.lat).sum()
        header['NTARGET'] = self.roi.inTarget(
            self.loglike.catalog_roi.lon, self.loglike.catalog_roi.lat).sum()

        # Flatten if there is only a single distance modulus
        # ADW: Is this really what we want to do?
        if len(self.distance_modulus_array) == 1:
            for key in data:
                data[key] = data[key].flatten()

        logger.info("Writing %s..." % outfile)
        write_partial_map(outfile,
                          data,
                          nside=self.config['coords']['nside_pixel'],
                          header=header,
                          clobber=True)

        fitsio.write(outfile,
                     dict(DISTANCE_MODULUS=self.distance_modulus_array.astype(
                         'f4', copy=False)),
                     extname='DISTANCE_MODULUS',
                     clobber=False)
Example #26
 def print_info(self,age,metallicity):
     params = dict(age=age,z=metallicity)
     params['name'] = self.__class__.__name__
     params['survey'] = self.survey
     params['feh'] = self.z2feh(metallicity)
     msg = 'Downloading: %(name)s (survey=%(survey)s, age=%(age).1fGyr, Z=%(z).5f, Fe/H=%(feh).3f)'%params
     logger.info(msg)
     return msg
Example #27
 def print_info(self,age,metallicity):
     params = dict(age=age,z=metallicity)
     params['name'] = self.__class__.__name__
     params['survey'] = self.survey
     params['feh'] = self.isochrone.z2feh(metallicity)
     msg = 'Downloading: %(name)s (survey=%(survey)s, age=%(age).1fGyr, Z=%(z).5f, Fe/H=%(feh).3f)'%params
     logger.info(msg)
     return msg
Example #28
 def query(self,dbase,task,query):
     logger.info("Running query...")
     cmd = "java -jar casjobs.jar run -t %s -n %s -f %s" % (dbase,task,query)
     logger.info(cmd)
     ret = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT) 
     if 'ERROR:' in ret:
         raise subprocess.CalledProcessError(1,cmd,ret)
     return ret
Example #29
 def writeLabels(self,filename=None):
     if filename is None: filename = self.labelfile
     # Converting to float is a waste of memory...
     # This should be much more robustly done in writeSparseHealpixMap
     data_dict = {'LABEL':self.labels.astype(float)}
     logger.info("Writing %s..."%filename)
     ugali.utils.skymap.writeSparseHealpixMap(self.pixels,data_dict,self.nside,filename,
                                              distance_modulus_array=self.distances)
Example #30
 def read_population(self, filename=None):
     if not filename:
         filename = os.path.join(self.config['simulate']['dirname'],
                                 self.config['simulate']['popfile'])
     logger.info("Reading population file: %s" % filename)
     pop = ugali.utils.fileio.read(filename)
     pop.dtype.names = list(map(str.upper, pop.dtype.names))
     return pop
Example #31
    def write(self, outfile):
        """
        Save the likelihood results as a sparse HEALPix map.
        """
        data = odict()
        data['PIXEL']=self.roi.pixels_target
        # Full data output (too large for survey)
        if self.config['scan']['full_pdf']:
            data['LOG_LIKELIHOOD']=self.log_likelihood_sparse_array.T
            data['RICHNESS']=self.richness_sparse_array.T
            data['RICHNESS_LOWER']=self.richness_lower_sparse_array.T
            data['RICHNESS_UPPER']=self.richness_upper_sparse_array.T
            data['RICHNESS_LIMIT']=self.richness_upper_limit_sparse_array.T
            #data['STELLAR_MASS']=self.stellar_mass_sparse_array.T
            data['FRACTION_OBSERVABLE']=self.fraction_observable_sparse_array.T
        else:
            data['LOG_LIKELIHOOD']=self.log_likelihood_sparse_array.T
            data['RICHNESS']=self.richness_sparse_array.T
            data['FRACTION_OBSERVABLE']=self.fraction_observable_sparse_array.T

        # Convert to 32bit float
        for k in list(data.keys())[1:]:
            data[k] = data[k].astype('f4',copy=False)
            
        # Stellar mass can be calculated from STELLAR * RICHNESS
        header = odict()
        header['STELLAR']=round(self.stellar_mass_conversion,8)
        header['LKDNSIDE']=self.config['coords']['nside_likelihood']
        header['LKDPIX']=ang2pix(self.config['coords']['nside_likelihood'],
                                 self.roi.lon,self.roi.lat)
        header['NROI']=self.roi.inROI(self.loglike.catalog_roi.lon,
                                      self.loglike.catalog_roi.lat).sum()
        header['NANNULUS']=self.roi.inAnnulus(self.loglike.catalog_roi.lon,
                                              self.loglike.catalog_roi.lat).sum()
        header['NINSIDE']=self.roi.inInterior(self.loglike.catalog_roi.lon,
                                              self.loglike.catalog_roi.lat).sum()
        header['NTARGET']=self.roi.inTarget(self.loglike.catalog_roi.lon,
                                            self.loglike.catalog_roi.lat).sum()

        # Flatten if there is only a single distance modulus
        # ADW: Is this really what we want to do?
        if len(self.distance_modulus_array) == 1:
            for key in data:
                data[key] = data[key].flatten()

        logger.info("Writing %s..."%outfile)
        write_partial_map(outfile,data,
                          nside=self.config['coords']['nside_pixel'],
                          header=header,
                          clobber=True
                          )
        
        fitsio.write(outfile,
                     dict(DISTANCE_MODULUS=self.distance_modulus_array.astype('f4',copy=False)),
                     extname='DISTANCE_MODULUS',
                     clobber=False)
Example #32
 def throttle(self,max_jobs=None,sleep=60):
     if max_jobs is None: max_jobs = self.max_jobs
     if max_jobs is None: return
     while True:
         njobs = self.njobs()
         if njobs < max_jobs:
             return
         else:
             logger.info('%i jobs already in queue, waiting...'%(njobs))
             time.sleep(sleep)
Example #34
def do_membership(args):
    """ Write the membership output file """
    config,name,label,coord = args

    filenames = make_filenames(config,label)
    srcfile = filenames['srcfile']
    memfile = filenames['memfile']

    logger.info("Writing %s..."%memfile)
    from ugali.analysis.loglike import write_membership
    write_membership(memfile,config,srcfile,section='source')
Example #35
def match_exposures(data, radius=1.0):
    """ A matching algorithm that does not associate objects on the
    same exposure with each other. This algorithm ended up being too
    slow.

    Parameters:
    -----------
    data   : input data containing 'RA' and 'DEC'
    radius : matching radius (arcsec)

    Returns:
    --------
    match_id : the object matching id
    """

    expnums, counts = np.unique(data['EXPNUM'], return_counts=True)
    nobjs = len(data)
    nexp = len(expnums)
    logger.info("Found %i objects in %i exposures" % (nobjs, nexp))
    match_id = -1 * np.ones(nobjs, dtype=int)

    for i, expnum in enumerate(expnums[counts.argsort()[::-1]]):
        exp_idx = np.where(data['EXPNUM'] == expnum)[0]

        if i == 0:
            unique_idx = exp_idx
            match_id[unique_idx] = np.arange(len(unique_idx), dtype=int)

        m = match(data['RA'][exp_idx], data['DEC'][exp_idx],
                  data['RA'][unique_idx], data['DEC'][unique_idx])
        exp_match_idx, unique_match_idx, sep = m

        matched = np.where(sep <= radius / 3600.)[0]
        unmatched = np.where(sep > radius / 3600.)[0]
        logger.info("%i: EXPNUM=%i NOBJS=%i NMATCH=%i" %
                    (i, expnum, len(exp_idx), len(matched)))
        # Assign the existing match_id to matched objects
        ii = unique_idx[unique_match_idx[matched]]
        jj = exp_idx[exp_match_idx[matched]]
        match_id[jj] = match_id[ii]

        # Create a new match_id for unmatched objects
        new_id = np.arange(len(unmatched), dtype=int) + np.max(match_id) + 1
        kk = exp_idx[exp_match_idx[unmatched]]
        match_id[kk] = new_id

        # Add the unmatched indices to list of unique indexes
        unique_idx = np.hstack([unique_idx, exp_idx[unmatched]])

    if np.any(match_id < 0):
        msg = "Unmatched objects found."
        raise Exception(msg)

    return match_id
Example #36
def do_membership(args):
    """ Write the membership output file """
    config, name, label, coord = args

    filenames = make_filenames(config, label)
    srcfile = filenames['srcfile']
    memfile = filenames['memfile']

    logger.info("Writing %s..." % memfile)
    from ugali.analysis.loglike import write_membership
    write_membership(memfile, config, srcfile, section='source')
Example #37
    def labelHealpix(pixels, values, nside, threshold=0, xsize=1000):
        """
        Label contiguous regions of a (sparse) HEALPix map. Works by mapping 
        HEALPix array to a Mollweide projection and applying scipy.ndimage.label
     
        Assumes non-nested HEALPix map.
        
        Parameters:
        pixels    : Pixel values associated to (sparse) HEALPix array
        values    : (Sparse) HEALPix array of data values
        nside     : HEALPix dimensionality
        threshold : Threshold value for object detection
        xsize     : Size of Mollweide projection
        
        Returns:
        labels, nlabels
        """
        proj = healpy.projector.MollweideProj(xsize=xsize)
        vec = healpy.pix2vec(nside, pixels)
        xy = proj.vec2xy(vec)
        ij = proj.xy2ij(xy)
        xx, yy = proj.ij2xy()

        # Convert to Mollweide
        searchims = []
        if values.ndim < 2: iterate = [values]
        else: iterate = values.T
        for i, value in enumerate(iterate):
            logger.debug("Labeling slice %i...")
            searchim = numpy.zeros(xx.shape, dtype=bool)
            select = (value > threshold)
            yidx = ij[0][select]
            xidx = ij[1][select]
            searchim[yidx, xidx] |= True
            searchims.append(searchim)
        searchims = numpy.array(searchims)

        # Full binary structure
        s = ndimage.generate_binary_structure(searchims.ndim, searchims.ndim)

        ### # Dilate in the z-direction
        logger.info("  Dilating image...")
        searchims = ndimage.binary_dilation(searchims, s, 1)

        # Do the labeling
        logger.info("  Labeling image...")
        labels, nlabels = ndimage.label(searchims, structure=s)

        # Convert back to healpix
        pix_labels = labels[:, ij[0], ij[1]].T
        pix_labels = pix_labels.reshape(values.shape)
        pix_labels *= (values > threshold)  # re-trim

        return pix_labels, nlabels
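A self-contained sketch of the scipy.ndimage calls that do the labeling above, applied to a small boolean image with the same full (diagonal-connected) binary structure:

import numpy as np
from scipy import ndimage

image = np.array([[0, 1, 1, 0, 0],
                  [0, 1, 0, 0, 1],
                  [0, 0, 0, 1, 1]], dtype=bool)

# connectivity == rank gives full connectivity, so diagonal neighbors join.
s = ndimage.generate_binary_structure(image.ndim, image.ndim)
labels, nlabels = ndimage.label(image, structure=s)
print(nlabels)      # 2 contiguous regions
print(labels)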
Example #38
    def writeCandidates(self,filename=None):
        if filename is None: filename = self.candfile

        threshold = self.config['search']['cand_threshold']
        select  = (self.assocs['CUT']==0)
        select &= (self.assocs['TS']>threshold)
        #select &= (self.assocs['ASSOC2']=='')

        self.candidates = self.assocs[select]
        logger.info("Writing %s..."%filename)
        fitsio.write(filename,self.candidates,clobber=True)
Example #39
    def writeCandidates(self, filename=None):
        if filename is None: filename = self.candfile

        threshold = self.config['search']['cand_threshold']
        select = (self.assocs['CUT'] == 0)
        select &= (self.assocs['TS'] > threshold)
        #select &= (self.assocs['ASSOC2']=='')

        self.candidates = self.assocs[select]
        logger.info("Writing %s..." % filename)
        fitsio.write(filename, self.candidates, clobber=True)
Example #40
 def writeLabels(self, filename=None):
     if filename is None: filename = self.labelfile
     # Converting to float is a waste of memory...
     # This should be much more robustly done in writeSparseHealpixMap
     data_dict = {'LABEL': self.labels.astype(float)}
     logger.info("Writing %s..." % filename)
     ugali.utils.skymap.writeSparseHealpixMap(
         self.pixels,
         data_dict,
         self.nside,
         filename,
         distance_modulus_array=self.distances)
Example #41
 def sky(self,lon=None,lat=None,size=1):
     logger.info("Generating %i random points..."%size)
     # Random longitude and latitude
     lon,lat = ugali.utils.stats.sky(lon,lat,size=10*size)
     # Random healpix coordinates inside footprint
     nside_pixel = self.config['coords']['nside_pixel']
     pixels = ang2pix(nside_pixel,lon,lat)
     if np.unique(pixels).size > 1:
         inside = ugali.utils.skymap.inFootprint(self.config,pixels,nside=nside_pixel)
     else:
         inside = np.ones(len(pixels),dtype=bool)
     return lon[inside][:size],lat[inside][:size]
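sky() draws ten times the requested number of points and trims to size after the footprint cut; a minimal standalone sketch of that oversample-and-trim pattern (the cut here is a stand-in for the inFootprint test):

import numpy as np

def sample_with_cut(size, oversample=10):
    x = np.random.uniform(0.0, 1.0, oversample * size)
    inside = x < 0.7                    # stand-in for the footprint test
    return x[inside][:size]

print(sample_with_cut(5))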
Example #42
    def labelHealpix(pixels, values, nside, threshold=0, xsize=1000):
        """
        Label contiguous regions of a (sparse) HEALPix map. Works by mapping 
        HEALPix array to a Mollweide projection and applying scipy.ndimage.label
     
        Assumes non-nested HEALPix map.
        
        Parameters:
        pixels    : Pixel values associated to (sparse) HEALPix array
        values    : (Sparse) HEALPix array of data values
        nside     : HEALPix dimensionality
        threshold : Threshold value for object detection
        xsize     : Size of Mollweide projection
        
        Returns:
        labels, nlabels
        """
        proj = healpy.projector.MollweideProj(xsize=xsize)
        vec = healpy.pix2vec(nside,pixels)
        xy = proj.vec2xy(vec)
        ij = proj.xy2ij(xy)
        xx,yy = proj.ij2xy()
     
        # Convert to Mollweide
        searchims = []
        if values.ndim < 2: iterate = [values]
        else:               iterate = values.T
        for i,value in enumerate(iterate):
            logger.debug("Labeling slice %i...")
            searchim = numpy.zeros(xx.shape,dtype=bool)
            select = (value > threshold)
            yidx = ij[0][select]; xidx = ij[1][select]
            searchim[yidx,xidx] |= True
            searchims.append( searchim )
        searchims = numpy.array(searchims)

        # Full binary structure
        s = ndimage.generate_binary_structure(searchims.ndim,searchims.ndim)
     
        ### # Dilate in the z-direction
        logger.info("  Dilating image...")
        searchims = ndimage.binary_dilation(searchims,s,1)
        
        # Do the labeling
        logger.info("  Labeling image...")
        labels,nlabels = ndimage.label(searchims,structure=s)

        # Convert back to healpix
        pix_labels = labels[:,ij[0],ij[1]].T
        pix_labels = pix_labels.reshape(values.shape)
        pix_labels *= (values > threshold) # re-trim

        return pix_labels, nlabels
Example #43
def write_partial_map(filename,
                      data,
                      nside,
                      coord=None,
                      nest=False,
                      header=None,
                      dtype=None,
                      **kwargs):
    """
    Partial HEALPix maps are used to efficiently store maps of the sky by only
    writing out the pixels that contain data.

    Three-dimensional data can be saved by supplying a distance modulus array
    which is stored in a separate extension.

    Parameters:
    -----------
    filename : output file name
    data     : dictionary or recarray of data to write (must contain 'PIXEL')
    nside    : healpix nside of data
    coord    : 'G'alactic, 'C'elestial, 'E'cliptic
    nest     : True for NESTED pixel ordering, False for RING
    kwargs   : Passed to fitsio.write

    Returns:
    --------
    None
    """
    import fitsio

    # ADW: Do we want to make everything uppercase?

    if isinstance(data, dict):
        names = list(data.keys())
    else:
        names = data.dtype.names

    if 'PIXEL' not in names:
        msg = "'PIXEL' column not found."
        raise ValueError(msg)

    hdr = header_odict(nside=nside, coord=coord, nest=nest)
    fitshdr = fitsio.FITSHDR(list(hdr.values()))
    if header is not None:
        for k, v in header.items():
            fitshdr.add_record({'name': k, 'value': v})
    # ADW: Should this be a debug?
    logger.info("Writing %s..." % filename)
    fitsio.write(filename,
                 data,
                 extname='PIX_DATA',
                 header=fitshdr,
                 clobber=True)
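A minimal usage sketch of write_partial_map() as defined above (assuming fitsio is installed and that header_odict and logger from the surrounding ugali module are importable; the pixel values and header keyword are hypothetical):

import numpy as np

nside = 64
data = dict(PIXEL=np.array([100, 101, 205]),                  # sparse pixel indices
            MAGLIM=np.array([23.1, 23.0, 22.7], dtype='f4'))

# Extra keywords passed via `header` become FITS records in the PIX_DATA extension.
write_partial_map('maglim_sparse.fits', data, nside,
                  coord='G', header={'SURVEY': 'hypothetical'})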
Example #45
    def writeLabels(self,filename=None):
        if filename is None: filename = self.labelfile

        # ADW: Is it necessary to convert labels?
        data_dict = {'PIXEL':self.pixels,
                     'LABEL':self.labels.astype(float,copy=False)}

        logger.info("Writing %s..."%filename)
        healpix.write_partial_map(filename,data_dict,self.nside)
        fitsio.write(filename,
                     {'DISTANCE_MODULUS':self.distances.astype('f4',copy=False)},
                     extname='DISTANCE_MODULUS',
                     clobber=False)
Example #46
 def read_catalog(self, filename=None):
     if not filename:
         filename = os.path.join(self.config['simulate']['dirname'],
                                 self.config['simulate']['catfile'])
     logger.info("Reading catalog file: %s" % filename)
     catalog = ugali.observation.catalog.Catalog(self.config,
                                                 filenames=filename)
     catalog.data = mlab.rec_append_fields(catalog.data,
                                           names=['PIX8', 'PIX4096'],
                                           arrs=np.zeros(
                                               (2, len(catalog.lon)),
                                               dtype='>i8'))
     return catalog
Example #47
def do_membership(args):
    """ Write the membership output file """
    config,name,label,coord = args

    filenames = make_filenames(config,label)
    srcfile = filenames['srcfile']
    memfile = filenames['memfile']

    source = ugali.analysis.source.Source()
    source.load(srcfile,'source')

    loglike = ugali.analysis.loglike.createLoglike(config,source)
    logger.info("Writing %s..."%memfile)
    loglike.write_membership(memfile)
Example #48
    def _applySelection(self,selection=None):
        # ADW: This is a hack (eval is unsafe!)
        if selection is None:
            selection = self.config['catalog'].get('selection')

        if not selection: 
            return
        elif 'self.data' not in selection:
            msg = "Selection does not contain 'data'"
            raise Exception(msg)
        else:
            logger.info('Evaluating selection: \n"%s"'%selection)
            sel = eval(selection)
            self.data = self.data[sel]
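A hedged illustration of the selection mechanism above with a tiny stand-in object (the column names are hypothetical); as the comment notes, eval of a config string is unsafe, so this only demonstrates the expected string format:

import numpy as np

class _Demo(object):
    """Stand-in with a .data recarray, mimicking the catalog object above."""
    def __init__(self):
        self.data = np.rec.fromarrays(
            [np.array([22.0, 24.1, 23.0]), np.array([0, 0, 3])],
            names=['MAG_PSF_G', 'FLAGS_G'])       # hypothetical columns

    def apply(self, selection):
        sel = eval(selection)                     # same (unsafe) mechanism as above
        self.data = self.data[sel]

d = _Demo()
d.apply("(self.data['FLAGS_G'] == 0) & (self.data['MAG_PSF_G'] < 23.5)")
print(len(d.data))      # 1 object survives the cut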
Example #49
    def spatialBin(self, roi):
        """
        Calculate indices of ROI pixels corresponding to object locations.
        """
        if hasattr(self,'pixel_roi_index') and hasattr(self,'pixel'): 
            logger.warning('Catalog already spatially binned')
            return

        # ADW: Not safe to set index = -1 (since it will access last entry); 
        # np.inf would be better...
        self.pixel = ang2pix(self.config['coords']['nside_pixel'],self.lon,self.lat)
        self.pixel_roi_index = roi.indexROI(self.lon,self.lat)

        logger.info("Found %i objects outside ROI"%(self.pixel_roi_index < 0).sum())
Example #50
    def satellite(self,stellar_mass,distance_modulus,mc_source_id=1,seed=None,**kwargs):
        """
        Create a simulated satellite. Returns a catalog object.
        """
        if seed is not None: np.random.seed(seed)

        isochrone = kwargs.pop('isochrone',self.isochrone)
        kernel    = kwargs.pop('kernel',self.kernel)

        for k,v in kwargs.items():
            if k in kernel.params.keys(): setattr(kernel,k,v)

        mag_1, mag_2 = isochrone.simulate(stellar_mass, distance_modulus)
        lon, lat     = kernel.simulate(len(mag_1))
 
        logger.info("Simulating %i satellite stars..."%len(mag_1))
        pix = ang2pix(self.config['coords']['nside_pixel'], lon, lat)

        # There is probably a better way to do this step without creating the full HEALPix map
        mask = -1. * numpy.ones(healpy.nside2npix(self.config['coords']['nside_pixel']))
        mask[self.roi.pixels] = self.mask.mask_1.mask_roi_sparse
        mag_lim_1 = mask[pix]
        mask = -1. * numpy.ones(healpy.nside2npix(self.config['coords']['nside_pixel']))
        mask[self.roi.pixels] = self.mask.mask_2.mask_roi_sparse
        mag_lim_2 = mask[pix]

        mag_err_1 = self.photo_err_1(mag_lim_1 - mag_1)
        mag_err_2 = self.photo_err_2(mag_lim_2 - mag_2)

        # Randomize magnitudes by their errors
        mag_obs_1 = mag_1+numpy.random.normal(size=len(mag_1))*mag_err_1
        mag_obs_2 = mag_2+numpy.random.normal(size=len(mag_2))*mag_err_2
        #mag_obs_1 = mag_1
        #mag_obs_2 = mag_2

        #select = numpy.logical_and(mag_obs_1 < mag_lim_1, mag_obs_2 < mag_lim_2)
        select = (mag_lim_1>mag_obs_1)&(mag_lim_2>mag_obs_2)

        # Make sure objects lie within the original cmd (should also be done later...)
        #select &= (ugali.utils.binning.take2D(self.mask.solid_angle_cmd, mag_obs_1 - mag_obs_2, mag_obs_1,self.roi.bins_color, self.roi.bins_mag) > 0)

        #return mag_1_obs[cut], mag_2_obs[cut], lon[cut], lat[cut]
        logger.info("Clipping %i simulated satellite stars..."%(~select).sum())
        mc_source_id = mc_source_id * numpy.ones(len(mag_1))
        
        hdu = ugali.observation.catalog.makeHDU(self.config,mag_obs_1[select],mag_err_1[select],
                                                mag_obs_2[select],mag_err_2[select], 
                                                lon[select],lat[select],mc_source_id[select])
        catalog = ugali.observation.catalog.Catalog(self.config, data=hdu.data)
        return catalog
Example #51
    def __init__(self, infiles, roi):
        """
        Parameters:
        -----------
        infiles : list of sparse healpix mask files
        roi : roi object

        Returns:
        --------
        mask : MaskBand object
        """
        self.roi = roi
        self.config = self.roi.config

        # ADW: It's overkill to make the full map just to slim it
        # down, but we don't have a great way to go from map pixels to
        # roi pixels.
        nside,pixel,maglim = healpix.read_partial_map(infiles,column='MAGLIM')
        self.nside = nside

        # Sparse maps of pixels in various ROI regions
        self.mask_roi_sparse = maglim[self.roi.pixels] 

        # Try to get the detection fraction
        self.frac_roi_sparse = (self.mask_roi_sparse > 0)
        try: 
            logger.debug("Reading FRACDET...")
            nside,pixel,frac=healpix.read_partial_map(infiles,column='FRACDET')
            # This clipping might gloss over bugs...
            fractype = self.config['mask'].get('fractype','binary')
            fracmin = self.config['mask'].get('fracmin',0.5)
            if fractype == 'binary':
                frac = np.where(frac < fracmin, 0.0, 1.0)
            elif fractype == 'full':
                frac = np.where(frac < fracmin, 0.0, frac)
            elif not fractype:
                pass
            else:
                msg = "Unrecognized fractype: %s"%fractype
                logger.warn(msg)
                
            self.frac_roi_sparse = np.clip(frac[self.roi.pixels],0.0,1.0)
        except ValueError as e:
            # No detection fraction present
            msg = "No 'FRACDET' column found in masks; assuming FRACDET = 1.0"
            logger.info(msg)

        # Explicitly zero the maglim of pixels with fracdet < fracmin
        self.mask_roi_sparse[self.frac_roi_sparse == 0] = 0.0
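
The FRACDET handling above supports two modes: 'binary' keeps or drops a pixel outright by comparing its coverage fraction to fracmin, while 'full' preserves the fractional coverage but zeroes pixels below the threshold. A minimal standalone sketch of just that clipping logic, with hypothetical coverage values:

import numpy as np

frac = np.array([0.05, 0.30, 0.60, 0.95, 1.20])   # hypothetical coverage fractions
fracmin, fractype = 0.5, 'binary'

if fractype == 'binary':
    frac = np.where(frac < fracmin, 0.0, 1.0)     # all-or-nothing coverage
elif fractype == 'full':
    frac = np.where(frac < fracmin, 0.0, frac)    # keep fractional coverage

frac = np.clip(frac, 0.0, 1.0)                    # guard against values > 1
print(frac)                                       # binary mode: [0. 0. 1. 1. 1.]
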
Example #52
    def _defineVariables(self):
        """
        Helper function to define pertinent variables from catalog data.
        """
        self.objid = self.data.field(self.config['catalog']['objid_field'])
        self.lon = self.data.field(self.config['catalog']['lon_field'])
        self.lat = self.data.field(self.config['catalog']['lat_field'])

        #if self.config['catalog']['coordsys'].lower() == 'cel' \
        #   and self.config['coords']['coordsys'].lower() == 'gal':
        #    logger.info('Converting catalog objects from CELESTIAL to GALACTIC coordinates')
        #    self.lon, self.lat = ugali.utils.projector.celToGal(self.lon, self.lat)
        #elif self.config['catalog']['coordsys'].lower() == 'gal' \
        #   and self.config['coords']['coordsys'].lower() == 'cel':
        #    logger.info('Converting catalog objects from GALACTIC to CELESTIAL coordinates')
        #    self.lon, self.lat = ugali.utils.projector.galToCel(self.lon, self.lat)

        self.mag_1 = self.data.field(self.config['catalog']['mag_1_field'])
        self.mag_err_1 = self.data.field(self.config['catalog']['mag_err_1_field'])
        self.mag_2 = self.data.field(self.config['catalog']['mag_2_field'])
        self.mag_err_2 = self.data.field(self.config['catalog']['mag_err_2_field'])

        if self.config['catalog']['mc_source_id_field'] is not None:
            if self.config['catalog']['mc_source_id_field'] in self.data.names:
                self.mc_source_id = self.data.field(self.config['catalog']['mc_source_id_field'])
                logger.info('Found %i MC source objects'%(numpy.sum(self.mc_source_id > 0)))
            else:
                #ADW: This is pretty kludgy, please fix... (FIXME)
                columns_array = [pyfits.Column(name = self.config['catalog']['mc_source_id_field'],
                                               format = 'I',
                                               array = numpy.zeros(len(self.data)))]
                hdu = pyfits.new_table(columns_array)
                self.data = pyfits.new_table(pyfits.new_table(self.data.view(np.recarray)).columns + hdu.columns).data
                self.mc_source_id = self.data.field(self.config['catalog']['mc_source_id_field'])

        # should be @property
        if self.config['catalog']['band_1_detection']:
            self.mag = self.mag_1
            self.mag_err = self.mag_err_1
        else:
            self.mag = self.mag_2
            self.mag_err = self.mag_err_2
            
        # should be @property
        self.color = self.mag_1 - self.mag_2
        self.color_err = numpy.sqrt(self.mag_err_1**2 + self.mag_err_2**2)

        logger.info('Catalog contains %i objects'%(len(self.data)))
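
The color error above is the standard quadrature sum for the difference of two independent measurements, color_err = sqrt(mag_err_1**2 + mag_err_2**2). A tiny numpy check with hypothetical per-band errors:

import numpy as np

mag_err_1 = np.array([0.02, 0.05, 0.10])
mag_err_2 = np.array([0.03, 0.04, 0.12])
color_err = np.sqrt(mag_err_1**2 + mag_err_2**2)
print(color_err)   # approximately [0.036 0.064 0.156]
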
Example #53
    def calc_signal_color2(self, distance_modulus, mass_steps=1000):
        """
        Compute signal color probability (u_color) for each catalog object on the fly.
        """
        logger.info('Calculating signal color from MMD')

        mag_1, mag_2 = self.catalog.mag_1,self.catalog.mag_2
        lon, lat = self.catalog.lon,self.catalog.lat
        u_density = self.isochrone.pdf_mmd(lon,lat,mag_1,mag_2,distance_modulus,self.mask,self.delta_mag,mass_steps)

        #u_color = u_density * self.delta_mag**2
        u_color = u_density

        # ADW: Should calculate observable fraction here as well...

        return u_color
Example #54
    def _defineVariables(self):
        """
        Helper funtion to define pertinent variables from catalog data.

        ADW (20170627): This has largely been replaced by properties.
        """
        logger.info('Catalog contains %i objects'%(len(self.data)))

        mc_source_id_field = self.config['catalog']['mc_source_id_field']
        if mc_source_id_field is not None:
            if mc_source_id_field not in self.data.dtype.names:
                array = np.zeros(len(self.data),dtype='>i8') # FITS byte-order convention
                self.data = mlab.rec_append_fields(self.data,
                                                   names=mc_source_id_field,
                                                   arrs=array)
            logger.info('Found %i simulated objects'%(np.sum(self.mc_source_id>0)))
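
matplotlib.mlab.rec_append_fields, used above, has been removed from recent matplotlib releases; if it is unavailable, numpy's recfunctions offer an equivalent. A minimal sketch (the catalog dtype here is hypothetical) appending a zero-filled MC_SOURCE_ID column to a structured array:

import numpy as np
from numpy.lib import recfunctions as rfn

data = np.zeros(5, dtype=[('RA', '>f8'), ('DEC', '>f8')])   # hypothetical catalog
mc_source_id = np.zeros(len(data), dtype='>i8')             # FITS byte-order convention
data = rfn.append_fields(data, names='MC_SOURCE_ID',
                         data=mc_source_id, usemask=False)
print(data.dtype.names)   # ('RA', 'DEC', 'MC_SOURCE_ID')
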
Example #55
def add_column(filename,column,formula,force=False):
    """ Add a column to a FITS file.

    ADW: Could this be replaced by a ftool?
    """
    columns = parse_formula(formula)
    logger.info("Running file: %s"%filename)
    logger.debug("  Reading columns: %s"%columns)
    data = fitsio.read(filename,columns=columns)

    logger.debug('  Evaluating formula: %s'%formula)
    # NB: the formula string is evaluated with the freshly-read `data` array in local scope
    col = eval(formula)

    col = np.asarray(col,dtype=[(column,col.dtype)])
    insert_columns(filename,col,force=force)
    return True
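
A hypothetical invocation of add_column, assuming the formula string refers to the freshly-read array as data[...] (which is how the eval call above sees it); the file and column names are illustrative only:

# Add a COLOR column computed from two existing magnitude columns
add_column('catalog.fits', 'COLOR',
           "data['MAG_G'] - data['MAG_R']", force=True)
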
Example #56
    def write(self, filename, data=None):
        if data is None: data = self.results
        logger.info("Writing %s..."%filename)
        if filename.endswith('.npy'):
            np.save(filename,data)
        elif filename.endswith('.fits'):
            # Copies data, so be careful..
            out = np.rec.array(data)
            out.dtype.names = np.char.upper(out.dtype.names)
            hdu = pyfits.new_table(out)
            hdu.writeto(filename,clobber=True)
        elif filename.endswith('.txt') or filename.endswith('.dat'):
            np.savetxt(filename,data)
        else:
            raise Exception('Unrecognized file extension: %s'%filename)
Example #57
    def download(self,age=None,metallicity=None,outdir=None,force=False):
        """
        Check valid parameter range and download isochrones from:
        http://stev.oapd.inaf.it/cgi-bin/cmd
        """
        try:
            from urllib.error import URLError
        except ImportError:
            from urllib2 import URLError

        if age is None: age = float(self.age)
        if metallicity is None: metallicity = float(self.metallicity)

        if outdir is None: outdir = './'
        basename = self.params2filename(age,metallicity)
        outfile = os.path.join(outdir,basename)
            
        if os.path.exists(outfile) and not force:
            try:
                self.verify(outfile,self.survey,age,metallicity)
                logger.info("Found %s; skipping..."%(outfile))
                return
            except Exception as e:
                msg = "Overwriting corrupted %s..."%(outfile)
                logger.warn(msg)
                os.remove(outfile)
                
        mkdir(outdir)

        self.print_info(age,metallicity)
        self.query_server(outfile,age,metallicity)

        if not os.path.exists(outfile):
            raise RuntimeError('Download failed')

        try:
            self.verify(outfile,self.survey,age,metallicity)
        except Exception as e:
            msg = "Output file is corrupted."
            logger.error(msg)
            msg = "Removing %s."%outfile
            logger.info(msg)
            os.remove(outfile)
            raise(e)

        return outfile
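
A hypothetical usage of the download method above, where iso stands for an already-constructed isochrone object exposing it; the parameter values are illustrative only:

# Fetch one isochrone into a local directory, re-downloading only if the
# cached file fails verification.
outfile = iso.download(age=12.0, metallicity=0.0002,
                       outdir='./isochrones', force=False)
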
Example #58
def do_results(args):
    """ Write the results output file """
    config,name,label,coord = args

    filenames = make_filenames(config,label)
    srcfile = filenames['srcfile']
    samples = filenames['samfile']

    if not exists(srcfile):
        logger.warning("Couldn't find %s; skipping..."%srcfile)
        return
    if not exists(samples):
        logger.warning("Couldn't find %s; skipping..."%samples)
        return

    logger.info("Writing %s..."%srcfile)
    ugali.analysis.mcmc.write_results(config,srcfile,samples,srcfile)