Beispiel #1
0
    def sample_lonlat(self, n):
        """
        Draw n points from the 2D spatial distribution and return
        their (lon, lat) sky coordinates.

        Parameters
        ----------
        n : number of points to sample

        Returns
        -------
        lon, lat : sampled coordinates from the projector
        """
        # Parametric form of an ellipse:
        # http://en.wikipedia.org/wiki/Ellipse#General_parametric_form
        # Martin et al. (2009) measure the position angle "from North
        # to East", hence the pi/2 offset applied to phi below.
        semi_major = self.sample_radius(n)
        semi_minor = self.jacobian * semi_major

        # Eccentric anomaly drawn uniformly on [0, 2pi)
        t = 2. * np.pi * numpy.random.rand(n)
        cost, sint = np.cos(t), np.sin(t)
        phi = np.pi/2. - np.deg2rad(self.theta)
        cosphi, sinphi = np.cos(phi), np.sin(phi)

        # Rotate the ellipse by the position angle
        x = semi_major*cost*cosphi - semi_minor*sint*sinphi
        y = semi_major*cost*sinphi + semi_minor*sint*cosphi

        # Lazily create an AITOFF projector if none was attached
        if self.projector is None:
            logger.debug("Creating AITOFF projector for sampling")
            proj = Projector(self.lon, self.lat, 'ait')
        else:
            proj = self.projector

        return proj.imageToSphere(x, y)
Beispiel #2
0
def load_infiles(infiles,columns=None,multiproc=False):
    """ Load a set of FITS files and concatenate them into one array.

    Parameters:
    -----------
    infiles   : single filename or list of filenames
    columns   : columns to read from each file (None reads all)
    multiproc : if truthy, load files in parallel; a positive int
                sets the number of worker processes

    Returns:
    --------
    ndarray : concatenated data from all input files
    """
    if isinstance(infiles,str):
        infiles = [infiles]

    logger.debug("Loading %s files..."%len(infiles))

    args = list(zip(infiles,len(infiles)*[columns]))

    if multiproc:
        from multiprocessing import Pool
        processes = multiproc if multiproc > 0 else None
        # BUGFIX: the Pool was never closed/joined, leaking worker processes
        p = Pool(processes,maxtasksperchild=1)
        try:
            out = p.map(load,args)
        finally:
            p.close()
            p.join()
    else:
        out = [load(arg) for arg in args]

    # Cast everything to the dtype of the first file before concatenating
    dtype = out[0].dtype
    for i,d in enumerate(out):
        if d.dtype != dtype:
            # ADW: Not really safe...
            logger.warn("Casting input data to same type.")
            out[i] = d.astype(dtype)

    logger.debug('Concatenating arrays...')
    return np.concatenate(out)
Beispiel #3
0
def simple_maglims(config,dirname='simple',force=False):
    """
    Create simple, uniform magnitude limits based on nominal
    survey depth.

    Parameters:
    -----------
    config  : configuration object (provides filenames, bands, mask paths)
    dirname : name of the output subdirectory for the new mask files
    force   : accepted but not used in the body below -- TODO confirm intent

    Returns:
    --------
    None (writes one output mask file per input mask file)
    """
    filenames = config.getFilenames()
    release = config['data']['release'].lower()
    #band_1 = config['isochrone']['mag_1_field']
    #band_2 = config['isochrone']['mag_2_field']
    band_1 = config['catalog']['mag_1_field']
    band_2 = config['catalog']['mag_2_field']
    mask_1 = filenames['mask_1'].compressed()
    mask_2 = filenames['mask_2'].compressed()
    # Refuse to write output into the input mask directory
    basedir,basename = os.path.split(config['mask']['dirname'])
    if basename == dirname:
        raise Exception("Input and output directory are the same.")
    outdir = mkdir(os.path.join(basedir,dirname))

    for band, infiles in [(band_1,mask_1),(band_2,mask_2)]:
        # Nominal survey depth for this release/band
        maglim = MAGLIMS[release][band]
        for infile in infiles:
            basename = os.path.basename(infile)
            outfile = join(outdir,basename)
            logger.debug('Reading %s...'%infile)
            f = pyfits.open(infile)
            # Overwrite the entire MAGLIM column with the uniform limit
            f[1].data['MAGLIM'][:] = maglim
            logger.debug('Writing %s...'%outfile)
            f.writeto(outfile,clobber=True)
Beispiel #4
0
def inFootprint(config, pixels, nside=None):
    """
    Open each valid filename for the set of pixels and determine the set 
    of subpixels with valid data.

    Parameters:
    -----------
    config : configuration object or filename
    pixels : pixel index or array of pixel indices to test
    nside  : healpix nside of the input pixels (defaults to nside_likelihood)

    Returns:
    --------
    inside : boolean array, True where the pixel contains valid data
    """
    config = Config(config)
    nside_catalog    = config['coords']['nside_catalog']
    nside_likelihood = config['coords']['nside_likelihood']
    nside_pixel      = config['coords']['nside_pixel']

    if np.isscalar(pixels): pixels = np.array([pixels])
    if nside is None: nside = nside_likelihood

    filenames = config.getFilenames()
    catalog_pixels = filenames['pix'].compressed()

    inside = np.zeros(len(pixels), dtype=bool)
    if not nside_catalog:
        catalog_pix = [0]
    else:
        # Restrict to catalog pixels that actually have files
        catalog_pix = superpixel(pixels,nside,nside_catalog)
        catalog_pix = np.intersect1d(catalog_pix,catalog_pixels)

    for fnames in filenames[catalog_pix]:
        # BUGFIX: log the current file (fnames), not the whole filename table
        logger.debug("Loading %s"%fnames['mask_1'])
        _nside,subpix_1,val_1 = ugali.utils.healpix.read_partial_map(fnames['mask_1'],'MAGLIM',fullsky=False)
        logger.debug("Loading %s"%fnames['mask_2'])
        _nside,subpix_2,val_2 = ugali.utils.healpix.read_partial_map(fnames['mask_2'],'MAGLIM',fullsky=False)
        # Valid data requires coverage in both bands
        subpix = np.intersect1d(subpix_1,subpix_2)
        superpix = np.unique(superpixel(subpix,nside_pixel,nside))
        inside |= np.in1d(pixels, superpix)

    return inside
Beispiel #5
0
    def absolute_magnitude_martin(self, richness=1, steps=1e4, n_trials=1000, mag_bright=None, mag_faint=23., alpha=0.32, seed=None):
        """
        Calculate the absolute magnitude (Mv) of the isochrone using
        the prescription of Martin et al. 2008.
        
        ADW: Seems like the faint and bright limits should depend on the survey maglim?

        Parameters:
        -----------
        richness   : Isochrone nomalization factor
        steps      : Number of steps for sampling the isochrone.
        n_trials   : Number of bootstrap samples
        mag_bright : Bright magnitude limit [SDSS g-band] for luminosity calculation
                     (NOTE: accepted but not used in the body below)
        mag_faint  : Faint magnitude limit [SDSS g-band] for luminosity calculation
        alpha      : Output confidence interval (1-alpha)
        seed       : Random seed

        Returns:
        --------
        med,lo,hi : Total absolute magnitude interval
        """
        # ADW: This function is not quite right. It should restrict
        # the catalog to the obsevable space using the mask in each
        # pixel.  This becomes even more complicated when we transform
        # the isochrone into SDSS g,r...
        if seed is not None: np.random.seed(seed)

        # Create a copy of the isochrone in the SDSS system
        params = {k:v.value for k,v in self._params.items()}
        params.update(band_1='g',band_2='r',survey='sdss')
        iso = self.__class__(**params)

        # Analytic part (below detection threshold)
        # g, r are absolute magnitudes
        mass_init, mass_pdf, mass_act, sdss_g, sdss_r = iso.sample(mass_steps = steps)
        V = jester_mag_v(sdss_g, sdss_r)
        # Stars fainter than the apparent faint limit are unobserved
        cut = ( (sdss_g + iso.distance_modulus) > mag_faint)
        mag_unobs = sum_mags(V[cut], weights = richness * mass_pdf[cut])

        # Stochastic part (above detection threshold)
        abs_mag_v = np.zeros(n_trials)
        for i in range(n_trials):
            if i%100==0: logger.debug('%i absolute magnitude trials'%i)
            # g,r are apparent magnitudes
            sdss_g, sdss_r = iso.simulate(richness * iso.stellar_mass())
            cut = (sdss_g < mag_faint) 
            # V is absolute magnitude
            V = jester_mag_v(sdss_g[cut]-iso.distance_modulus,
                             sdss_r[cut]-iso.distance_modulus)
            mag_obs = sum_mags(V)
            # Combine the observed (bootstrap) and unobserved (analytic) parts
            abs_mag_v[i] = sum_mags([mag_obs,mag_unobs])

        # ADW: Careful, fainter abs mag is larger (less negative) number
        q = [100*alpha/2., 50, 100*(1-alpha/2.)]
        hi,med,lo = np.percentile(abs_mag_v,q)
        return ugali.utils.stats.interval(med,lo,hi)
Beispiel #6
0
def load_file(kwargs):
    """ Read a single FITS file via fitsio.

    Parameters:
    kwargs : dict of keyword arguments passed to fitsio.read
             (must contain 'filename')
    Returns:
    ndarray : fits catalog data
    """
    filename = kwargs['filename']
    logger.debug("Loading %s..."%filename)
    data = fitsio.read(**kwargs)
    return data
Beispiel #7
0
    def labelHealpix(pixels, values, nside, threshold=0, xsize=1000):
        """
        Label contiguous regions of a (sparse) HEALPix map. Works by mapping 
        HEALPix array to a Mollweide projection and applying scipy.ndimage.label
     
        Assumes non-nested HEALPix map.
        
        Parameters:
        pixels    : Pixel values associated to (sparse) HEALPix array
        values    : (Sparse) HEALPix array of data values
        nside     : HEALPix dimensionality
        threshold : Threshold value for object detection
        xsize     : Size of Mollweide projection
        
        Returns:
        labels, nlabels
        """
        # Map healpix pixels into Mollweide image coordinates
        proj = healpy.projector.MollweideProj(xsize=xsize)
        vec = healpy.pix2vec(nside,pixels)
        xy = proj.vec2xy(vec)
        ij = proj.xy2ij(xy)
        xx,yy = proj.ij2xy()
     
        # Convert to Mollweide
        searchims = []
        # Treat a 1D value array as a single slice
        if values.ndim < 2: iterate = [values]
        else:               iterate = values.T
        for i,value in enumerate(iterate):
            # BUGFIX: the slice index was missing from the log message
            logger.debug("Labeling slice %i..."%i)
            searchim = numpy.zeros(xx.shape,dtype=bool)
            select = (value > threshold)
            yidx = ij[0][select]; xidx = ij[1][select]
            searchim[yidx,xidx] |= True
            searchims.append( searchim )
        searchims = numpy.array(searchims)

        # Full binary structure
        s = ndimage.generate_binary_structure(searchims.ndim,searchims.ndim)
     
        ### # Dilate in the z-direction
        logger.info("  Dilating image...")
        searchims = ndimage.binary_dilation(searchims,s,1)
        
        # Do the labeling
        logger.info("  Labeling image...")
        labels,nlabels = ndimage.label(searchims,structure=s)

        # Convert back to healpix
        pix_labels = labels[:,ij[0],ij[1]].T
        pix_labels = pix_labels.reshape(values.shape)
        pix_labels *= (values > threshold) # re-trim

        return pix_labels, nlabels
Beispiel #8
0
    def createLabels2D(self):
        """ 2D labeling at zmax.

        Collapses the (pixel, distance) value array to the distance
        slice with the maximum value per pixel, labels that 2D map,
        and broadcasts the resulting labels across all distances.
        """
        logger.debug("  Creating 2D labels...")
        # Index of the maximum value along the distance dimension
        self.zmax = np.argmax(self.values,axis=1)
        # Maximum value per pixel (fancy-indexed at zmax)
        self.vmax = self.values[np.arange(len(self.pixels),dtype=int),self.zmax]

        kwargs=dict(pixels=self.pixels,values=self.vmax,nside=self.nside,
                    threshold=self.threshold,xsize=self.xsize)
        labels,nlabels = CandidateSearch.labelHealpix(**kwargs)
        self.nlabels = nlabels
        # Repeat the same 2D labels for every distance slice
        self.labels = np.repeat(labels,len(self.distances)).reshape(len(labels),len(self.distances))
        return self.labels, self.nlabels
Beispiel #9
0
def stellarDensity(infile, nside=256, lon_field='RA', lat_field='DEC'): 
    """ Compute the stellar density (counts per square degree) in
    healpix pixels from a FITS catalog.

    Parameters:
    infile    : FITS catalog filename
    nside     : healpix nside for the density map
    lon_field : longitude column name
    lat_field : latitude column name
    Returns:
    pixels, density : occupied pixel indices and their densities
    """
    pixarea = hp.nside2pixarea(nside, degrees=True)
    logger.debug("Reading %s"%infile)
    catalog = fitsio.read(infile, columns=[lon_field, lat_field])

    # Assign each star to a healpix pixel and count occupancy
    pix = ang2pix(nside, catalog[lon_field], catalog[lat_field])
    counter = collections.Counter(pix)
    pixels, number = np.array(sorted(counter.items())).T
    density = number / pixarea

    return pixels, density
Beispiel #10
0
def stellarDensity(infile, nside=2**8): 
    """Compute stellar density (counts per square degree) in Galactic
    healpix pixels from a FITS catalog with GLON/GLAT columns.

    Parameters:
    infile : FITS catalog filename (needs 'GLON' and 'GLAT' columns)
    nside  : healpix nside for the density map
    Returns:
    pixels, density : occupied pixel indices and their densities
    """
    area = healpy.nside2pixarea(nside,degrees=True)
    f = pyfits.open(infile)
    data = f[1].data
    logger.debug("Reading %s"%infile)
    
    glon,glat = data['GLON'],data['GLAT']
    pix = ang2pix(nside,glon,glat,coord='GAL')
    # Count stars per pixel; sorted() yields ascending pixel order
    counts = collections.Counter(pix)
    pixels, number = numpy.array(sorted(counts.items())).T
    density = number/area
    f.close()

    return pixels, density
Beispiel #11
0
    def __init__(self, infiles, roi):
        """
        Build the mask band from sparse healpix mask files, restricted
        to the ROI pixels, and attach the detection fraction if present.

        Parameters:
        -----------
        infiles : list of sparse healpix mask files
        roi : roi object

        Returns:
        --------
        mask : MaskBand object
        """
        self.roi = roi
        self.config = self.roi.config

        # ADW: It's overkill to make the full map just to slim it
        # down, but we don't have a great way to go from map pixels to
        # roi pixels.
        nside,pixel,maglim = healpix.read_partial_map(infiles,column='MAGLIM')
        self.nside = nside

        # Sparse maps of pixels in various ROI regions
        self.mask_roi_sparse = maglim[self.roi.pixels] 

        # Try to get the detection fraction
        # Default: any pixel with positive maglim counts as detected
        self.frac_roi_sparse = (self.mask_roi_sparse > 0)
        try: 
            logger.debug("Reading FRACDET...")
            nside,pixel,frac=healpix.read_partial_map(infiles,column='FRACDET')
            # This clipping might gloss over bugs...
            fractype = self.config['mask'].get('fractype','binary')
            fracmin = self.config['mask'].get('fracmin',0.5)
            # 'binary': all-or-nothing coverage; 'full': keep fractional
            # coverage above fracmin; falsy fractype: use FRACDET as-is
            if fractype == 'binary':
                frac = np.where(frac < fracmin, 0.0, 1.0)
            elif fractype == 'full':
                frac = np.where(frac < fracmin, 0.0, frac)
            elif not fractype:
                pass
            else:
                msg = "Unrecognized fractype: %s"%fractype
                logger.warn(msg)
                
            self.frac_roi_sparse = np.clip(frac[self.roi.pixels],0.0,1.0)
        except ValueError as e:
            # No detection fraction present
            msg = "No 'FRACDET' column found in masks; assuming FRACDET = 1.0"
            logger.info(msg)

        # Explicitly zero the maglim of pixels with fracdet < fracmin
        self.mask_roi_sparse[self.frac_roi_sparse == 0] = 0.0
Beispiel #12
0
def get_ugali_dir():
    """Get the path to the ugali data directory from the environment"""

    # Fall back to $HOME/.ugali when UGALIDIR is unset or empty
    ugalidir = os.getenv('UGALIDIR') or os.path.join(os.getenv('HOME'),'.ugali')

    if not os.path.exists(ugalidir):
        # Announce that the directory is about to be created
        from ugali.utils.logger import logger
        logger.debug("Creating UGALIDIR:\n%s"%ugalidir)

    return mkdir(ugalidir)
Beispiel #13
0
    def createObjects(self):
        """Create detected objects from the labeled healpix map."""
        logger.debug("  Creating objects...")
        # Histogram of label occurrences, with reverse indices
        hist,edges,rev = reverseHistogram(self.labels,bins=numpy.arange(self.nlabels+2))
        self.rev = rev
        # Make some cut on the minimum size of a labelled object
        good, = numpy.where( (hist >= self.minpix) )
        # Get rid of zero label (below threshold)
        # NOTE(review): numpy.nonzero(good) yields *positions* within
        # 'good', not the surviving label values themselves -- confirm
        # this is intended (vs. good[numpy.nonzero(good)]).
        self.good, = numpy.nonzero(good)

        kwargs=dict(pixels=self.pixels,values=self.values,nside=self.nside, 
                    zvalues=self.distances, rev=self.rev, good=self.good)
        objects = self.findObjects(**kwargs)
        self.objects = self.finalizeObjects(objects)

        return self.objects
Beispiel #14
0
def add_column(filename,column,formula,force=False):
    """ Add a column to a FITS file.

    ADW: Could this be replaced by a ftool?

    Parameters:
    -----------
    filename : FITS file to modify in place
    column   : name of the new column
    formula  : expression string evaluated with eval (see security note)
    force    : passed through to insert_columns

    Returns:
    --------
    True on completion
    """
    # Determine which columns the formula references
    columns = parse_formula(formula)
    logger.info("Running file: %s"%filename)
    logger.debug("  Reading columns: %s"%columns)
    data = fitsio.read(filename,columns=columns)

    logger.debug('  Evaluating formula: %s'%formula)
    # SECURITY: eval executes arbitrary code; 'formula' must come from
    # a trusted source (e.g. the user's own configuration).
    col = eval(formula)

    col = np.asarray(col,dtype=[(column,col.dtype)])
    insert_columns(filename,col,force=force)
    return True
Beispiel #15
0
    def getFilenames(self,pixels=None):
        """
        Return the filename recarray, optionally restricted to a set
        of pixels.

        Parameters:
        -----------
        pixels : requested pixels (None returns all filenames)

        Returns:
        --------
        filenames : recarray
        """
        logger.debug("Getting filenames...")
        if pixels is None:
            return self.filenames
        # Keep only rows whose 'pix' entry is among the requested pixels
        match = np.in1d(self.filenames['pix'],pixels)
        return self.filenames[match]
Beispiel #16
0
def mergeLikelihoodFiles(infiles, lkhdfile, roifile):
    """Merge per-pixel likelihood files into combined output maps.

    Parameters:
    -----------
    infiles  : list of input likelihood FITS files
    lkhdfile : output filename for the merged sparse healpix map
    roifile  : output filename for the per-pixel summary map
    """
    mergeSparseHealpixMaps(infiles,lkhdfile)

    ext='PIX_DATA'
    keys=['STELLAR','NINSIDE','NANNULUS']
    # nside taken from the first file's header; assumed common to all
    # inputs -- TODO confirm
    nside = pyfits.open(infiles[0])[ext].header['LKDNSIDE']

    # Collect one pixel index and one value per key from each file
    pix_array = []
    data_dict = dict([(k,[]) for k in keys])
    for ii in range(0, len(infiles)):
        logger.debug('(%i/%i) %s'%(ii+1, len(infiles), infiles[ii]))
        reader = pyfits.open(infiles[ii])
        pix_array.append(reader[ext].header['LKDPIX'])
        for key in data_dict.keys():
            data_dict[key].append(reader[ext].header[key])
        
    pix_array = numpy.array(pix_array)
    for key in data_dict.keys():
        data_dict[key] = numpy.array(data_dict[key])
    writeSparseHealpixMap(pix_array, data_dict, nside, roifile)
Beispiel #17
0
    def download(self,age,metallicity,outdir=None,force=False):
        """
        Check valid parameter range and download isochrones from:
        http://stev.oapd.inaf.it/cgi-bin/cmd

        Parameters:
        -----------
        age         : isochrone age
        metallicity : isochrone metallicity
        outdir      : output directory (default: current directory)
        force       : re-download even if a valid file already exists

        Returns:
        --------
        outfile : path to the downloaded file (None when an existing
                  valid file is kept)
        """
        if outdir is None: outdir = './'
        basename = self.isochrone.params2filename(age,metallicity)
        outfile = os.path.join(outdir,basename)
            
        if os.path.exists(outfile) and not force:
            try:
                # Keep the existing file if it passes verification
                self.verify(outfile,self.survey,age,metallicity)
                logger.info("Found %s; skipping..."%(outfile))
                return
            except Exception as e:
                msg = "Overwriting corrupted %s..."%(outfile)
                logger.warn(msg)
                #os.remove(outfile)
                
        mkdir(outdir)

        self.print_info(age,metallicity)

        try:
            self.query_server(outfile,age,metallicity)
        except Exception as e:
            logger.debug(str(e))
            raise RuntimeError('Bad server response')

        if not os.path.exists(outfile):
            raise RuntimeError('Download failed')

        # Verify the freshly downloaded file before declaring success
        try:
            self.verify(outfile,self.survey,age,metallicity)
        except Exception as e:
            msg = "Output file is corrupted."
            logger.error(msg)
            #os.remove(outfile)
            raise(e)

        return outfile
Beispiel #18
0
    def lnprob(self,theta):
        """ Logarithm of the posterior probability.

        Parameters:
        -----------
        theta : array of parameter values

        Returns:
        --------
        lnprob : log-prior + log-likelihood (-inf for bad priors)
        """
        global niter
        params,priors,loglike = self.params,self.priors,self.loglike
        # Avoid extra likelihood calls with bad priors
        _lnprior = self.lnprior(theta)
        if np.isfinite(_lnprior):
            _lnlike = self.lnlike(theta)
        else:
            _lnprior = -np.inf
            _lnlike = -np.inf

        _lnprob = _lnprior + _lnlike

        # Periodic progress report
        if (niter%100==0):
            msg = "%i function calls ...\n"%niter
            msg+= ', '.join('%s: %.3f'%(k,v) for k,v in zip(params,theta))
            # BUGFIX: like/prior values were swapped in this message
            msg+= '\nlog(like): %.3f, log(prior): %.3f'%(_lnlike,_lnprior)
            logger.debug(msg)
        niter+=1
        return _lnprob
Beispiel #19
0
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog up into a set of healpix files.

    Parameters:
    -----------
    infiles : list of input catalog FITS files
    config  : configuration object
    force   : accepted but not used in the body below -- TODO confirm intent
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()
    
    for ii,infile in enumerate(infiles):
        logger.info('(%i/%i) %s'%(ii+1, len(infiles), infile))
        f = pyfits.open(infile)
        data = f[1].data
        header = f[1].header
        logger.info("%i objects found"%len(data))
        if not len(data): continue
        # Galactic coordinates and healpix indices for each object
        glon,glat = cel2gal(data['RA'],data['DEC'])
        catalog_pix = ang2pix(nside_catalog,glon,glat,coord='GAL')
        pixel_pix = ang2pix(nside_pixel,glon,glat,coord='GAL')
        names = [n.upper() for n in data.columns.names]
        # Insert the new columns just after RA/DEC (whichever comes later)
        ra_idx = names.index('RA'); dec_idx = names.index('DEC')
        idx = ra_idx if ra_idx > dec_idx else dec_idx
        catalog_pix_name = 'PIX%i'%nside_catalog
        pixel_pix_name = 'PIX%i'%nside_pixel

        coldefs = pyfits.ColDefs(
            [pyfits.Column(name='GLON',format='1D',array=glon),
             pyfits.Column(name='GLAT',format='1D',array=glat),
             pyfits.Column(name=catalog_pix_name,format='1J',array=catalog_pix),
             pyfits.Column(name=pixel_pix_name  ,format='1J',array=pixel_pix)]
        )
        hdu = pyfits.new_table(data.columns[:idx+1]+coldefs+data.columns[idx+1:])
        table = hdu.data

        for pix in numpy.unique(catalog_pix):
            logger.debug("Processing pixel %s"%pix)
            outfile = filenames.data['catalog'][pix]
            if not os.path.exists(outfile):
                # Create an empty output file with matching columns
                logger.debug("Creating %s"%outfile)
                names = [n.upper() for n in table.columns.names]
                formats = table.columns.formats
                columns = [pyfits.Column(n,f) for n,f in zip(names,formats)]
                out = pyfits.HDUList([pyfits.PrimaryHDU(),pyfits.new_table(columns)])
                out[1].header['NSIDE'] = nside_catalog
                out[1].header['PIX'] = pix
                out.writeto(outfile)
            # Append this pixel's rows to the (possibly pre-existing) file
            hdulist = pyfits.open(outfile,mode='update')
            t1 = hdulist[1].data
            # Could we speed up with sorting and indexing?
            t2 = table[ table[catalog_pix_name] == pix ]
            nrows1 = t1.shape[0]
            nrows2 = t2.shape[0]
            nrows = nrows1 + nrows2
            out = pyfits.new_table(t1.columns, nrows=nrows)
            for name in t1.columns.names:
                out.data.field(name)[nrows1:]=t2.field(name)
            hdulist[1] = out
            logger.debug("Writing %s"%outfile)
            hdulist.flush()
            hdulist.close()
Beispiel #20
0
    def query_server(self, outfile, age, metallicity):
        """
        Query the isochrone web server and download the result to outfile.

        Parameters:
        -----------
        outfile     : output filename for the downloaded isochrone
        age         : isochrone age
        metallicity : isochrone metallicity (z)
        """
        z = metallicity
        feh = self.z2feh(z)
        
        params = copy.deepcopy(self.download_defaults)
        params['age']=age
        params['feh']='%.6f'%feh
        params['clr']=dict_clr[self.survey]

        server = self.download_url
        url = server + '/models/isolf_new.php'
        # First check that the server is alive
        logger.debug("Accessing %s..."%url)
        urlopen(url,timeout=2)

        query = url + '?' + urlencode(params)
        logger.debug(query)
        response = urlopen(query)
        page_source = str(response.read())
        try:
            # The server embeds the temporary output filename in the page
            file_id = int(page_source.split('tmp/tmp')[-1].split('.iso')[0])
        except Exception as e:
            logger.debug(str(e))
            msg = 'Output filename not found'
            raise RuntimeError(msg)

        infile = 'http://stellar.dartmouth.edu/models/tmp/tmp%s.iso'%(file_id)
        # NOTE(review): shell=True with interpolated paths; outfile must
        # not contain shell metacharacters.
        command = 'wget -q %s -O %s'%(infile, outfile)
        subprocess.call(command,shell=True)
Beispiel #21
0
def readSparseHealpixMaps(infiles, field, extension='PIX_DATA', default_value=healpy.UNSEEN, construct_map=True):
    """
    Read multiple sparse healpix maps and output the results
    identically to a single file read.

    Parameters:
    -----------
    infiles       : single filename or list of filenames
    field         : column to read from each file
    extension     : FITS extension name
    default_value : fill value for unseen pixels
    construct_map : if True return the full map; otherwise (pixels, values)

    Returns:
    --------
    map  (construct_map=True)  or  pix, values  (construct_map=False)
    """
    # NOTE(review): 'basestring' is Python 2 only -- confirm a py3
    # compatibility shim is in scope elsewhere in this module.
    if isinstance(infiles,basestring): infiles = [infiles]

    pix_array   = []
    value_array = []

    # Create a map based on the first file in the list
    # (NB: the local name 'map' shadows the builtin)
    map = readSparseHealpixMap(infiles[0], field, extension=extension, default_value=healpy.UNSEEN, construct_map=True)

    for ii in range(0, len(infiles)):
        logger.debug('(%i/%i) %s'%(ii+1, len(infiles), infiles[ii]))
        pix_array_current, value_array_current = readSparseHealpixMap(infiles[ii], field,
                                                                      extension=extension,
                                                                      construct_map=False)
        pix_array.append(pix_array_current)
        value_array.append(value_array_current)
        # Later files overwrite earlier values for shared pixels
        map[pix_array[ii]] = value_array[ii]

    # Check to see whether there are any conflicts
    pix_master = numpy.concatenate(pix_array)
    value_master = numpy.concatenate(value_array)

    n_conflicting_pixels = len(pix_master) - len(numpy.unique(pix_master)) 
    if n_conflicting_pixels != 0:
        logger.warning('%i conflicting pixels during merge.'%(n_conflicting_pixels))

    if construct_map:
        return map
    else:
        if n_conflicting_pixels == 0:
            pix_master = numpy.sort(pix_master)
            return pix_master, map[pix_master]
        else:
            # With conflicts, recover the valid pixels from the map itself
            pix_valid = numpy.nonzero(map != default_value)[0]
            return pix_valid, map[pix_valid]
Beispiel #22
0
def load_files(filenames,multiproc=False,**kwargs):
    """ Load a set of FITS files and concatenate them into one array.

    Parameters:
    -----------
    filenames : single filename or sequence of filenames
    multiproc : if truthy, load files in parallel; a positive int
                sets the number of worker processes
    kwargs    : additional keyword arguments passed to fitsio.read

    Returns:
    --------
    ndarray : concatenated data from all input files
    """
    filenames = np.atleast_1d(filenames)
    logger.debug("Loading %s files..."%len(filenames))

    kwargs = [dict(filename=f,**kwargs) for f in filenames]

    if multiproc:
        from multiprocessing import Pool
        processes = multiproc if multiproc > 0 else None
        # BUGFIX: the Pool was never closed/joined, leaking worker processes
        p = Pool(processes,maxtasksperchild=1)
        try:
            out = p.map(load_file,kwargs)
        finally:
            p.close()
            p.join()
    else:
        out = [load_file(kw) for kw in kwargs]

    # Cast everything to the dtype of the first file before concatenating
    dtype = out[0].dtype
    for i,d in enumerate(out):
        if d.dtype != dtype:
            # ADW: Not really safe...
            logger.warn("Casting input data to same type.")
            out[i] = d.astype(dtype,copy=False)

    logger.debug('Concatenating arrays...')
    return np.concatenate(out)
Beispiel #23
0
def inMangle(polyfile,ra,dec):
    """
    Test whether coordinates lie inside a mangle polygon mask by
    shelling out to the 'polyid' utility.

    Parameters:
    -----------
    polyfile : mangle polygon file
    ra, dec  : coordinate arrays (degrees)

    Returns:
    --------
    boolean array; True where the polygon weight is positive
    """
    # Write the coordinates to a temporary file for 'polyid'
    coords = tempfile.NamedTemporaryFile(suffix='.txt',delete=False)
    logger.debug("Writing coordinates to %s"%coords.name)
    np.savetxt(coords, np.array( [ra,dec] ).T, fmt='%.6g' )
    coords.close()

    weights = tempfile.NamedTemporaryFile(suffix='.txt',delete=False)
    cmd = "polyid -W %s %s %s"%(polyfile,coords.name,weights.name)
    logger.debug(cmd)
    subprocess.call(cmd,shell=True)

    # Replace empty weights with zero so every row has three columns
    tmp = tempfile.NamedTemporaryFile(suffix='.txt',delete=False)
    cmd = """awk '{if($3==""){$3=0} print $1, $2, $3}' %s > %s"""%(weights.name,tmp.name)
    logger.debug(cmd)
    subprocess.call(cmd,shell=True)

    # Last column holds the polygon weight (first row is a header)
    data = np.loadtxt(tmp.name,unpack=True,skiprows=1)[-1]
    for f in [coords,weights,tmp]:
        logger.debug("Removing %s"%f.name)
        os.remove(f.name)

    return data > 0
Beispiel #24
0
    def query_server(self,outfile,age,metallicity):
        """
        Query the Padova CMD server and download the resulting isochrone.

        Parameters:
        -----------
        outfile     : output filename
        age         : isochrone age (Gyr)
        metallicity : isochrone metallicity (z)

        Returns:
        --------
        outfile : the output filename

        Raises:
        -------
        RuntimeError : for out-of-range parameters or a bad server response
        """
        # Validate the age against the server's allowed log(age) range
        epsilon = 1e-4
        lage = np.log10(age*1e9)
        lage_min,lage_max = self.defaults['isoc_lage0'],self.defaults['isoc_lage1']
        if not (lage_min-epsilon < lage <lage_max+epsilon):
            msg = 'Age outside of valid range: %g [%g < log(age) < %g]'%(lage,lage_min,lage_max)
            raise RuntimeError(msg)

        z_min,z_max = self.defaults['isoc_z0'],self.defaults['isoc_z1']
        if not (z_min <= metallicity <= z_max):
            msg = 'Metallicity outside of valid range: %g [%g < z < %g]'%(metallicity,z_min,z_max)
            raise RuntimeError(msg)

        d = dict(self.defaults)
        d['photsys_file'] = photsys_dict[self.survey]
        d['isoc_age']     = age * 1e9
        d['isoc_zeta']    = metallicity

        server = 'http://stev.oapd.inaf.it'
        url = server + '/cgi-bin/cmd_%s'%d['cmd_version']
        logger.debug("Accessing %s..."%url)

        q = urlencode(d)
        logger.debug(url+'?'+q)
        # NOTE(review): under Python 3 urlopen requires bytes data and
        # .read() returns bytes; this looks Python 2 specific -- confirm.
        c = urlopen(url, q).read()
        # NOTE(review): the pattern should be a raw string (r'output\d+')
        # to avoid the invalid-escape deprecation warning.
        aa = re.compile('output\d+')
        fname = aa.findall(c)
        
        if len(fname) == 0:
            msg = "Output filename not found"
            raise RuntimeError(msg)

        # Fetch the generated table from the server's tmp area
        out = '{0}/~lgirardi/tmp/{1}.dat'.format(server, fname[0])
        cmd = 'wget %s -O %s'%(out,outfile)
        logger.debug(cmd)
        stdout = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT)
        logger.debug(stdout)

        return outfile
Beispiel #25
0
    def clip_catalog(self):
        """
        Clip the full catalog to the ROI and set the default catalog
        (the interior catalog) used for the likelihood calculation.
        """
        # ROI-specific catalog
        logger.debug("Clipping full catalog...")
        cut_observable = self.mask.restrictCatalogToObservableSpace(self.catalog_full)

        # All objects within disk ROI
        logger.debug("Creating roi catalog...")
        self.catalog_roi = self.catalog_full.applyCut(cut_observable)
        self.catalog_roi.project(self.roi.projector)
        self.catalog_roi.spatialBin(self.roi)

        # All objects interior to the background annulus
        logger.debug("Creating interior catalog...")
        cut_interior = numpy.in1d(ang2pix(self.config['coords']['nside_pixel'], self.catalog_roi.lon, self.catalog_roi.lat), 
                                  self.roi.pixels_interior)
        #cut_interior = self.roi.inInterior(self.catalog_roi.lon,self.catalog_roi.lat)
        self.catalog_interior = self.catalog_roi.applyCut(cut_interior)
        self.catalog_interior.project(self.roi.projector)
        self.catalog_interior.spatialBin(self.roi)

        # Set the default catalog
        #logger.info("Using interior ROI for likelihood calculation")
        self.catalog = self.catalog_interior
Beispiel #26
0
def run(self):
    """
    Run the requested pipeline stages ('simulate', 'sensitivity',
    'merge', 'plot') based on self.opts.run.
    """
    outdir = self.config['output']['simdir']
    logdir = join(outdir, 'log')

    if 'simulate' in self.opts.run:
        logger.info("Running 'simulate'...")
        mkdir(outdir)
        mkdir(logdir)

        if self.opts.num is None:
            self.opts.num = self.config['simulate']['njobs']
        for i in range(self.opts.num):
            # One batch job per simulation, seeded with the job index
            outfile = join(outdir, self.config['output']['simfile'] % i)
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulate']['script']
            cmd = '%s %s %s --seed %i' % (script, self.opts.config, outfile, i)
            #cmd='%s %s %s'%(script,self.opts.config,outfile)
            self.batch.submit(cmd, jobname, logfile)
            time.sleep(0.1)

    if 'sensitivity' in self.opts.run:
        logger.info("Running 'sensitivity'...")

    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")

        filenames = join(
            outdir, self.config['output']['simfile']).split('_%')[0] + '_*'
        infiles = sorted(glob.glob(filenames))

        f = pyfits.open(infiles[0])
        # NOTE(review): 'data' is not defined at this point (it is only
        # assigned later, in the 'plot' branch) -- presumably this was
        # meant to be f[1].data.dtype; confirm.
        table = np.empty(0, dtype=data.dtype)
        for filename in infiles:
            logger.debug("Reading %s..." % filename)
            f = pyfits.open(filename)
            # Keep only rows with a valid (non-NaN) test statistic
            t = f[1].data[~np.isnan(f[1].data['ts'])]
            table = recfuncs.stack_arrays([table, t],
                                          usemask=False,
                                          asrecarray=True)

        logger.info("Found %i simulations." % len(table))
        outfile = join(outdir, "merged_sims.fits")
        hdu = pyfits.new_table(table)
        logger.info("Writing %s..." % outfile)
        hdu.writeto(outfile, clobber=True)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import ugali.utils.plotting
        import pylab as plt

        plotdir = mkdir(self.config['output']['plotdir'])

        data = pyfits.open(join(outdir, "merged_sims.fits"))[1].data
        data = data[~np.isnan(data['ts'])]

        bigfig, bigax = plt.subplots()

        # One combined figure plus one figure per fit distance
        for dist in np.unique(data['fit_distance']):
            logger.info('  Plotting distance: %s' % dist)
            ts = data['ts'][data['fit_distance'] == dist]
            ugali.utils.plotting.drawChernoff(bigax,
                                              ts,
                                              bands='none',
                                              color='gray')

            fig, ax = plt.subplots(1, 2, figsize=(10, 5))
            ugali.utils.plotting.drawChernoff(ax[0],
                                              ts,
                                              bands='none',
                                              pdf=True)
            ugali.utils.plotting.drawChernoff(ax[1], ts)
            fig.suptitle(r'Chernoff ($\mu = %g$)' % dist)
            ax[0].annotate(r"$N=%i$" % len(ts),
                           xy=(0.15, 0.85),
                           xycoords='axes fraction',
                           bbox={
                               'boxstyle': "round",
                               'fc': '1'
                           })
            basename = 'chernoff_u%g.png' % dist
            outfile = os.path.join(plotdir, basename)
            plt.savefig(outfile)
        bigfig.suptitle('Chernoff!')
        basename = 'chernoff_all.png'
        outfile = os.path.join(plotdir, basename)
        plt.savefig(outfile)

        #idx=np.random.randint(len(data['ts'])-1,size=400)
        #idx=slice(400)
        #ugali.utils.plotting.plotChernoff(data['ts'][idx])
        #ugali.utils.plotting.plotChernoff(data['fit_ts'])
        plt.ion()
        """
Beispiel #27
0
    def absolute_magnitude_martin(self, richness=1, steps=1e4, n_trials=1000, mag_bright=16., mag_faint=23., alpha=0.32, seed=None):
        """
        Calculate the absolute magnitude (Mv) of the isochrone using
        the prescription of Martin et al. 2008.
        
        Parameters:
        -----------
        richness : Isochrone nomalization factor
        steps : Number of steps for sampling the isochrone.
        n_trials : Number of bootstrap samples
        mag_bright : Bright magnitude limit for calculating luminosity.
                     (NOTE: accepted but not used in the body below)
        mag_faint : Faint magnitude limit for calculating luminosity.
        alpha : Output confidence interval (1-alpha)
        seed : Random seed

        Returns:
        --------
        med,lo,hi : Absolute magnitude interval
        """
        # ADW: This function is not quite right. You should be restricting
        # the catalog to the obsevable space (using the function named as such)
        # Also, this needs to be applied in each pixel individually
        
        # Using the SDSS g,r -> V from Jester 2005 [arXiv:0506022]
        # for stars with R-I < 1.15
        # V = g_sdss - 0.59(g_sdss-r_sdss) - 0.01
        # g_des = g_sdss - 0.104(g_sdss - r_sdss) + 0.01
        # r_des = r_sdss - 0.102(g_sdss - r_sdss) + 0.02
        np.random.seed(seed)

        # This prescription is calibrated for DES g/r photometry only
        if self.survey.lower() != 'des':
            raise Exception('Only valid for DES')
        if 'g' not in [self.band_1,self.band_2]:
            msg = "Need g-band for absolute magnitude"
            raise Exception(msg)    
        if 'r' not in [self.band_1,self.band_2]:
            msg = "Need r-band for absolute magnitude"
            raise Exception(msg)    
        
        def visual(g, r, pdf=None):
            # Visual magnitude from g,r: sum (optionally weighted) fluxes
            # and convert back to a magnitude
            v = g - 0.487 * (g - r) - 0.0249
            if pdf is None:
                flux = np.sum(10**(-v / 2.5))
            else:
                flux = np.sum(pdf * 10**(-v / 2.5))
            abs_mag_v = -2.5 * np.log10(flux)
            return abs_mag_v

        def sumMag(mag_1, mag_2):
            # Combine two magnitudes by adding their fluxes
            flux_1 = 10**(-mag_1 / 2.5)
            flux_2 = 10**(-mag_2 / 2.5)
            return -2.5 * np.log10(flux_1 + flux_2)

        # Analytic part (stars below the faint detection limit)
        mass_init, mass_pdf, mass_act, mag_1, mag_2 = self.sample(mass_steps = steps)
        g,r = (mag_1,mag_2) if self.band_1 == 'g' else (mag_2,mag_1)
        #cut = np.logical_not((g > mag_bright) & (g < mag_faint) & (r > mag_bright) & (r < mag_faint))
        cut = ((g + self.distance_modulus) > mag_faint) if self.band_1 == 'g' else ((r + self.distance_modulus) > mag_faint)
        mag_unobs = visual(g[cut], r[cut], richness * mass_pdf[cut])

        # Stochastic part (bootstrap over simulated observable stars)
        abs_mag_obs_array = np.zeros(n_trials)
        for ii in range(0, n_trials):
            if ii%100==0: logger.debug('%i absolute magnitude trials'%ii)
            g, r = self.simulate(richness * self.stellar_mass())
            #cut = (g > 16.) & (g < 23.) & (r > 16.) & (r < 23.)
            cut = (g < mag_faint) if self.band_1 == 'g' else (r < mag_faint)
            mag_obs = visual(g[cut] - self.distance_modulus, r[cut] - self.distance_modulus)
            abs_mag_obs_array[ii] = sumMag(mag_obs, mag_unobs)

        # ADW: This shouldn't be necessary
        #abs_mag_obs_array = np.sort(abs_mag_obs_array)[::-1]

        # ADW: Careful, fainter abs mag is larger (less negative) number
        q = [100*alpha/2., 50, 100*(1-alpha/2.)]
        hi,med,lo = np.percentile(abs_mag_obs_array,q)
        return ugali.utils.stats.interval(med,lo,hi)
Beispiel #28
0
def writeColorLUT2(config,
                   outfile=None,
                   isochrone=None,
                   distance_modulus_array=None,
                   delta_mag=None,
                   mag_err_array=None,
                   mass_steps=10000,
                   plot=False):
    """
    Precompute a 4-dimensional signal color probability look-up table to speed up the likelihood evaluation.
    Inputs are a Config object (or file name), an Isochrone object,
    an array of distance moduli at which to evaluate the signal color probability,
    and an array of magnitude uncertainties which set the bin edges of those dimensions (zero implicity included).
    Finally there is an outfile name.
    """
    if plot: import ugali.utils.plotting

    if type(config) == str:
        config = ugali.utils.config.Config(config)
    if outfile is None:
        outfile = config.params['color_lut']['filename']
    if isochrone is None:
        isochrones = []
        for ii, name in enumerate(config.params['isochrone']['infiles']):
            isochrones.append(ugali.isochrone.Isochrone(config, name))
        isochrone = ugali.isochrone.CompositeIsochrone(
            isochrones, config.params['isochrone']['weights'])
    if distance_modulus_array is None:
        distance_modulus_array = config.params['color_lut'][
            'distance_modulus_array']
    if delta_mag is None:
        delta_mag = config.params['color_lut']['delta_mag']
    if mag_err_array is None:
        mag_err_array = config.params['color_lut']['mag_err_array']

    mag_buffer = 0.5  # Safety buffer in magnitudes around the color-magnitude space defined by the ROI
    epsilon = 1.e-10
    if config.params['catalog']['band_1_detection']:
        bins_mag_1 = numpy.arange(
            config.params['mag']['min'] - mag_buffer,
            config.params['mag']['max'] + mag_buffer + epsilon, delta_mag)
        bins_mag_2 = numpy.arange(
            config.params['mag']['min'] - config.params['color']['max'] -
            mag_buffer, config.params['mag']['max'] -
            config.params['color']['min'] + mag_buffer + epsilon, delta_mag)
    else:
        bins_mag_1 = numpy.arange(
            config.params['mag']['min'] + config.params['color']['min'] -
            mag_buffer, config.params['mag']['max'] +
            config.params['color']['max'] + mag_buffer + epsilon, delta_mag)
        bins_mag_2 = numpy.arange(
            config.params['mag']['min'] - mag_buffer,
            config.params['mag']['max'] + mag_buffer + epsilon, delta_mag)

    # Output binning configuration
    #print config.params['catalog']['band_1_detection']
    #print config.params['mag']['min'], config.params['mag']['max']
    #print config.params['color']['min'], config.params['color']['max']

    #print bins_mag_1[0], bins_mag_1[-1], len(bins_mag_1)
    #print bins_mag_2[0], bins_mag_2[-1], len(bins_mag_2)

    isochrone_mass_init, isochrone_mass_pdf, isochrone_mass_act, isochrone_mag_1, isochrone_mag_2 = isochrone.sample(
        mass_steps=mass_steps)

    hdul = pyfits.HDUList()

    for index_distance_modulus, distance_modulus in enumerate(
            distance_modulus_array):

        logger.debug('(%i/%i)' %
                     (index_distance_modulus, len(distance_modulus_array)))

        columns_array = []

        time_start = time.time()

        histo_isochrone_pdf = numpy.histogram2d(
            distance_modulus + isochrone_mag_1,
            distance_modulus + isochrone_mag_2,
            bins=[bins_mag_1, bins_mag_2],
            weights=isochrone_mass_pdf)[0]

        if plot:
            # Checked that axis are plotted correctly
            ugali.utils.plotting.twoDimensionalHistogram(
                'Isochrone',
                'mag_1',
                'mag_2',
                numpy.log10(histo_isochrone_pdf + epsilon).transpose(),
                bins_mag_1,
                bins_mag_2,
                lim_x=None,
                lim_y=None,
                vmin=None,
                vmax=None)

        for index_mag_err_1, mag_err_1 in enumerate(mag_err_array):
            for index_mag_err_2, mag_err_2 in enumerate(mag_err_array):
                logger.debug(
                    '  Distance modulus = %.2f mag_err_1 = %.2f mag_err_2 = %.2f'
                    % (distance_modulus, mag_err_1, mag_err_2))

                mag_1_sigma_step = delta_mag / mag_err_1
                n = int(numpy.ceil(4. / mag_1_sigma_step))
                mag_1_sigma = numpy.arange(-1. * (n + 0.5) * mag_1_sigma_step,
                                           ((n + 0.5) * mag_1_sigma_step) +
                                           epsilon, mag_1_sigma_step)
                mag_1_pdf_array = scipy.stats.norm.cdf(
                    mag_1_sigma[1:]) - scipy.stats.norm.cdf(mag_1_sigma[0:-1])

                mag_2_sigma_step = delta_mag / mag_err_2
                n = int(numpy.ceil(4. / mag_2_sigma_step))
                mag_2_sigma = numpy.arange(-1. * (n + 0.5) * mag_2_sigma_step,
                                           ((n + 0.5) * mag_2_sigma_step) +
                                           epsilon, mag_2_sigma_step)
                mag_2_pdf_array = scipy.stats.norm.cdf(
                    mag_2_sigma[1:]) - scipy.stats.norm.cdf(mag_2_sigma[0:-1])

                mag_1_pdf, mag_2_pdf = numpy.meshgrid(mag_2_pdf_array,
                                                      mag_1_pdf_array)

                pdf = mag_1_pdf * mag_2_pdf

                histo_isochrone_pdf_convolve = scipy.signal.convolve2d(
                    histo_isochrone_pdf, pdf, mode='same')

                if plot:
                    # Checked that axis are plotted correctly
                    ugali.utils.plotting.twoDimensionalHistogram(
                        'Convolved Isochrone',
                        'mag_1',
                        'mag_2',
                        numpy.log10(histo_isochrone_pdf_convolve +
                                    epsilon).transpose(),
                        bins_mag_1,
                        bins_mag_2,
                        lim_x=None,
                        lim_y=None,
                        vmin=None,
                        vmax=None)

                columns_array.append(
                    pyfits.Column(
                        name='%i%i' % (index_mag_err_1, index_mag_err_2),
                        format='%iE' % (histo_isochrone_pdf_convolve.shape[1]),
                        array=histo_isochrone_pdf_convolve))

        hdu = pyfits.new_table(columns_array)
        hdu.header.update('DIST_MOD', distance_modulus)
        hdu.name = '%.2f' % (distance_modulus)
        hdul.append(hdu)

        time_end = time.time()
        logger.debug('%.2f s' % (time_end - time_start))

    # Store distance modulus info
    columns_array = [
        pyfits.Column(name='DISTANCE_MODULUS',
                      format='E',
                      array=distance_modulus_array)
    ]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'DISTANCE_MODULUS'
    hdul.append(hdu)

    # Store magnitude error info
    columns_array = [
        pyfits.Column(name='BINS_MAG_ERR',
                      format='E',
                      array=numpy.insert(mag_err_array, 0, 0.))
    ]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_ERR'
    hdul.append(hdu)

    # Store magnitude 1 info
    columns_array = [
        pyfits.Column(name='BINS_MAG_1', format='E', array=bins_mag_1)
    ]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_1'
    hdul.append(hdu)

    # Store magnitude 2 info
    columns_array = [
        pyfits.Column(name='BINS_MAG_2', format='E', array=bins_mag_2)
    ]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_2'
    hdul.append(hdu)

    logger.info('Writing look-up table to %s' % (outfile))
    hdul.writeto(outfile, clobber=True)
Beispiel #29
0
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog into chunks by healpix pixel.
    
    Parameters:
    -----------
    infiles : List of input files
    config  : Configuration file
    force   : Overwrite existing files (deprecated)
    
    Returns:
    --------
    None
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    coordsys = config['coords']['coordsys'].upper()
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    # ADW: It would probably be better (and more efficient) to do the
    # pixelizing and the new column insertion separately.
    for i,filename in enumerate(infiles):
        logger.info('(%i/%i) %s'%(i+1, len(infiles), filename))
        data = fitsio.read(filename)
        logger.info("%i objects found"%len(data))
        if not len(data): continue

        # Materialize the map: in Python 3 `map` is a one-shot iterator, so
        # the second membership test below would otherwise see a consumed
        # iterator and spuriously fail.
        columns = list(map(str.upper,data.dtype.names))
        names,arrs = [],[]

        if (lon_field in columns) and (lat_field in columns):
            lon,lat = data[lon_field],data[lat_field]
        elif coordsys == 'GAL':
            msg = "Columns '%s' and '%s' not found."%(lon_field,lat_field)
            msg += "\nConverting from RA,DEC"
            logger.warning(msg)
            lon,lat = cel2gal(data['RA'],data['DEC'])
            names += [lon_field,lat_field]
            arrs  += [lon,lat]
        elif coordsys == 'CEL':
            msg = "Columns '%s' and '%s' not found."%(lon_field,lat_field)
            msg += "\nConverting from GLON,GLAT"
            lon,lat = gal2cel(data['GLON'],data['GLAT'])
            names  += [lon_field,lat_field]
            arrs   += [lon,lat]

        # Healpix indices at the coarse (file) and fine (analysis) resolutions
        cat_pix = ang2pix(nside_catalog,lon,lat)
        pix_pix = ang2pix(nside_pixel,lon,lat)
        cat_pix_name = 'PIX%i'%nside_catalog
        pix_pix_name = 'PIX%i'%nside_pixel

        try:
            names += [cat_pix_name,pix_pix_name]
            arrs  += [cat_pix,pix_pix]
            data=mlab.rec_append_fields(data,names=names,arrs=arrs)
        except ValueError as e:
            # Columns already exist (or are incompatible); keep original data
            logger.warn(str(e)+'; not adding column.')
            #data[cat_pix_name] = cat_pix
            #data[pix_pix_name] = pix_pix
                               
        # Write each coarse pixel's rows to its own catalog file
        for pix in np.unique(cat_pix):
            logger.debug("Processing pixel %s"%pix)

            arr = data[cat_pix == pix]
            outfile = filenames.data['catalog'][pix]

            if not os.path.exists(outfile):
                logger.debug("Creating %s"%outfile)
                out=fitsio.FITS(outfile,mode='rw')
                out.write(arr)

                # Stamp standard HEALPix keywords on the new file
                hdr=healpix.header_odict(nside=nside_catalog,
                                                     coord=coordsys[0])
                for key in ['PIXTYPE','ORDERING','NSIDE','COORDSYS']:
                    out[1].write_key(*list(hdr[key].values()))
                out[1].write_key('PIX',pix,comment='HEALPIX pixel for this file')
            else:
                # File exists from a previous input file: append rows
                out=fitsio.FITS(outfile,mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s"%outfile)
            out.close()
Beispiel #30
0
    def query_server(self, outfile, age, metallicity):
        """ Server query for the isochrone file.

        Parameters:
        -----------
        outfile     : name of output isochrone file
        age         : isochrone age (Gyr)
        metallicity : isochrone metallicity (z)
        
        Returns:
        --------
        outfile     : name of output isochrone file

        Raises:
        -------
        RuntimeError : if age/metallicity are out of range or the server
                       response does not contain an output filename
        """
        params = copy.deepcopy(self.download_defaults)

        # Validate the requested age against the server's allowed log(age) range
        epsilon = 1e-4
        lage = np.log10(age * 1e9)

        lage_min = params.get('isoc_lage0', 6.602)
        lage_max = params.get('isoc_lage1', 10.1303)

        if not (lage_min - epsilon < lage < lage_max + epsilon):
            msg = 'Age outside of valid range: %g [%g < log(age) < %g]' % (
                lage, lage_min, lage_max)
            raise RuntimeError(msg)

        z_min = params.get('isoc_z0', 0.0001)
        z_max = params.get('isoc_z1', 0.03)

        if not (z_min <= metallicity <= z_max):
            msg = 'Metallicity outside of valid range: %g [%g < z < %g]' % (
                metallicity, z_min, z_max)
            raise RuntimeError(msg)

        params['photsys_file'] = photsys_dict[self.survey]
        # Parameter names changed between CMD server versions
        if params['cmd_version'] < 3.3:
            params['isoc_age'] = age * 1e9
            params['isoc_zeta'] = metallicity
        else:
            params['isoc_agelow'] = age * 1e9
            params['isoc_zlow'] = metallicity

        server = self.download_url
        url = server + '/cgi-bin/cmd_%s' % params['cmd_version']
        # First check that the server is alive
        logger.debug("Accessing %s..." % url)
        urlopen(url, timeout=2)

        q = urlencode(params).encode('utf-8')
        logger.debug(url + '?' + q)
        c = str(urlopen(url, q).read())
        # Raw string: '\d' is an invalid escape sequence in a normal string
        # literal (SyntaxWarning/SyntaxError on modern Python)
        aa = re.compile(r'output\d+')
        fname = aa.findall(c)

        if len(fname) == 0:
            msg = "Output filename not found"
            raise RuntimeError(msg)

        out = '{0}/tmp/{1}.dat'.format(server, fname[0])

        # Fetch the generated isochrone table from the server's tmp area
        cmd = 'wget --progress dot:binary %s -O %s' % (out, outfile)
        logger.debug(cmd)
        stdout = subprocess.check_output(cmd,
                                         shell=True,
                                         stderr=subprocess.STDOUT)
        logger.debug(stdout)

        return outfile
Beispiel #31
0
    def search(self, coords=None, distance_modulus=None, tolerance=1.e-2):
        """
        Organize a grid search over ROI target pixels and distance moduli in distance_modulus_array.

        Fills the `*_sparse_array` members with shape [nmoduli, npixels].
        When `coords` and/or `distance_modulus` are given, only the matching
        grid cell(s) are evaluated; all other entries remain zero.

        coords: (lon,lat) tuple; restrict to the nearest target pixel
        distance_modulus: scalar; restrict to the nearest grid modulus
        tolerance: unused here (kept for interface compatibility)
        """
        nmoduli = len(self.distance_modulus_array)
        npixels    = len(self.roi.pixels_target)
        # Result grids: [distance modulus index][target pixel index]
        self.log_likelihood_sparse_array       = numpy.zeros([nmoduli, npixels])
        self.richness_sparse_array             = numpy.zeros([nmoduli, npixels])
        self.richness_lower_sparse_array       = numpy.zeros([nmoduli, npixels])
        self.richness_upper_sparse_array       = numpy.zeros([nmoduli, npixels])
        self.richness_upper_limit_sparse_array = numpy.zeros([nmoduli, npixels])
        self.stellar_mass_sparse_array         = numpy.zeros([nmoduli, npixels])
        self.fraction_observable_sparse_array  = numpy.zeros([nmoduli, npixels])

        # Specific pixel/distance_modulus
        coord_idx, distance_modulus_idx = None, None
        if coords is not None:
            # Match to nearest grid coordinate index
            coord_idx = self.roi.indexTarget(coords[0],coords[1])
        if distance_modulus is not None:
            # Match to nearest distance modulus index
            distance_modulus_idx=np.fabs(self.distance_modulus_array-distance_modulus).argmin()

        lon, lat = self.roi.pixels_target.lon, self.roi.pixels_target.lat
            
        logger.info('Looping over distance moduli in grid search ...')
        for ii, distance_modulus in enumerate(self.distance_modulus_array):

            # Specific pixel
            if distance_modulus_idx is not None:
                if ii != distance_modulus_idx: continue

            logger.info('  (%-2i/%i) Distance Modulus=%.1f ...'%(ii+1,nmoduli,distance_modulus))

            # Set distance_modulus once to save time
            self.loglike.set_params(distance_modulus=distance_modulus)

            for jj in range(0, npixels):
                # Specific pixel
                if coord_idx is not None:
                    if jj != coord_idx: continue

                # Set kernel location
                self.loglike.set_params(lon=lon[jj],lat=lat[jj])
                # Doesn't re-sync distance_modulus each time
                self.loglike.sync_params()
                                         
                args = (jj+1, npixels, self.loglike.source.lon, self.loglike.source.lat)
                message = '    (%-3i/%i) Candidate at (%.2f, %.2f) ... '%(args)

                # Maximize the likelihood over richness at this grid point
                self.log_likelihood_sparse_array[ii][jj], self.richness_sparse_array[ii][jj], parabola = self.loglike.fit_richness()
                self.stellar_mass_sparse_array[ii][jj] = self.stellar_mass_conversion * self.richness_sparse_array[ii][jj]
                self.fraction_observable_sparse_array[ii][jj] = self.loglike.f
                if self.config['scan']['full_pdf']:
                    # Optionally also derive richness interval and upper limit
                    #n_pdf_points = 100
                    #richness_range = parabola.profileUpperLimit(delta=25.) - self.richness_sparse_array[ii][jj]
                    #richness = numpy.linspace(max(0., self.richness_sparse_array[ii][jj] - richness_range),
                    #                          self.richness_sparse_array[ii][jj] + richness_range,
                    #                          n_pdf_points)
                    #if richness[0] > 0.:
                    #    richness = numpy.insert(richness, 0, 0.)
                    #    n_pdf_points += 1
                    # 
                    #log_likelihood = numpy.zeros(n_pdf_points)
                    #for kk in range(0, n_pdf_points):
                    #    log_likelihood[kk] = self.loglike.value(richness=richness[kk])
                    #parabola = ugali.utils.parabola.Parabola(richness, 2.*log_likelihood)
                    #self.richness_lower_sparse_array[ii][jj], self.richness_upper_sparse_array[ii][jj] = parabola.confidenceInterval(0.6827)
                    self.richness_lower_sparse_array[ii][jj], self.richness_upper_sparse_array[ii][jj] = self.loglike.richness_interval(0.6827)
                    
                    self.richness_upper_limit_sparse_array[ii][jj] = parabola.bayesianUpperLimit(0.95)

                    args = (
                        2. * self.log_likelihood_sparse_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_sparse_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_lower_sparse_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_upper_sparse_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_upper_limit_sparse_array[ii][jj]
                    )
                    message += 'TS=%.1f, Stellar Mass=%.1f (%.1f -- %.1f @ 0.68 CL, < %.1f @ 0.95 CL)'%(args)
                else:
                    args = (
                        2. * self.log_likelihood_sparse_array[ii][jj], 
                        self.stellar_mass_conversion * self.richness_sparse_array[ii][jj],
                        self.fraction_observable_sparse_array[ii][jj]
                    )
                    message += 'TS=%.1f, Stellar Mass=%.1f, Fraction=%.2g'%(args)
                logger.debug( message )
                
                #if coords is not None and distance_modulus is not None:
                #    results = [self.richness_sparse_array[ii][jj],
                #               self.log_likelihood_sparse_array[ii][jj],
                #               self.richness_lower_sparse_array[ii][jj],
                #               self.richness_upper_sparse_array[ii][jj],
                #               self.richness_upper_limit_sparse_array[ii][jj],
                #               richness, log_likelihood, self.loglike.p, self.loglike.f]
                #    return results

            # Report the best-fit pixel for this distance modulus
            jj_max = self.log_likelihood_sparse_array[ii].argmax()
            args = (
                jj_max+1, npixels, lon[jj_max], lat[jj_max],
                2. * self.log_likelihood_sparse_array[ii][jj_max], 
                self.stellar_mass_conversion * self.richness_sparse_array[ii][jj_max]
            )
            message = '  (%-3i/%i) Maximum at (%.2f, %.2f) ... TS=%.1f, Stellar Mass=%.1f'%(args)
            logger.info( message )
Beispiel #32
0
def load(args):
    """Read a FITS table from a packed (infile, columns) argument tuple.

    Packing the arguments into a single tuple keeps this callable usable
    with multiprocessing.Pool.map.
    """
    infile = args[0]
    columns = args[1]
    logger.debug("Loading %s..." % infile)
    return fitsio.read(infile, columns=columns)
Beispiel #33
0
def writeColorLUT(config,
                  outfile=None, isochrone=None, distance_modulus_array=None,
                  delta_mag=None, mag_err_array=None,
                  mass_steps=1000000, plot=False):
    """
    Precompute a 4-dimensional signal color probability look-up table to speed up the likelihood evaluation.

    Inputs are a Config object (or file name), an Isochrone object,
    an array of distance moduli at which to evaluate the signal color probability,
    and an array of magnitude uncertainties which set the bin edges of those dimensions (zero implicity included).
    Finally there is an outfile name.

    Unlike writeColorLUT2, the photometric errors are applied by Monte Carlo
    smearing of the isochrone samples rather than by kernel convolution.

    Returns None; writes the look-up table FITS file to `outfile`.
    """
    if plot: import ugali.utils.plotting
    if type(config) == str:
        config = ugali.utils.config.Config(config)
    if outfile is None:
        outfile = config.params['color_lut']['filename']
    if isochrone is None:
        # Build a composite isochrone from the individually configured files
        isochrones = []
        for ii, name in enumerate(config.params['isochrone']['infiles']):
            isochrones.append(ugali.analysis.isochrone.Isochrone(config, name))
        isochrone = ugali.analysis.isochrone.CompositeIsochrone(isochrones, config.params['isochrone']['weights'])
    if distance_modulus_array is None:
        distance_modulus_array = config.params['color_lut']['distance_modulus_array']
    if delta_mag is None:
        delta_mag = config.params['color_lut']['delta_mag']
    if mag_err_array is None:
        mag_err_array = config.params['color_lut']['mag_err_array']

    mag_buffer = 0.5 # Safety buffer in magnitudes around the color-magnitude space defined by the ROI
    epsilon = 1.e-10 # guards against numpy.arange excluding the upper bin edge
    if config.params['catalog']['band_1_detection']:
        bins_mag_1 = numpy.arange(config.params['mag']['min'] - mag_buffer,
                                  config.params['mag']['max'] + mag_buffer + epsilon,
                                  delta_mag)
        bins_mag_2 = numpy.arange(config.params['mag']['min'] - config.params['color']['max'] - mag_buffer,
                                  config.params['mag']['max'] - config.params['color']['min'] + mag_buffer + epsilon,
                                  delta_mag)
    else:
        bins_mag_1 = numpy.arange(config.params['mag']['min'] + config.params['color']['min'] - mag_buffer,
                                  config.params['mag']['max'] + config.params['color']['max'] + mag_buffer + epsilon,
                                  delta_mag)
        bins_mag_2 = numpy.arange(config.params['mag']['min'] - mag_buffer,
                                  config.params['mag']['max'] + mag_buffer + epsilon,
                                  delta_mag)

    # Output binning configuration
    #print config.params['catalog']['band_1_detection']
    #print config.params['mag']['min'], config.params['mag']['max']
    #print config.params['color']['min'], config.params['color']['max']

    #print bins_mag_1[0], bins_mag_1[-1], len(bins_mag_1)
    #print bins_mag_2[0], bins_mag_2[-1], len(bins_mag_2)

    isochrone_mass_init, isochrone_mass_pdf, isochrone_mass_act, isochrone_mag_1, isochrone_mag_2 = isochrone.sample(mass_steps=mass_steps)

    # make randoms (unit-normal deviates reused for every error combination)
    randoms_1 = numpy.random.normal(0., 1., len(isochrone_mass_pdf))
    randoms_2 = numpy.random.normal(0., 1., len(isochrone_mass_pdf))

    hdul = pyfits.HDUList()

    # One table extension per distance modulus; one column per
    # (mag_err_1, mag_err_2) pair within that extension.
    for index_distance_modulus, distance_modulus in enumerate(distance_modulus_array):

        logger.debug('(%i/%i)'%(index_distance_modulus, len(distance_modulus_array)))

        columns_array = []
        
        time_start = time.time()
        
        for index_mag_err_1, mag_err_1 in enumerate(mag_err_array):
            for index_mag_err_2, mag_err_2 in enumerate(mag_err_array):
                logger.debug('  (%i/%i) Distance modulus = %.2f mag_err_1 = %.3f mag_err_2 = %.3f'%(index_mag_err_1 * len(mag_err_array) + index_mag_err_2,
                                                                                             len(mag_err_array)**2,
                                                                                             distance_modulus,
                                                                                             mag_err_1,
                                                                                             mag_err_2))
                
                # Add randoms: smear the isochrone magnitudes by the
                # photometric errors before histogramming
                histo_isochrone_pdf = numpy.histogram2d(distance_modulus + isochrone_mag_1 + randoms_1 * mag_err_1,
                                                        distance_modulus + isochrone_mag_2 + randoms_2 * mag_err_2,
                                                        bins=[bins_mag_1, bins_mag_2],
                                                        weights=isochrone_mass_pdf)[0]
                
                if plot:
                    # Checked that axis are plotted correctly
                    ugali.utils.plotting.twoDimensionalHistogram('Convolved Isochrone', 'mag_1', 'mag_2',
                                                                 numpy.log10(histo_isochrone_pdf + epsilon).transpose(),
                                                                 bins_mag_1, bins_mag_2,
                                                                 lim_x=None, lim_y=None,
                                                                 vmin=None, vmax=None)
                    # Python 3: raw_input was renamed to input
                    input('WAIT')

                columns_array.append(pyfits.Column(name = '%i%i'%(index_mag_err_1, index_mag_err_2),
                                                   format = '%iE'%(histo_isochrone_pdf.shape[1]),
                                                   array = histo_isochrone_pdf))

        hdu = pyfits.new_table(columns_array)
        hdu.header.update('DIST_MOD', distance_modulus)
        hdu.name = '%.2f'%(distance_modulus)
        hdul.append(hdu)

        time_end = time.time()
        logger.debug('%.2f s'%(time_end - time_start))

    # Store distance modulus info
    columns_array = [pyfits.Column(name = 'DISTANCE_MODULUS',
                                   format = 'E',
                                   array = distance_modulus_array)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'DISTANCE_MODULUS'
    hdul.append(hdu)

    # Store magnitude error info (bin edges, with an implicit leading zero)
    columns_array = [pyfits.Column(name = 'BINS_MAG_ERR',
                                   format = 'E',
                                   array = numpy.insert(mag_err_array, 0, 0.))]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_ERR'
    hdul.append(hdu)

    # Store magnitude 1 info
    columns_array = [pyfits.Column(name = 'BINS_MAG_1',
                                   format = 'E',
                                   array = bins_mag_1)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_1'
    hdul.append(hdu)

    # Store magnitude 2 info
    columns_array = [pyfits.Column(name = 'BINS_MAG_2',
                                   format = 'E',
                                   array = bins_mag_2)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_2'
    hdul.append(hdu)

    logger.info('Writing look-up table to %s'%(outfile))
    hdul.writeto(outfile, clobber = True)
Beispiel #34
0
    def download(self,age,metallicity,outdir=None,force=False):
        """
        Check valid parameter range and download isochrones from:
        http://stev.oapd.inaf.it/cgi-bin/cmd

        Parameters:
        -----------
        age         : isochrone age (Gyr)
        metallicity : isochrone metallicity (z)
        outdir      : output directory (default: './')
        force       : re-download even if the output file already exists

        Raises:
        -------
        RuntimeError : for out-of-range parameters, unrecognized survey,
                       or an unexpected server response
        """
        epsilon = 1e-4
        lage = np.log10(age*1e9)
        lage_min,lage_max = self.defaults['isoc_lage0'],self.defaults['isoc_lage1']
        if not (lage_min-epsilon < lage <lage_max+epsilon):
            msg = 'Age outside of valid range: %g [%g < log(age) < %g]'%(lage,lage_min,lage_max)
            raise RuntimeError(msg)

        z_min,z_max = self.defaults['isoc_z0'],self.defaults['isoc_z1']
        if not (z_min <= metallicity <= z_max):
            msg = 'Metallicity outside of valid range: %g [%g < z < %g]'%(metallicity,z_min,z_max)
            raise RuntimeError(msg)

        # Survey-specific photometric system table on the CMD server
        survey=self.survey.lower()
        if survey=='des':
            photsys_file='tab_mag_odfnew/tab_mag_decam.dat'
        elif survey=='sdss':
            photsys_file='tab_mag_odfnew/tab_mag_sloan.dat'
        else:
            msg = 'Unrecognized survey: %s'%survey
            raise RuntimeError(msg)

        if outdir is None: outdir = './'
        mkdir(outdir)

        basename = self.params2filename(age,metallicity)
        outfile = os.path.join(outdir,basename)
            
        if os.path.exists(outfile) and not force:
            logger.warning("Found %s; skipping..."%(outfile))
            return

        logger.info("Downloading isochrone: %s (age=%.2fGyr, metallicity=%g)"%(basename,age,metallicity))

        d = dict(self.defaults)
        d['photsys_file'] = photsys_file
        d['isoc_age']     = age * 1e9
        d['isoc_zeta']    = metallicity

        server = 'http://stev.oapd.inaf.it'
        url = server + '/cgi-bin/cmd_%s'%d['cmd_version']
        logger.info("Accessing %s..."%url)

        # Python 3: urlopen requires a bytes POST body, and read() returns
        # bytes; wrap in str() so the str regex below works (consistent
        # with query_server).
        q = urlencode(d).encode('utf-8')
        logger.debug(url+'?'+q.decode('utf-8'))
        c = str(urlopen(url, q).read())
        # Raw string: '\d' is an invalid escape sequence in a normal string
        # literal (SyntaxWarning/SyntaxError on modern Python)
        aa = re.compile(r'output\d+')
        fname = aa.findall(c)
        if len(fname) > 0:
            # Fetch the generated isochrone table from the server's tmp area
            out = '{0}/~lgirardi/tmp/{1}.dat'.format(server, fname[0])
            cmd = 'wget %s -O %s'%(out,outfile)
            logger.debug(cmd)
            stdout = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT)
            logger.debug(stdout)
        else:
            #print(c)
            raise RuntimeError('Server Response is incorrect')
Beispiel #35
0
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog into chunks by healpix pixel.

    Parameters:
    -----------
    infiles : List of input files
    config  : Configuration file
    force   : Overwrite existing files (depricated)

    Returns:
    --------
    None
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    coordsys = config['coords']['coordsys'].upper()
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    # ADW: It would probably be better (and more efficient) to do the
    # pixelizing and the new column insertion separately.
    for i, filename in enumerate(infiles):
        logger.info('(%i/%i) %s' % (i + 1, len(infiles), filename))
        data = fitsio.read(filename)
        logger.info("%i objects found" % len(data))
        if not len(data): continue

        # BUG FIX: in Python 3, `map` returns a one-shot iterator, so the
        # second `in` test below would scan an already partially-consumed
        # iterator and could spuriously report the column missing.
        # Materialize the upper-cased names into a list instead.
        columns = [name.upper() for name in data.dtype.names]
        names, arrs = [], []

        if (lon_field in columns) and (lat_field in columns):
            # Catalog already carries the requested coordinate columns.
            lon, lat = data[lon_field], data[lat_field]
        elif coordsys == 'GAL':
            msg = "Columns '%s' and '%s' not found." % (lon_field, lat_field)
            msg += "\nConverting from RA,DEC"
            logger.warning(msg)
            lon, lat = cel2gal(data['RA'], data['DEC'])
            names += [lon_field, lat_field]
            arrs += [lon, lat]
        elif coordsys == 'CEL':
            msg = "Columns '%s' and '%s' not found." % (lon_field, lat_field)
            msg += "\nConverting from GLON,GLAT"
            lon, lat = gal2cel(data['GLON'], data['GLAT'])
            names += [lon_field, lat_field]
            arrs += [lon, lat]
        else:
            # Previously this fell through and raised an opaque NameError
            # on `lon`; fail loudly with the actual problem instead.
            msg = "Columns '%s' and '%s' not found." % (lon_field, lat_field)
            raise ValueError(msg)

        # Pixel assignment at both the coarse (catalog) and fine (mask)
        # resolutions; both are appended as new columns.
        cat_pix = ang2pix(nside_catalog, lon, lat)
        pix_pix = ang2pix(nside_pixel, lon, lat)
        cat_pix_name = 'PIX%i' % nside_catalog
        pix_pix_name = 'PIX%i' % nside_pixel

        names += [cat_pix_name, pix_pix_name]
        arrs += [cat_pix, pix_pix]
        data = mlab.rec_append_fields(data, names=names, arrs=arrs)

        for pix in np.unique(cat_pix):
            logger.debug("Processing pixel %s" % pix)

            arr = data[cat_pix == pix]
            outfile = filenames.data['catalog'][pix]

            if not os.path.exists(outfile):
                # First write for this pixel: create the file and stamp
                # the HEALPix header keywords.
                logger.debug("Creating %s" % outfile)
                out = fitsio.FITS(outfile, mode='rw')
                out.write(arr)

                hdr = healpix.header_odict(nside=nside_catalog,
                                           coord=coordsys[0])
                for key in ['PIXTYPE', 'ORDERING', 'NSIDE', 'COORDSYS']:
                    out[1].write_key(*list(hdr[key].values()))
                out[1].write_key('PIX',
                                 pix,
                                 comment='HEALPIX pixel for this file')
            else:
                # Subsequent writes append rows to the existing table.
                out = fitsio.FITS(outfile, mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s" % outfile)
            out.close()
Beispiel #36
0
    def query_server(self, outfile, age, metallicity):
        """Download an isochrone from the remote server into *outfile*.

        Builds the web-form payload from the download defaults, posts it,
        parses the generated output filename out of the server response,
        fetches and unzips the result with wget/unzip, and moves the
        resulting '.cmd' file to *outfile*.
        """
        feh = self.z2feh(metallicity)

        form = dict(self.download_defaults)
        form['output'] = dict_output[self.survey]
        form['FeH_value'] = feh
        age_value = age * 1e9
        if form['age_scale'] == 'log10':
            age_value = np.log10(age_value)
        form['age_value'] = age_value

        server = self.download_url
        url = server + '/iso_form.php'
        # Quick liveness probe before submitting the full request
        logger.debug("Accessing %s..." % url)
        urlopen(url, timeout=2)

        #response = requests.post(url,data=params)
        payload = urlencode(form).encode('utf-8')
        response = urlopen(Request(url, data=payload))
        try:
            # The generated filename is embedded in quotes in the response
            zipname = os.path.basename(str(response.read()).split('"')[1])
        except Exception as e:
            logger.debug(str(e))
            raise RuntimeError('Output filename not found')

        scratch = os.path.dirname(tempfile.NamedTemporaryFile().name)
        archive = os.path.join(scratch, zipname)

        remote = '{0}/tmp/{1}'.format(server, zipname)
        # Download then unpack, logging each shell command and its output
        for cmd in ('wget --progress dot:binary %s -P %s' % (remote, scratch),
                    'unzip %s -d %s' % (archive, scratch)):
            logger.debug(cmd)
            stdout = subprocess.check_output(cmd,
                                             shell=True,
                                             stderr=subprocess.STDOUT)
            logger.debug(stdout)

        logger.debug("Creating %s..." % outfile)
        shutil.move(archive.replace('.zip', '.cmd'), outfile)
        os.remove(archive)

        return outfile
Beispiel #37
0
def load(args):
    """Read a FITS file into a recarray; *args* is ``(infile, columns)``."""
    path, cols = args
    logger.debug("Loading %s..." % path)
    data = fitsio.read(path, columns=cols)
    return data
Beispiel #38
0
    def search(self, coords=None, distance_modulus=None, tolerance=1.e-2):
        """
        Organize a grid search over the ROI target pixels and the distance
        moduli in ``distance_modulus_array``, maximizing the likelihood with
        respect to richness at each grid point.  Results are stored in
        sparse ``[nmoduli, npixels]`` arrays on ``self``.

        Parameters:
        -----------
        coords : (lon,lat) -- restrict the scan to the nearest target pixel
        distance_modulus : scalar -- restrict to the nearest grid modulus
        tolerance : richness-fit tolerance (accepted but not used here;
            presumably consumed by the fit elsewhere -- TODO confirm)

        Returns:
        --------
        None
        """
        # Allocate one (modulus x pixel) grid per derived quantity.
        nmoduli = len(self.distance_modulus_array)
        npixels = len(self.roi.pixels_target)
        self.log_likelihood_sparse_array = numpy.zeros([nmoduli, npixels])
        self.richness_sparse_array = numpy.zeros([nmoduli, npixels])
        self.richness_lower_sparse_array = numpy.zeros([nmoduli, npixels])
        self.richness_upper_sparse_array = numpy.zeros([nmoduli, npixels])
        self.richness_upper_limit_sparse_array = numpy.zeros(
            [nmoduli, npixels])
        self.stellar_mass_sparse_array = numpy.zeros([nmoduli, npixels])
        self.fraction_observable_sparse_array = numpy.zeros([nmoduli, npixels])

        # Specific pixel/distance_modulus
        coord_idx, distance_modulus_idx = None, None
        if coords is not None:
            # Match to nearest grid coordinate index
            coord_idx = self.roi.indexTarget(coords[0], coords[1])
        if distance_modulus is not None:
            # Match to nearest distance modulus index
            distance_modulus_idx = np.fabs(self.distance_modulus_array -
                                           distance_modulus).argmin()

        lon, lat = self.roi.pixels_target.lon, self.roi.pixels_target.lat

        logger.info('Looping over distance moduli in grid search ...')
        for ii, distance_modulus in enumerate(self.distance_modulus_array):

            # If a specific modulus was requested, skip all the others.
            if distance_modulus_idx is not None:
                if ii != distance_modulus_idx: continue

            logger.info('  (%-2i/%i) Distance Modulus=%.1f ...' %
                        (ii + 1, nmoduli, distance_modulus))

            # Set distance_modulus once to save time
            self.loglike.set_params(distance_modulus=distance_modulus)

            for jj in range(0, npixels):
                # If a specific pixel was requested, skip all the others.
                if coord_idx is not None:
                    if jj != coord_idx: continue

                # Set kernel location
                self.loglike.set_params(lon=lon[jj], lat=lat[jj])
                # Doesn't re-sync distance_modulus each time
                self.loglike.sync_params()

                args = (jj + 1, npixels, self.loglike.source.lon,
                        self.loglike.source.lat)
                message = '    (%-3i/%i) Candidate at (%.2f, %.2f) ... ' % (
                    args)

                # Maximize likelihood over richness at this grid point.
                self.log_likelihood_sparse_array[ii][
                    jj], self.richness_sparse_array[ii][
                        jj], parabola = self.loglike.fit_richness()
                self.stellar_mass_sparse_array[ii][
                    jj] = self.stellar_mass_conversion * self.richness_sparse_array[
                        ii][jj]
                self.fraction_observable_sparse_array[ii][jj] = self.loglike.f
                if self.config['scan']['full_pdf']:
                    # Also derive the richness confidence interval and the
                    # Bayesian upper limit (more expensive).
                    #n_pdf_points = 100
                    #richness_range = parabola.profileUpperLimit(delta=25.) - self.richness_sparse_array[ii][jj]
                    #richness = numpy.linspace(max(0., self.richness_sparse_array[ii][jj] - richness_range),
                    #                          self.richness_sparse_array[ii][jj] + richness_range,
                    #                          n_pdf_points)
                    #if richness[0] > 0.:
                    #    richness = numpy.insert(richness, 0, 0.)
                    #    n_pdf_points += 1
                    #
                    #log_likelihood = numpy.zeros(n_pdf_points)
                    #for kk in range(0, n_pdf_points):
                    #    log_likelihood[kk] = self.loglike.value(richness=richness[kk])
                    #parabola = ugali.utils.parabola.Parabola(richness, 2.*log_likelihood)
                    #self.richness_lower_sparse_array[ii][jj], self.richness_upper_sparse_array[ii][jj] = parabola.confidenceInterval(0.6827)
                    self.richness_lower_sparse_array[ii][
                        jj], self.richness_upper_sparse_array[ii][
                            jj] = self.loglike.richness_interval(0.6827)

                    self.richness_upper_limit_sparse_array[ii][
                        jj] = parabola.bayesianUpperLimit(0.95)

                    # TS = 2 * delta(log-likelihood)
                    args = (2. * self.log_likelihood_sparse_array[ii][jj],
                            self.stellar_mass_conversion *
                            self.richness_sparse_array[ii][jj],
                            self.stellar_mass_conversion *
                            self.richness_lower_sparse_array[ii][jj],
                            self.stellar_mass_conversion *
                            self.richness_upper_sparse_array[ii][jj],
                            self.stellar_mass_conversion *
                            self.richness_upper_limit_sparse_array[ii][jj])
                    message += 'TS=%.1f, Stellar Mass=%.1f (%.1f -- %.1f @ 0.68 CL, < %.1f @ 0.95 CL)' % (
                        args)
                else:
                    args = (2. * self.log_likelihood_sparse_array[ii][jj],
                            self.stellar_mass_conversion *
                            self.richness_sparse_array[ii][jj],
                            self.fraction_observable_sparse_array[ii][jj])
                    message += 'TS=%.1f, Stellar Mass=%.1f, Fraction=%.2g' % (
                        args)
                logger.debug(message)

                #if coords is not None and distance_modulus is not None:
                #    results = [self.richness_sparse_array[ii][jj],
                #               self.log_likelihood_sparse_array[ii][jj],
                #               self.richness_lower_sparse_array[ii][jj],
                #               self.richness_upper_sparse_array[ii][jj],
                #               self.richness_upper_limit_sparse_array[ii][jj],
                #               richness, log_likelihood, self.loglike.p, self.loglike.f]
                #    return results

            # Report the best-fit pixel at this distance modulus.
            jj_max = self.log_likelihood_sparse_array[ii].argmax()
            args = (jj_max + 1, npixels, lon[jj_max], lat[jj_max],
                    2. * self.log_likelihood_sparse_array[ii][jj_max],
                    self.stellar_mass_conversion *
                    self.richness_sparse_array[ii][jj_max])
            message = '  (%-3i/%i) Maximum at (%.2f, %.2f) ... TS=%.1f, Stellar Mass=%.1f' % (
                args)
            logger.info(message)
Beispiel #39
0
    def calculate(self, infile, field=1, simple=False):
        """
        Calculate per-pixel magnitude limits from the catalog in *infile*.

        The catalog objects are grouped by coarse mask pixel; in each mask
        pixel the magnitude limit is either a release-wide constant
        (``simple=True``) or the median magnitude of objects near
        S/N = 10.  The coarse-pixel limits are then broadcast to the fine
        (``nside_pixel``) subpixels, and empty / out-of-footprint pixels
        are dropped.

        Parameters:
        -----------
        infile : catalog FITS file
        field  : which magnitude field to use (selects config keys)
        simple : use constant release/band magnitude limits

        Returns:
        --------
        out_pixels, out_maglims : fine-pixel indices and magnitude limits
        """
        logger.info("Calculating magnitude limit from %s" % infile)

        #manglefile = self.config['mangle']['infile_%i'%field]
        #footfile = self.config['data']['footprint']
        #try:
        #    footprint = fitsio.read(footfile)['I'].ravel()
        #except:
        #    logger.warn("Couldn't open %s; will try again."%footfile)
        #    footprint = footfile

        mag_column = self.config['catalog']['mag_%i_field' % field]
        magerr_column = self.config['catalog']['mag_err_%i_field' % field]

        # For simple maglims
        release = self.config['data']['release'].lower()
        band = self.config['catalog']['mag_%i_band' % field]
        pixel_pix_name = 'PIX%i' % self.nside_pixel

        # If the data already has a healpix pixel assignment then use it
        # Otherwise recalculate...
        try:
            data = fitsio.read(infile, columns=[pixel_pix_name])
        except ValueError as e:
            logger.info(str(e))
            columns = [
                self.config['catalog']['lon_field'],
                self.config['catalog']['lat_field']
            ]
            data = fitsio.read(infile, columns=columns)[columns]
            pix = ang2pix(self.nside_pixel, data[columns[0]], data[columns[1]])
            data = recfuncs.rec_append_fields(data, pixel_pix_name, pix)

        #mask_pixels = np.arange( hp.nside2npix(self.nside_mask), dtype='int')
        # Magnitude limit per coarse mask pixel (0 = empty/unusable)
        mask_maglims = np.zeros(hp.nside2npix(self.nside_mask))

        out_pixels = np.zeros(0, dtype='int')
        out_maglims = np.zeros(0)

        # Find the objects in each pixel
        pixel_pix = data[pixel_pix_name]
        mask_pix = ugali.utils.skymap.superpixel(pixel_pix, self.nside_pixel,
                                                 self.nside_mask)
        count = Counter(mask_pix)
        pixels = sorted(count.keys())
        # argsort of the digitized pixels groups object indices so that
        # each pixel's objects occupy a contiguous slice (consumed below
        # via the running `idx` offset).
        pix_digi = np.digitize(mask_pix, pixels).argsort()
        idx = 0
        min_num = 500          # minimum objects for a reliable estimate
        signal_to_noise = 10.
        magerr_lim = 1 / signal_to_noise
        for pix in pixels:
            # Calculate the magnitude limit in each pixel
            num = count[pix]
            objs = data[pix_digi[idx:idx + num]]
            idx += num
            if simple:
                # Set constant magnitude limits
                logger.debug("Simple magnitude limit for %s" % infile)
                mask_maglims[pix] = MAGLIMS[release][band]
            elif num < min_num:
                logger.info('Found <%i objects in pixel %i' % (min_num, pix))
                mask_maglims[pix] = 0
            else:
                mag = objs[mag_column]
                magerr = objs[magerr_column]
                # Estimate the magnitude limit as suggested by:
                # https://deswiki.cosmology.illinois.edu/confluence/display/DO/SVA1+Release+Document
                # (https://desweb.cosmology.illinois.edu/confluence/display/Operations/SVA1+Doc)
                maglim = np.median(mag[(magerr > 0.9 * magerr_lim)
                                       & (magerr < 1.1 * magerr_lim)])

                # Alternative method to estimate the magnitude limit by fitting median
                #mag_min, mag_max = mag.min(),mag.max()
                #mag_bins = np.arange(mag_min,mag_max,0.1) #0.1086?
                #x,y = ugali.utils.binning.binnedMedian(mag,magerr,mag_bins)
                #x,y = x[~np.isnan(y)],y[~np.isnan(y)]
                #magerr_med = interp1d(x,y)
                #mag0 = np.median(x)
                #maglim = brentq(lambda a: magerr_med(a)-magerr_lim,x.min(),x.max(),disp=False)
                # Median from just objects near magerr cut

                mask_maglims[pix] = maglim

            logger.debug("%i (n=%i): maglim=%g" %
                         (pix, num, mask_maglims[pix]))
            # Broadcast the coarse-pixel limit to all its fine subpixels.
            subpix = ugali.utils.skymap.subpixel(pix, self.nside_mask,
                                                 self.nside_pixel)
            maglims = np.zeros(len(subpix)) + mask_maglims[pix]
            out_pixels = np.append(out_pixels, subpix)
            out_maglims = np.append(out_maglims, maglims)

        # Remove empty pixels
        logger.info("Removing empty pixels")
        idx = np.nonzero(out_maglims > 0)[0]
        out_pixels = out_pixels[idx]
        out_maglims = out_maglims[idx]

        # Remove pixels outside the footprint
        if self.footfile:
            logger.info("Checking footprint against %s" % self.footfile)
            lon, lat = pix2ang(self.nside_pixel, out_pixels)
            if self.config['coords']['coordsys'] == 'gal':
                # Footprint test is done in celestial coordinates
                ra, dec = gal2cel(lon, lat)
            else:
                ra, dec = lon, lat
            footprint = inFootprint(self.footprint, ra, dec)
            idx = np.nonzero(footprint)[0]
            out_pixels = out_pixels[idx]
            out_maglims = out_maglims[idx]

        logger.info("MAGLIM = %.3f +/- %.3f" %
                    (np.mean(out_maglims), np.std(out_maglims)))
        return out_pixels, out_maglims
Beispiel #40
0
    def findObjects(pixels, values, nside, zvalues, rev, good):
        """
        Characterize labelled candidates in a multi-dimensional HEALPix map.
     
        Parameters:
        values    : (Sparse) HEALPix array of data values
        nside     : HEALPix dimensionality
        pixels    : Pixel values associated to (sparse) HEALPix array
        zvalues   : Values of the z-dimension (usually distance modulus)
        rev       : Reverse indices for pixels in each "island"
        good      : Array containg labels for each "island"
     
        Returns:
        objs      : numpy.recarray of object characteristics
        """

        # One output record per labelled island.
        ngood = len(good)
        objs = numpy.recarray((ngood, ),
                              dtype=[
                                  ('LABEL', 'i4'),
                                  ('NPIX', 'i4'),
                                  ('VAL_MAX', 'f4'),
                                  ('IDX_MAX', 'i4'),
                                  ('ZIDX_MAX', 'i4'),
                                  ('PIX_MAX', 'i4'),
                                  ('X_MAX', 'f4'),
                                  ('Y_MAX', 'f4'),
                                  ('Z_MAX', 'f4'),
                                  ('X_CENT', 'f4'),
                                  ('Y_CENT', 'f4'),
                                  ('Z_CENT', 'f4'),
                                  ('X_BARY', 'f4'),
                                  ('Y_BARY', 'f4'),
                                  ('Z_BARY', 'f4'),
                                  ('CUT', 'i2'),
                              ])
        objs['CUT'][:] = 0

        shape = values.shape
        ncol = shape[1]
        for i in range(0, ngood):
            logger.debug("i=%i", i)
            # This code could use some cleanup...
            # Flat indices of this island's members (IDL-style reverse
            # indices: rev[rev[label]:rev[label+1]]).
            indices = rev[rev[good[i]]:rev[good[i] + 1]]
            npix = len(indices)
            idx = indices // ncol  # This is the spatial index
            zidx = indices % ncol  # This is the distance index

            pix = pixels[idx]  # This is the healpix pixel
            xval, yval = pix2ang(nside, pix)
            zval = zvalues[zidx]

            objs[i]['LABEL'] = good[i]
            objs[i]['NPIX'] = npix
            logger.debug("LABEL=%i" % objs[i]['LABEL'])
            logger.debug("NPIX=%i" % objs[i]['NPIX'])

            # Peak value over the island's (pixel, z) members.
            island = values[idx, zidx]
            idxmax = island.argmax()
            xval_max, yval_max, zval_max = xval[idxmax], yval[idxmax], zval[
                idxmax]

            objs[i]['VAL_MAX'] = island[idxmax]
            objs[i]['IDX_MAX'] = idx[idxmax]
            objs[i]['ZIDX_MAX'] = zidx[idxmax]
            objs[i]['PIX_MAX'] = pix[idxmax]
            objs[i]['X_MAX'] = xval_max
            objs[i]['Y_MAX'] = yval_max
            objs[i]['Z_MAX'] = zval_max

            # Project around the peak so centroid/barycenter are computed
            # in a locally flat frame, then map back to the sphere.
            proj = Projector(xval_max, yval_max)
            xpix, ypix = proj.sphereToImage(xval, yval)

            # Projected centroid
            x_cent, y_cent, zval_cent = numpy.average([xpix, ypix, zval],
                                                      axis=1)
            xval_cent, yval_cent = proj.imageToSphere(x_cent, y_cent)
            objs[i]['X_CENT'] = xval_cent
            objs[i]['Y_CENT'] = yval_cent
            objs[i]['Z_CENT'] = zval_cent

            # Projected barycenter (value-weighted centroid)
            weights = [island, island, island]
            x_bary, y_bary, zval_bary = numpy.average([xpix, ypix, zval],
                                                      weights=weights,
                                                      axis=1)
            xval_bary, yval_bary = proj.imageToSphere(x_bary, y_bary)
            objs[i]['X_BARY'] = xval_bary
            objs[i]['Y_BARY'] = yval_bary
            objs[i]['Z_BARY'] = zval_bary

        return objs
Beispiel #41
0
 def wrapper(*args, **kwargs):
     """Call the wrapped function, logging its name and elapsed seconds."""
     logger.debug("Running %r..."%(func.__name__))
     start = time.time()
     result = func(*args,**kwargs)
     logger.debug('%4.2fs'%(time.time()-start))
     return result
Beispiel #42
0
 def call(self, command):
     """Log *command*, run it through the shell, and return its exit status."""
     logger.debug(command)
     status = sub.call(command,shell=True)
     return status
Beispiel #43
0
    # E(B-V) reddening value at each catalog position.
    ebvval = ebv(ra, dec, args.ebv)

    values = [ebvval]
    dtypes = [('EBV', 'f4')]

    logger.info("Calculating extinction %s..." % args.ebv)
    for b in args.bands:
        if b in DESDM:
            # Presumably DESDM is the set of known survey band names --
            # TODO confirm.  Constant band for every object; the output
            # column name is suffixed with the band.
            band = np.repeat(b, len(ra))
            extname = args.ext + '_%s' % b.upper()
        else:
            # Otherwise treat `b` as a per-object band column in the data.
            band = data[b]
            extname = args.ext

        extval = extinction(ebvval, band)
        values.append(extval)
        dtypes.append((extname, 'f4'))

    out = np.rec.fromarrays(values, dtype=dtypes)

    # Writing...
    if not args.outfile:
        # In-place update of the input file.
        args.outfile = args.infile
    else:
        logger.debug("Copying %s to %s..." % (args.infile, args.outfile))
        shutil.copy(args.infile, args.outfile)

    logger.info("Writing %s..." % args.outfile)
    writefile(args.outfile, out, force=args.force)
    logger.info("Done.")
Beispiel #44
0
 def call(self, command):
     """Run *command* in a shell after logging it; return the exit code."""
     logger.debug(command)
     return sub.call(command, shell=True)
Beispiel #45
0
    def calculate(self, infile, field=1, simple=False):
        """
        Calculate per-pixel magnitude limits from the catalog in *infile*.

        Objects are grouped by coarse mask pixel; each pixel's limit is
        either a constant release/band value (``simple=True``) or the
        median magnitude of objects near S/N = 10.  Limits are broadcast
        to the fine subpixels, then empty and out-of-footprint pixels
        are removed.

        Parameters:
        -----------
        infile : catalog FITS file
        field  : which magnitude field to use (selects config keys)
        simple : use constant release/band magnitude limits

        Returns:
        --------
        out_pixels, out_maglims : fine-pixel indices and magnitude limits
        """
        logger.info("Calculating magnitude limit from %s"%infile)

        #manglefile = self.config['mangle']['infile_%i'%field]
        footfile = self.config['data']['footprint']

        mag_column = self.config['catalog']['mag_%i_field'%field]
        magerr_column = self.config['catalog']['mag_err_%i_field'%field]

        # For simple maglims
        release = self.config['data']['release'].lower()
        band    = self.config['catalog']['mag_%i_band'%field]
         
        # NOTE(review): this file handle is never closed; pyfits data may
        # be lazily loaded, so closing before use could break -- confirm
        # and prefer a context manager if safe.
        f = pyfits.open(infile)
        header = f[1].header
        data = f[1].data
         
        #mask_pixels = numpy.arange( healpy.nside2npix(self.nside_mask), dtype='int')
        # Magnitude limit per coarse mask pixel (0 = empty/unusable)
        mask_maglims = numpy.zeros( healpy.nside2npix(self.nside_mask) )
         
        out_pixels = numpy.zeros(0,dtype='int')
        out_maglims = numpy.zeros(0)
         
        # Find the objects in each pixel
        pixel_pix = data['PIX%i'%self.nside_pixel]
        mask_pix = ugali.utils.skymap.superpixel(pixel_pix,self.nside_pixel,self.nside_mask)
        count = Counter(mask_pix)
        pixels = sorted(count.keys())
        # argsort of the digitized pixels groups object indices so each
        # pixel's objects occupy a contiguous slice (consumed via `idx`).
        pix_digi = numpy.digitize(mask_pix,pixels).argsort()
        idx = 0
        min_num = 500          # minimum objects for a reliable estimate
        signal_to_noise = 10.
        magerr_lim = 1/signal_to_noise
        for pix in pixels:
            # Calculate the magnitude limit in each pixel
            num = count[pix]
            objs = data[pix_digi[idx:idx+num]]
            idx += num
            if simple:
                # Set constant magnitude limits
                logger.debug("Simple magnitude limit for %s"%infile)
                mask_maglims[pix] = MAGLIMS[release][band]
            elif num < min_num:
                logger.info('Found <%i objects in pixel %i'%(min_num,pix))
                mask_maglims[pix] = 0
            else:
                mag = objs[mag_column]
                magerr = objs[magerr_column]
                # Estimate the magnitude limit as suggested by:
                # https://deswiki.cosmology.illinois.edu/confluence/display/DO/SVA1+Release+Document
                # (https://desweb.cosmology.illinois.edu/confluence/display/Operations/SVA1+Doc)
                maglim = numpy.median(mag[(magerr>0.9*magerr_lim)&(magerr<1.1*magerr_lim)])
         
                # Alternative method to estimate the magnitude limit by fitting median
                #mag_min, mag_max = mag.min(),mag.max()
                #mag_bins = numpy.arange(mag_min,mag_max,0.1) #0.1086?
                #x,y = ugali.utils.binning.binnedMedian(mag,magerr,mag_bins)
                #x,y = x[~numpy.isnan(y)],y[~numpy.isnan(y)]
                #magerr_med = interp1d(x,y)
                #mag0 = numpy.median(x) 
                #maglim = brentq(lambda a: magerr_med(a)-magerr_lim,x.min(),x.max(),disp=False)
                # Median from just objects near magerr cut
         
                mask_maglims[pix] = maglim

            logger.debug("%i (n=%i): maglim=%g"%(pix,num,mask_maglims[pix]))
            # Broadcast the coarse-pixel limit to all its fine subpixels.
            subpix = ugali.utils.skymap.subpixel(pix, self.nside_mask, self.nside_pixel)
            maglims = numpy.zeros(len(subpix)) + mask_maglims[pix] 
            out_pixels = numpy.append(out_pixels,subpix)
            out_maglims = numpy.append(out_maglims,maglims)
         
        # Remove empty pixels
        logger.info("Removing empty pixels")
        idx = numpy.nonzero(out_maglims > 0)[0]
        out_pixels  = out_pixels[idx]
        out_maglims = out_maglims[idx]
         
        # Remove pixels outside the footprint
        logger.info("Checking footprint against %s"%footfile)
        # Footprint test is done in celestial coordinates.
        glon,glat = pix2ang(self.nside_pixel,out_pixels)
        ra,dec = gal2cel(glon,glat)
        footprint = inFootprint(footfile,ra,dec)
        idx = numpy.nonzero(footprint)[0]
        out_pixels = out_pixels[idx]
        out_maglims = out_maglims[idx]
         
        logger.info("MAGLIM = %.3f +/- %.3f"%(numpy.mean(out_maglims),numpy.std(out_maglims)))         
        return out_pixels,out_maglims
Beispiel #46
0
    def search(self,
               coords=None,
               distance_modulus=None,
               extension=None,
               tolerance=1.e-2):
        """
        Organize a grid search over ROI target pixels, distance
        moduli, and extensions. If coords, distance_modulus, or
        extension is specified, then the nearest value in the
        predefined scan grid is used. ***This may be different than
        the input value.**

        Results are stored in ``[nmoduli, npixels]`` arrays on ``self``;
        for each (modulus, pixel) the best extension over the extension
        grid is kept.

        Parameters
        ----------
        coords : (float,float)
            coordinate to search (matched to nearest scan value)
        distance_modulus : float
            distance modulus to search (matched to nearest scan value)
        extension : float
            extension to search (matched to nearest scan value)
        tolerance : float
            tolerance on richness maximization

        Returns
        -------
        None 
        """
        # Allocate one (modulus x pixel) grid per derived quantity.
        nmoduli = len(self.distance_modulus_array)
        npixels = len(self.roi.pixels_target)
        self.loglike_array = np.zeros([nmoduli, npixels], dtype='f4')
        self.richness_array = np.zeros([nmoduli, npixels], dtype='f4')
        self.stellar_mass_array = np.zeros([nmoduli, npixels], dtype='f4')
        self.fraction_observable_array = np.zeros([nmoduli, npixels],
                                                  dtype='f4')
        self.extension_fit_array = np.zeros([nmoduli, npixels], dtype='f4')
        # DEPRECATED: ADW 2019-04-27
        self.richness_lower_array = np.zeros([nmoduli, npixels], dtype='f4')
        self.richness_upper_array = np.zeros([nmoduli, npixels], dtype='f4')
        self.richness_ulimit_array = np.zeros([nmoduli, npixels], dtype='f4')

        # Specific pixel/distance_modulus
        coord_idx, distance_modulus_idx, extension_idx = None, None, None
        if coords is not None:
            # Match to nearest grid coordinate index
            coord_idx = self.roi.indexTarget(coords[0], coords[1])
        if distance_modulus is not None:
            # Match to nearest distance modulus index
            distance_modulus_idx = np.fabs(self.distance_modulus_array -
                                           distance_modulus).argmin()
        if extension is not None:
            # Match to nearest extension
            extension_idx = np.fabs(self.extension_array - extension).argmin()

        lon, lat = self.roi.pixels_target.lon, self.roi.pixels_target.lat

        logger.info('Looping over distance moduli in grid search ...')
        for ii, distance_modulus in enumerate(self.distance_modulus_array):
            # Specific distance
            if distance_modulus_idx is not None:
                if ii != distance_modulus_idx: continue

            logger.info('  (%-2i/%i) Distance Modulus=%.1f ...' %
                        (ii + 1, nmoduli, distance_modulus))

            # No objects, continue
            if len(self.loglike.catalog) == 0:
                logger.warn("No catalog objects")
                continue

            # Set distance_modulus once to save time
            self.loglike.set_params(distance_modulus=distance_modulus)
            # Loop over pixels
            for jj in range(0, npixels):
                # Specific pixel
                if coord_idx is not None:
                    if jj != coord_idx: continue

                # Set kernel location
                self.loglike.set_params(lon=lon[jj], lat=lat[jj])

                loglike = 0
                # Loop over extensions
                for kk, ext in enumerate(self.extension_array):
                    # Specific extension
                    if extension_idx is not None:
                        if kk != extension_idx: continue

                    # Set extension
                    self.loglike.set_params(extension=ext)

                    # Doesn't re-sync distance_modulus each time
                    self.loglike.sync_params()

                    # Maximize the likelihood with respect to richness
                    loglike, rich, p = self.loglike.fit_richness()

                    if loglike < self.loglike_array[ii][jj]:
                        # No loglike increase, continue
                        continue

                    # Keep the best extension's results for this pixel.
                    self.loglike_array[ii][jj], self.richness_array[ii][
                        jj], parabola = loglike, rich, p
                    self.stellar_mass_array[ii][
                        jj] = self.stellar_mass_conversion * self.richness_array[
                            ii][jj]
                    self.fraction_observable_array[ii][jj] = self.loglike.f
                    self.extension_fit_array[ii][jj] = self.source.extension

                # ADW: Careful, we are leaving the extension at the
                # last value in the array, not at the maximum...

                # Debug output
                args = (jj + 1, npixels, lon[jj], lat[jj],
                        2. * self.loglike_array[ii][jj],
                        self.stellar_mass_array[ii][jj],
                        self.fraction_observable_array[ii][jj],
                        self.extension_fit_array[ii][jj])
                msg = '    (%-3i/%i) Candidate at (%.2f, %.2f) ... '
                msg += 'TS=%.1f, Mstar=%.2g, ObsFrac=%.2g, Ext=%.2g'
                logger.debug(msg % args)
                """
                # This is debugging output
                if self.config['scan']['full_pdf']:
                    DeprecationWarning("'full_pdf' is deprecated.")
                    self.richness_lower_array[ii][jj], self.richness_upper_array[ii][jj] = self.loglike.richness_interval(0.6827)
                    
                    self.richness_ulimit_array[ii][jj] = parabola.bayesianUpperLimit(0.95)

                    args = (
                        2. * self.loglike_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_lower_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_upper_array[ii][jj],
                        self.stellar_mass_conversion*self.richness_ulimit_array[ii][jj]
                    )
                    msg = 'TS=%.1f, Stellar Mass=%.1f (%.1f -- %.1f @ 0.68 CL, < %.1f @ 0.95 CL)'%(args)
                    logger.debug(msg)
                """

            # Report the best-fit pixel at this distance modulus.
            jj_max = self.loglike_array[ii].argmax()
            args = (jj_max + 1, npixels, lon[jj_max], lat[jj_max],
                    2. * self.loglike_array[ii][jj_max],
                    self.stellar_mass_conversion *
                    self.richness_array[ii][jj_max],
                    self.extension_fit_array[ii][jj_max])
            msg = '  (%-3i/%i) Max at (%.2f, %.2f) : TS=%.1f, Mstar=%.2g, Ext=%.2f' % (
                args)
            logger.info(msg)
Beispiel #47
0
    def sample(self, mode='data', mass_steps=1000, mass_min=0.1, full_data_range=False):
        """Sample the isochrone in steps of mass interpolating between the
        originally defined isochrone points.

        Parameters:
        -----------
        mode : Sampling mode; only 'data' is implemented (uniform coverage
            between the data points of the isochrone file).
        mass_steps : Number of mass steps used for the interpolation.
        mass_min : Minimum mass [Msun] used to normalize the mass PDF.
        full_data_range : If True, sample the full isochrone data range
            (including post-AGB stars) instead of stopping at `self.index`.

        Returns:
        --------
        out : (5, N) stacked array of
            mass_init : Initial mass of each point
            mass_pdf : PDF of number of stars in each point
            mass_act : Actual (current mass) of each stellar point
            mag_1 : Array of magnitudes in first band (distance modulus applied)
            mag_2 : Array of magnitudes in second band (distance modulus applied)

        Raises:
        -------
        ValueError : If `mode` is not recognized.
        """
        # Python 3.10+ removed the ABC aliases from `collections`;
        # Iterable must come from `collections.abc`.
        from collections.abc import Iterable

        if full_data_range:
            # ADW: Might be depricated 02/10/2015
            # Generate points over full isochrone data range
            select = slice(None)
        else:
            # Not generating points for the post-AGB stars,
            # but still count those stars towards the normalization
            select = slice(self.index)

        mass_steps = int(mass_steps)

        mass_init = self.mass_init[select]
        mass_act = self.mass_act[select]
        mag_1 = self.mag_1[select]
        mag_2 = self.mag_2[select]

        # ADW: Assume that the isochrones are pre-sorted by mass_init
        # This avoids some numerical instability from points that have the same
        # mass_init value (discontinuities in the isochrone).
        # ADW: Might consider using np.interp for speed
        mass_act_interpolation = scipy.interpolate.interp1d(mass_init, mass_act, assume_sorted=True)
        mag_1_interpolation = scipy.interpolate.interp1d(mass_init, mag_1, assume_sorted=True)
        mag_2_interpolation = scipy.interpolate.interp1d(mass_init, mag_2, assume_sorted=True)

        if mode == 'data':
            # Mass interpolation with uniform coverage between data points
            # from the isochrone file
            mass_interpolation = scipy.interpolate.interp1d(np.arange(len(mass_init)), mass_init)
            mass_array = mass_interpolation(np.linspace(0, len(mass_init)-1, mass_steps+1))
            d_mass = mass_array[1:] - mass_array[:-1]
            # Geometric mean of consecutive bin edges
            mass_init_array = np.sqrt(mass_array[1:] * mass_array[:-1])
            mass_pdf_array = d_mass * self.imf.pdf(mass_init_array, log_mode=False)
            mass_act_array = mass_act_interpolation(mass_init_array)
            mag_1_array = mag_1_interpolation(mass_init_array)
            mag_2_array = mag_2_interpolation(mass_init_array)
        else:
            # BUGFIX: previously an unknown mode fell through to a NameError
            # on the undefined arrays below; fail fast with a clear message.
            raise ValueError("Unrecognized sampling mode: '%s'" % mode)

        # Horizontal branch dispersion
        if self.hb_spread and (self.stage == self.hb_stage).any():
            logger.debug("Performing dispersion of horizontal branch...")
            hb = (self.stage == self.hb_stage)
            mass_init_min = self.mass_init[hb].min()
            mass_init_max = self.mass_init[hb].max()
            cut = (mass_init_array > mass_init_min) & (mass_init_array < mass_init_max)
            if isinstance(self.hb_spread, Iterable):
                # Explicit dispersion spacing
                dispersion_array = self.hb_spread
                n = len(dispersion_array)
            else:
                # Default dispersion spacing; force an odd number of points
                # so that zero dispersion is included in the grid.
                dispersion = self.hb_spread
                spacing = 0.025
                n = int(round(2.0 * self.hb_spread / spacing))
                if n % 2 != 1: n += 1
                dispersion_array = np.linspace(-dispersion, dispersion, n)

            # Reset original values (split the PDF weight over the n copies)
            mass_pdf_array[cut] = mass_pdf_array[cut] / float(n)

            # Isochrone values for points on the HB
            mass_init_hb = mass_init_array[cut]
            mass_pdf_hb = mass_pdf_array[cut]
            mass_act_hb = mass_act_array[cut]
            mag_1_hb = mag_1_array[cut]
            mag_2_hb = mag_2_array[cut]

            # Add dispersed values
            for dispersion in dispersion_array:
                if dispersion == 0.: continue
                msg = 'Dispersion=%-.4g, HB Points=%i, Iso Points=%i'%(dispersion,cut.sum(),len(mass_init_array))
                logger.debug(msg)

                mass_init_array = np.append(mass_init_array, mass_init_hb)
                mass_pdf_array = np.append(mass_pdf_array, mass_pdf_hb)
                mass_act_array = np.append(mass_act_array, mass_act_hb)
                mag_1_array = np.append(mag_1_array, mag_1_hb + dispersion)
                mag_2_array = np.append(mag_2_array, mag_2_hb + dispersion)

        # Note that the mass_pdf_array is not generally normalized to unity
        # since the isochrone data range typically covers a different range
        # of initial masses
        #mass_pdf_array /= np.sum(mass_pdf_array) # ORIGINAL
        # Normalize to the number of stars in the satellite with mass > mass_min
        mass_pdf_array /= self.imf.integrate(mass_min, self.mass_init_upper_bound)
        out = np.vstack([mass_init_array,mass_pdf_array,mass_act_array,mag_1_array,mag_2_array])
        return out
Beispiel #48
0
def validateSatellite(config, isochrone, kernel, stellar_mass, distance_modulus, trials=1, debug=False, seed=0):
    """
    Simple MC validation tool: create and fit multiple realizations of a
    satellite defined by a CompositeIsochrone object, a Kernel object, a
    normalizing stellar mass (M_sol), and a distance modulus.
    """
    logger.info('=== Validate Satellite ===')

    # TODO: Need better solution to update size??
    config.params['kernel']['params'] = [kernel.r_h]
    logger.debug('Using Plummer profile spatial model with half-light radius %.2f deg'%(config.params['kernel']['params'][0]))

    roi = ugali.observation.roi.ROI(config, kernel.lon, kernel.lat)
    simulator = ugali.simulation.simulator.Simulator(config, roi=roi)
    catalog_base = ugali.observation.catalog.Catalog(config, roi=roi)
    mask = ugali.observation.mask.Mask(config, roi)

    coords = (kernel.lon, kernel.lat)

    keys = ['mc_lon', 'mc_lat', 'mc_distance_modulus', 'mc_stellar_mass',
            'mc_radius', 'richness', 'log_likelihood', 'richness_lower',
            'richness_upper', 'richness_limit', 'f', 'stellar_mass']
    results = {key: [] for key in keys}

    numpy.random.seed(seed)

    for trial in range(trials):
        logger.info('=== Running Satellite %i ==='%trial)

        # Simulate the satellite on top of a simulated background
        catalog_satellite = simulator.satellite(isochrone, kernel, stellar_mass, distance_modulus, mc_source_id=1)
        catalog_background = simulator.satellite(mc_source_id=1)
        catalog_merge = ugali.observation.catalog.mergeCatalogs([catalog_background, catalog_satellite])

        # Fit the simulated catalog
        likelihood = ugali.analysis.likelihood.Likelihood(config, roi, mask, catalog_merge, isochrone, kernel)
        likelihood.precomputeGridSearch([distance_modulus])
        fit = likelihood.gridSearch(coords=coords, distance_modulus_index=0)
        (richness, log_likelihood, richness_lower, richness_upper,
         richness_upper_limit, richness_raw, log_likelihood_raw, p, f) = fit

        # Collect input (mc_*) and fitted quantities for this trial
        row = dict(mc_lon=kernel.lon,
                   mc_lat=kernel.lat,
                   mc_distance_modulus=distance_modulus,
                   mc_stellar_mass=stellar_mass,
                   mc_radius=kernel.r_h,
                   richness=richness,
                   log_likelihood=log_likelihood,
                   richness_lower=richness_lower,
                   richness_upper=richness_upper,
                   richness_limit=richness_upper_limit,
                   f=f,
                   stellar_mass=richness * isochrone.stellarMass())
        for key in keys:
            results[key].append(row[key])

        logger.info('MC Stellar Mass = %.2f, Measured Stellar Mass = %.2f'%(stellar_mass,richness * isochrone.stellarMass()))
        if debug:
            return likelihood, richness, log_likelihood, richness_lower, richness_upper, richness_upper_limit, richness_raw, log_likelihood_raw, p, f

    return results
Beispiel #49
0
    def get_results(self, **kwargs):
        """Assemble best-fit parameters and derived quantities from the MCMC chain.

        Keyword arguments are forwarded to the parameter estimators;
        'alpha' (confidence level) and 'burn' (burn-in samples) default to
        the sampler configuration.

        Returns a dict with two keys:
          'params'  : central (best-fit) value for each fitted parameter
          'results' : intervals and derived quantities (positions, distance,
                      extension, richness, mass, luminosity, Mv, separations)
        """
        kwargs.setdefault('alpha', self.alpha)
        kwargs.setdefault('burn', self.nburn * self.nwalkers)

        # Calculate best-fit parameters from MCMC chain
        logger.debug('Estimating parameters...')
        estimate = self.estimate_params(**kwargs)
        # Each estimate entry is (central_value, interval); keep the centers
        params = {k: v[0] for k, v in estimate.items()}
        results = dict(estimate)

        # Extra parameters from the MCMC chain
        logger.debug('Estimating auxiliary parameters...')
        try:
            results['ra'] = self.estimate('ra', **kwargs)
            results['dec'] = self.estimate('dec', **kwargs)
        except KeyError:
            # Fall back to converting the galactic best-fit position
            logger.warn("Didn't find 'ra' or 'dec'")
            ra, dec = gal2cel(results['lon'][0], results['lat'][0])
            results['ra'] = ugali.utils.stats.interval(ra)
            results['dec'] = ugali.utils.stats.interval(dec)

        ra, dec = results['ra'][0], results['dec'][0]
        glon, glat = lon, lat = results['lon'][0], results['lat'][0]
        results.update(gal=[float(glon), float(glat)])
        results.update(cel=[float(ra), float(dec)])

        # NOTE(review): this value is unconditionally overwritten below
        # (see "Celestial position angle"), so this try/except is dead code.
        try:
            results['position_angle_cel'] = self.estimate(
                'position_angle_cel', **kwargs)
        except KeyError:
            results['position_angle_cel'] = ugali.utils.stats.interval(np.nan)

        # Update the loglike to the best-fit parameters from the chain
        logger.debug('Calculating TS...')
        ts = 2 * self.loglike.value(**params)
        results['ts'] = ugali.utils.stats.interval(ts, np.nan, np.nan)

        #lon,lat = estimate['lon'][0],estimate['lat'][0]
        #
        #results.update(gal=[float(lon),float(lat)])
        #ra,dec = gal2cel(lon,lat)
        #results.update(cel=[float(ra),float(dec)])
        #results['ra'] = ugali.utils.stats.interval(ra,np.nan,np.nan)
        #results['dec'] = ugali.utils.stats.interval(dec,np.nan,np.nan)

        # Celestial position angle
        # Break ambiguity in direction with '% 180.'
        pa, pa_err = results['position_angle']
        pa_cel = gal2cel_angle(lon, lat, pa) % 180.
        pa_cel_err = np.array(pa_err) - pa + pa_cel
        results['position_angle_cel'] = ugali.utils.stats.interval(
            pa_cel, pa_cel_err[0], pa_cel_err[1])

        # Convert distance modulus interval to a physical distance interval
        mod, mod_err = estimate['distance_modulus']
        dist = mod2dist(mod)
        dist_lo, dist_hi = [mod2dist(mod_err[0]), mod2dist(mod_err[1])]
        results['distance'] = ugali.utils.stats.interval(
            dist, dist_lo, dist_hi)
        dist, dist_err = results['distance']

        # Angular extension (and converted to arcmin)
        ext, ext_err = estimate['extension']
        ext_sigma = np.nan_to_num(np.array(ext_err) - ext)
        results['extension_arcmin'] = ugali.utils.stats.interval(
            60 * ext, 60 * ext_err[0], 60 * ext_err[1])

        # Radially symmetric extension (correct for ellipticity).
        ell, ell_err = estimate['ellipticity']
        rext, rext_err = ext * np.sqrt(1 -
                                       ell), np.array(ext_err) * np.sqrt(1 -
                                                                         ell)
        rext_sigma = np.nan_to_num(np.array(rext_err) - rext)
        results['extension_radial'] = ugali.utils.stats.interval(
            rext, rext_err[0], rext_err[1])
        results['extension_radial_arcmin'] = ugali.utils.stats.interval(
            60 * rext, 60 * rext_err[0], 60 * rext_err[1])

        # Bayes factor for ellipticity
        results['ellipticity_bayes_factor'] = self.bayes_factor(
            'ellipticity', burn=kwargs['burn'])

        # Physical Size (should do this with the posteriors)
        # Radially symmetric
        dist_sigma = np.nan_to_num(np.array(dist_err) - dist)

        # Physical size from angular extension and distance;
        # fractional errors combined in quadrature
        size = np.arctan(np.radians(ext)) * dist
        size_sigma = size * np.sqrt((ext_sigma / ext)**2 +
                                    (dist_sigma / dist)**2)
        size_err = [size - size_sigma[0], size + size_sigma[1]]
        results['physical_size'] = ugali.utils.stats.interval(
            size, size_err[0], size_err[1])

        rsize = np.arctan(np.radians(rext)) * dist
        rsize_sigma = rsize * np.sqrt((rext_sigma / rext)**2 +
                                      (dist_sigma / dist)**2)
        rsize_err = [rsize - rsize_sigma[0], rsize + rsize_sigma[1]]
        results['physical_size_radial'] = ugali.utils.stats.interval(
            rsize, rsize_err[0], rsize_err[1])

        # Richness
        rich, rich_err = estimate['richness']

        # Number of observed stars (sum of p-values)
        nobs = self.loglike.p.sum()
        nobs_lo, nobs_hi = nobs + np.sqrt(nobs) * np.array([-1, 1])
        results['nobs'] = ugali.utils.stats.interval(nobs, nobs_lo, nobs_hi)

        # Number of predicted stars (pixelization effects?)
        npred = self.loglike.f * rich
        npred_lo, npred_hi = rich_err[0] * self.loglike.f, rich_err[
            1] * self.loglike.f
        results['npred'] = ugali.utils.stats.interval(npred, npred_lo,
                                                      npred_hi)

        # Careful, depends on the isochrone...
        stellar_mass = self.source.stellar_mass()
        mass = rich * stellar_mass
        mass_lo, mass_hi = rich_err[0] * stellar_mass, rich_err[
            1] * stellar_mass
        results['mass'] = ugali.utils.stats.interval(mass, mass_lo, mass_hi)

        stellar_luminosity = self.source.stellar_luminosity()
        lum = rich * stellar_luminosity
        lum_lo, lum_hi = rich_err[0] * stellar_luminosity, rich_err[
            1] * stellar_luminosity
        results['luminosity'] = ugali.utils.stats.interval(lum, lum_lo, lum_hi)

        # Absolute magnitude interval from the richness interval
        Mv = self.source.absolute_magnitude(rich)
        Mv_lo = self.source.absolute_magnitude(rich_err[0])
        Mv_hi = self.source.absolute_magnitude(rich_err[1])
        results['Mv'] = ugali.utils.stats.interval(Mv, Mv_lo, Mv_hi)

        # ADW: WARNING this is very fragile.
        # Also, this is not quite right, should cut on the CMD available space
        # NOTE(review): rebinds 'kwargs', discarding the estimator kwargs above.
        kwargs = dict(richness=rich,
                      mag_bright=16.,
                      mag_faint=23.,
                      n_trials=5000,
                      alpha=self.alpha,
                      seed=0)
        martin = self.config['results'].get('martin')
        if martin:
            logger.info("Calculating Martin magnitude...")
            # Config values > 1 are interpreted as the number of trials
            if martin > 1: kwargs['n_trials'] = martin
            Mv_martin = self.source.isochrone.absolute_magnitude_martin(
                **kwargs)
            results['Mv_martin'] = Mv_martin
        else:
            logger.warning("Skipping Martin magnitude")
            results['Mv_martin'] = np.nan

        mu = surfaceBrightness(Mv, size, dist)
        results['surface_brightness'] = ugali.utils.stats.interval(
            mu, np.nan, np.nan)

        # Constellation / IAU designation (best effort)
        try:
            results['constellation'] = ugali.utils.projector.ang2const(
                lon, lat)[1]
        except:  # NOTE(review): bare except -- deliberately best-effort
            pass
        results['iau'] = ugali.utils.projector.ang2iau(lon, lat)

        # Sexagesimal string representations of the position
        coord = SkyCoord(ra * u.deg, dec * u.deg, distance=dist * u.kpc)
        results['ra_sex'] = str(coord.ra.to_string())
        results['dec_sex'] = str(coord.dec.to_string())

        # Calculate some separations from GC, LMC, SMC
        #NED coordinates with de Grisj distance
        LMC = SkyCoord(80.8939 * u.deg,
                       -69.7561 * u.deg,
                       distance=49.89 * u.kpc)
        #NED coordinates with de Grisj distance
        SMC = SkyCoord(13.1866 * u.deg,
                       -72.8286 * u.deg,
                       distance=61.94 * u.kpc)
        # GC from astropy?
        GC = SkyCoord(266.4168262 * u.deg,
                      -29.0077969 * u.deg,
                      distance=8.0 * u.kpc)

        # 3D separations (kpc, given the distances above)
        results['d_gc'] = coord.separation_3d(GC).value
        results['d_lmc'] = coord.separation_3d(LMC).value
        results['d_smc'] = coord.separation_3d(SMC).value

        try:
            results['feh'] = float(self.source.isochrone.feh)
        except:
            results['feh'] = np.nan

        output = dict()
        output['params'] = params
        output['results'] = results
        return output
def run(self):
    """Driver for the simulation pipeline.

    Dispatches on the actions listed in ``self.opts.run``:
    'simulate' (submit simulation jobs), 'analyze' (submit analysis jobs),
    'sensitivity' (placeholder), 'merge' (combine simulation outputs),
    and 'plot' (Chernoff distribution plots).
    """
    outdir = mkdir(self.config['output']['simdir'])
    logdir = mkdir(join(outdir, 'log'))

    if 'simulate' in self.opts.run:
        logger.info("Running 'simulate'...")

        if self.opts.num is None:
            self.opts.num = self.config['simulator']['njobs']
        for i in range(self.opts.num):
            outfile = join(outdir, self.config['output']['simfile'] % i)
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulator']['script']
            # Job index doubles as the random seed
            cmd = '%s %s %s --seed %i' % (script, self.opts.config, outfile, i)
            self.batch.submit(cmd, jobname, logfile)
            time.sleep(0.1)  # throttle batch submission

    if 'analyze' in self.opts.run:
        logger.info("Running 'analyze'...")
        dirname = self.config['simulate']['dirname']
        catfiles = sorted(glob.glob(join(dirname, self.config['simulate']['catfile'])))
        popfile = join(dirname, self.config['simulate']['popfile'])
        batch = self.config['simulate']['batch']

        for i, catfile in enumerate(catfiles):
            basename = os.path.basename(catfile)
            outfile = join(outdir, basename)

            # Skip completed outputs unless forced
            if exists(outfile) and not self.opts.force:
                logger.info("Found %s; skipping..." % outfile)
                continue

            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulate']['script']
            cmd = '%s %s -p %s -c %s -o %s' % (script, self.opts.config, popfile, catfile, outfile)
            self.batch.max_jobs = batch.get('max_jobs', 200)
            opts = batch.get(self.opts.queue, dict())
            self.batch.submit(cmd, jobname, logfile, **opts)
            time.sleep(0.1)  # throttle batch submission

    if 'sensitivity' in self.opts.run:
        # Placeholder; no sensitivity action implemented in this block.
        logger.info("Running 'sensitivity'...")

    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")

        filenames = join(outdir, self.config['output']['simfile']).split('_%')[0] + '_*'
        infiles = sorted(glob.glob(filenames))

        # BUGFIX: dtype was previously taken from the undefined name 'data'
        # (NameError); use the first input file to define the table dtype.
        first = fitsio.read(infiles[0])
        table = np.empty(0, dtype=first.dtype)
        for filename in infiles:
            logger.debug("Reading %s..." % filename)
            d = fitsio.read(filename)
            t = d[~np.isnan(d['ts'])]  # drop failed fits (NaN TS)
            table = recfuncs.stack_arrays([table, t], usemask=False, asrecarray=True)

        logger.info("Found %i simulations." % len(table))
        outfile = join(outdir, "merged_sims.fits")
        logger.info("Writing %s..." % outfile)
        fitsio.write(outfile, table, clobber=True)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import ugali.utils.plotting
        import pylab as plt

        plotdir = mkdir(self.config['output']['plotdir'])

        data = fitsio.read(join(outdir, "merged_sims.fits"))
        data = data[~np.isnan(data['ts'])]

        bigfig, bigax = plt.subplots()

        # One per-distance figure plus a combined figure of all distances
        for dist in np.unique(data['fit_distance']):
            logger.info('  Plotting distance: %s' % dist)
            ts = data['ts'][data['fit_distance'] == dist]
            ugali.utils.plotting.drawChernoff(bigax, ts, bands='none', color='gray')

            fig, ax = plt.subplots(1, 2, figsize=(10, 5))
            ugali.utils.plotting.drawChernoff(ax[0], ts, bands='none', pdf=True)
            ugali.utils.plotting.drawChernoff(ax[1], ts)
            fig.suptitle(r'Chernoff ($\mu = %g$)' % dist)
            ax[0].annotate(r"$N=%i$" % len(ts), xy=(0.15, 0.85), xycoords='axes fraction',
                           bbox={'boxstyle': "round", 'fc': '1'})
            basename = 'chernoff_u%g.png' % dist
            outfile = os.path.join(plotdir, basename)
            plt.savefig(outfile)
        bigfig.suptitle('Chernoff!')
        basename = 'chernoff_all.png'
        outfile = os.path.join(plotdir, basename)
        plt.savefig(outfile)

        plt.ion()
        """
Beispiel #51
0
def mergeSparseHealpixMaps(infiles,
                           outfile=None,
                           pix_data_extension='PIX_DATA',
                           pix_field='PIX',
                           distance_modulus_extension='DISTANCE_MODULUS',
                           distance_modulus_field='DISTANCE_MODULUS',
                           default_value=healpy.UNSEEN):
    """
    Merge sparse healpix maps from multiple FITS files.

    Use the first infile to determine the basic contents (nside, distance
    moduli, and data fields) to expect for the other files.

    Parameters:
    -----------
    infiles : Input filename or list of filenames.
    outfile : If given, write the merged map with `writeSparseHealpixMap`;
        otherwise return the merged data dictionary.
    pix_data_extension : Name of the FITS extension holding pixel data.
    pix_field : Name of the pixel-index column.
    distance_modulus_extension : Name of the distance-modulus extension.
    distance_modulus_field : Name of the distance-modulus column.
    default_value : NOTE(review): currently unused in this implementation.

    Returns:
    --------
    data_dict : Merged {field: array} dictionary (only when outfile is None).
    """
    if isinstance(infiles, str): infiles = [infiles]

    # --- Inspect the first file to establish the expected structure ---
    reader = pyfits.open(infiles[0])
    nside = reader[pix_data_extension].header['NSIDE']

    distance_modulus_array = None
    for hdu in reader:
        if hdu.name == distance_modulus_extension:
            distance_modulus_array = reader[
                distance_modulus_extension].data.field(distance_modulus_field)

    # One accumulator list per data field (excluding the pixel index)
    data_dict = {key: []
                 for key in reader[pix_data_extension].data.names
                 if key != pix_field}
    reader.close()

    # --- Accumulate pixels and values from each input file ---
    pix_array = []
    for ii, infile in enumerate(infiles):
        logger.debug('(%i/%i) %s' % (ii + 1, len(infiles), infile))

        reader = pyfits.open(infile)
        distance_modulus_array_current = numpy.array(
            reader[distance_modulus_extension].data.field(
                distance_modulus_field),
            copy=True)
        # BUGFIX: close the file before the consistency check; previously
        # the 'continue' on mismatch leaked the open FITS handle.
        reader.close()
        if not numpy.array_equal(distance_modulus_array_current,
                                 distance_modulus_array):
            logger.warning("Distance moduli do not match; skipping...")
            continue

        pix_array_current = readSparseHealpixMap(infile,
                                                 pix_field,
                                                 extension=pix_data_extension,
                                                 construct_map=False)[0]
        pix_array.append(pix_array_current)

        for key in data_dict.keys():
            value_array_current = readSparseHealpixMap(
                infile,
                key,
                extension=pix_data_extension,
                construct_map=False)[1]
            data_dict[key].append(value_array_current)

        gc.collect()

    pix_master = numpy.concatenate(pix_array)
    # Pixels appearing in more than one input file are "conflicting"
    n_conflicting_pixels = len(pix_master) - len(numpy.unique(pix_master))
    if n_conflicting_pixels != 0:
        logger.warning('%i conflicting pixels during merge.' %
                       (n_conflicting_pixels))

    for key in data_dict.keys():
        if distance_modulus_array is not None:
            # 2D case: (n_moduli, n_pix) blocks concatenated along pixels
            data_dict[key] = numpy.concatenate(data_dict[key],
                                               axis=1).transpose()
        else:
            data_dict[key] = numpy.concatenate(data_dict[key])

    if outfile is not None:
        writeSparseHealpixMap(pix_master,
                              data_dict,
                              nside,
                              outfile,
                              distance_modulus_array=distance_modulus_array,
                              coordsys='NULL',
                              ordering='NULL')
    else:
        return data_dict
Beispiel #52
0
def load_file(kwargs):
    """Read a FITS file, passing `kwargs` straight through to fitsio.read."""
    filename = kwargs['filename']
    logger.debug("Loading %s..." % filename)
    return fitsio.read(**kwargs)
Beispiel #53
0
def pixelize(infiles,
             outdir='hpx',
             outbase=HPXBASE,
             nside=16,
             gzip=False,
             force=False,
             float32=False):
    """
    Break catalog up into a set of healpix files.

    Parameters:
    -----------
    infiles : Input catalog filenames (FITS tables with RA/DEC columns).
    outdir  : Output directory for the per-pixel files.
    outbase : Filename template formatted with the healpix pixel index.
    nside   : HEALPix nside used to assign objects to pixels.
    gzip    : NOTE(review): unused in the visible body -- confirm intent.
    force   : Proceed despite existing output files or mixed bands.
    float32 : Forwarded to `readfile` (presumably downcasts columns -- verify).
    """

    mkdir(outdir)
    # Refuse to clobber an existing pixelization unless forced
    outfiles = glob.glob(outdir + '/*.fits')
    if len(outfiles) and not force:
        msg = "Found files: %s" % glob.glob(outdir + '/*.fits')
        raise Exception(msg)

    #if len(outfiles):
    #    print("Removing existing files...")
    #    map(os.remove,outfiles)

    for ii, infile in enumerate(infiles):
        logger.info('(%i/%i) %s' % (ii + 1, len(infiles), infile))
        data = readfile(infile, float32)
        if data is None: continue

        # Healpix pixel assignment for each object
        catalog_pix = ang2pix(nside, data['RA'], data['DEC'])
        #### Add object pixel (hack to get correct byte order)
        #object_pix = ang2pix(NSIDE_OBJ,data['RA'],data['DEC'],nest=True)
        #name = 'HPX%i'%NSIDE_OBJ; dtype = '>i4'
        #data = recfuncs.rec_append_fields(data,name,object_pix,dtype)

        for pix in np.unique(catalog_pix):
            logger.debug("Processing pixel %s" % pix)

            # Record the photometric band(s); mixed bands are an error
            # unless 'force' is set.
            if 'BAND' in data.dtype.names:
                band = np.unique(data['BAND'])
                if len(band) != 1 and not force:
                    msg = "Found bands: %s" % band
                    raise Exception(msg)
                band = ','.join([b.strip() for b in band])
            else:
                band = 'None'

            outfile = os.path.join(outdir, outbase % pix)
            arr = data[catalog_pix == pix]

            if not os.path.exists(outfile):
                # First write for this pixel: create file and header keywords
                logger.debug("Creating %s" % outfile)
                out = fitsio.FITS(outfile, mode='rw')
                out.write(arr)
                out[1].write_key('COORDSYS',
                                 'CEL',
                                 comment='Coordinate system')
                out[1].write_key('ORDERING',
                                 'RING',
                                 comment='HEALPix ordering scheme')
                out[1].write_key('NSIDE', nside, comment='HEALPix nside')
                out[1].write_key('HPX', pix, comment='HEALPix pixel (RING)')
                out[1].write_key('BAND', band, comment='Photometric band')
            else:
                # Subsequent writes append rows to the existing table
                out = fitsio.FITS(outfile, mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s" % outfile)
            out.close()