def _parse(self,filename):
    """
    Reads an isochrone in the Dotter format and determines the age
    (log10 yrs and Gyr), metallicity (Z and [Fe/H]), and creates arrays
    with the initial stellar mass and corresponding magnitudes for each
    step along the isochrone.
    http://stellar.dartmouth.edu/models/isolf_new.html
    """
    try:
        columns = self.columns[self.survey.lower()]
    except KeyError as e:
        logger.warning('Unrecognized survey: %s'%(self.survey))
        raise(e)

    kwargs = dict(comments='#',usecols=list(columns.keys()),dtype=list(columns.values()))
    self.data = np.genfromtxt(filename,**kwargs)

    # KCB: Not sure whether the mass in Dotter isochrone output
    # files is initial mass or current mass
    self.mass_init = self.data['mass']
    self.mass_act = self.data['mass']
    self.luminosity = 10**self.data['log_lum']
    self.mag_1 = self.data[self.band_1]
    self.mag_2 = self.data[self.band_2]
    self.stage = np.tile('Main', len(self.data))

    # KCB: No post-AGB isochrone data points, right?
    self.mass_init_upper_bound = np.max(self.mass_init)

    self.mag = self.mag_1 if self.band_1_detection else self.mag_2
    self.color = self.mag_1 - self.mag_2
def __init__(self, config, default=None):
    """
    Initialize a configuration object from a filename or a dictionary.
    Provides functionality to merge with a default configuration.

    Parameters:
      config:  filename, dict, or Config object (deep copied)
      default: default configuration to merge
    Returns:
      config
    """
    self.update(self._load(default))
    self.update(self._load(config))
    self._formatFilepaths()

    # For back-compatibility...
    self.params = self

    # Run some basic validation
    # ADW: This should be run after creating filenames
    self._validate()

    # Filenames from this config (masked by existence)
    # ADW: We should not recreate filenames if they already exist
    # in the input config
    if not hasattr(self,'filenames'):
        try:
            self.filenames = self._createFilenames()
        except:
            exc_type,exc_value,exc_traceback = sys.exc_info()
            logger.warning("%s %s"%(exc_type,exc_value))
            logger.warning("Filenames could not be created for config.")
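# A hedged usage sketch for the constructor above (filenames are hypothetical;
# the import path follows the ugali.utils.config.Config usage seen elsewhere
# in these snippets). Later values override the default configuration:
#
#   from ugali.utils.config import Config
#
#   cfg = Config('my_analysis.yaml', default='default_config.yaml')
#   # Dictionaries also work, since _load() accepts a filename or a dict:
#   cfg = Config({'output': {'likedir': './results'}}, default='default_config.yaml')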
def inFootprint(footprint,ra,dec):
    """
    Check if set of ra,dec combinations are in footprint.
    Careful, input files must be in celestial coordinates.

    filename : Either healpix map or mangle polygon file
    ra,dec   : Celestial coordinates

    Returns:
    inside   : boolean array of coordinates in footprint
    """
    if footprint is None:
        return np.ones(len(ra),dtype=bool)

    try:
        if isinstance(footprint,str) and os.path.exists(footprint):
            filename = footprint
            #footprint = hp.read_map(filename,verbose=False)
            #footprint = fitsio.read(filename)['I'].ravel()
            footprint = read_map(filename)

        nside = hp.npix2nside(len(footprint))
        pix = ang2pix(nside,ra,dec)
        inside = (footprint[pix] > 0)
    except IOError:
        logger.warning("Failed to load healpix footprint; trying to use mangle...")
        inside = inMangle(filename,ra,dec)
    return inside
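# Hedged usage sketch for inFootprint() above (the footprint filename is
# hypothetical). The footprint may be an in-memory healpix array or a path
# to a healpix map / mangle polygon file:
#
#   import numpy as np
#
#   ra  = np.array([53.92, 54.10])
#   dec = np.array([-54.05, -54.20])
#   sel = inFootprint('footprint_n4096.fits.gz', ra, dec)
#   ra, dec = ra[sel], dec[sel]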
def _parse(self, filename):
    """Reads an isochrone file in the Padova (Marigo et al. 2017)
    format. Creates arrays with the initial stellar mass and
    corresponding magnitudes for each step along the isochrone.
    """
    try:
        columns = self.columns[self.survey.lower()]
    except KeyError as e:
        logger.warning('Unrecognized survey: %s' % (self.survey))
        raise (e)

    kwargs = dict(usecols=list(columns.keys()), dtype=list(columns.values()))
    self.data = np.genfromtxt(filename, **kwargs)
    # cut out anomalous point:
    # https://github.com/DarkEnergySurvey/ugali/issues/29
    self.data = self.data[self.data['stage'] != 9]

    self.mass_init = self.data['mass_init']
    self.mass_act = self.data['mass_act']
    self.luminosity = 10**self.data['log_lum']
    self.mag_1 = self.data[self.band_1]
    self.mag_2 = self.data[self.band_2]
    self.stage = self.data['stage']

    self.mass_init_upper_bound = np.max(self.mass_init)
    self.index = len(self.mass_init)

    self.mag = self.mag_1 if self.band_1_detection else self.mag_2
    self.color = self.mag_1 - self.mag_2
def __init__(self, config, observation, source):
    # Currently assuming that input mask is ROI-specific
    self.config = Config(config)

    self.roi = observation.roi
    self.mask = observation.mask
    self.catalog_full = observation.catalog

    self.clip_catalog()

    # The source model (includes kernel and isochrone)
    self.source = source

    # Effective bin size in color-magnitude space
    self.delta_mag = self.config['likelihood']['delta_mag']

    self.spatial_only = self.config['likelihood'].get('spatial_only', False)
    self.color_only = self.config['likelihood'].get('color_only', False)

    if self.spatial_only and self.color_only:
        msg = "Both 'spatial_only' and 'color_only' set"
        logger.error(msg)
        raise ValueError(msg)
    elif self.spatial_only:
        logger.warning("Likelihood calculated from spatial information only!!!")
    elif self.color_only:
        logger.warning("Likelihood calculated from color information only!!!")

    self.calc_background()
def _parse(self, filename):
    """Reads an isochrone file in the Padova (Bressan et al. 2012)
    format. Creates arrays with the initial stellar mass and
    corresponding magnitudes for each step along the isochrone.
    """
    #http://stev.oapd.inaf.it/cgi-bin/cmd_2.7
    try:
        columns = self.columns[self.survey.lower()]
    except KeyError as e:
        logger.warning('Unrecognized survey: %s' % (self.survey))
        raise (e)

    # delimiter='\t' is used to be compatible with OldPadova...
    # ADW: This should be updated, but be careful of column numbering
    kwargs = dict(delimiter='\t', usecols=list(columns.keys()),
                  dtype=list(columns.values()))
    self.data = np.genfromtxt(filename, **kwargs)

    self.mass_init = self.data['mass_init']
    self.mass_act = self.data['mass_act']
    self.luminosity = 10**self.data['log_lum']
    self.mag_1 = self.data[self.band_1]
    self.mag_2 = self.data[self.band_2]
    self.stage = self.data['stage']

    self.mass_init_upper_bound = np.max(self.mass_init)
    self.index = len(self.mass_init)

    self.mag = self.mag_1 if self.band_1_detection else self.mag_2
    self.color = self.mag_1 - self.mag_2
def createAssociations(self):
    objects = self.objects

    tol = self.config['search']['proximity']
    columns = []

    names = np.empty(len(objects),dtype=object)
    names.fill('')
    for i,refs in enumerate(self.config['search']['catalogs']):
        i += 1
        catalog = ugali.candidate.associate.SourceCatalog()
        for ref in refs:
            catalog += ugali.candidate.associate.catalogFactory(ref)

        # String length (should be greater than longest name)
        length = len(max(catalog['name'],key=len)) + 1
        dtype = 'S%i'%length; fitstype='%iA'%length

        assoc = np.empty(len(objects),dtype=dtype)
        assoc.fill('')
        idx1,idx2,dist = catalog.match(objects['GLON'],objects['GLAT'],tol=tol)
        assoc[idx1] = catalog['name'][idx2].astype(dtype)
        columns.append(pyfits.Column(name='ASSOC%i'%i,format=fitstype,array=assoc))
        columns.append(pyfits.Column(name='ANGSEP%i'%i,format='E',array=dist))

        if length > objects['NAME'].itemsize:
            logger.warning("Association name may not fit.")
        names = np.where(names=='',assoc,names)

    names = names.astype(objects['NAME'].dtype)
    objects['NAME'][:] = np.where(names=='',objects['NAME'],names)

    self.assocs = pyfits.new_table(objects.columns+pyfits.ColDefs(columns)).data
    self.assocs = self.assocs[self.assocs['NAME'].argsort()]
def randomPositions(input, nside_pix, n=1):
    """
    Generate n random positions within a full HEALPix mask of booleans, or a
    set of (lon, lat) coordinates.

    input is either a
    (1) full HEALPix mask of booleans, or
    (2) a set of (lon, lat) coordinates for catalog objects that define the
        occupied pixels.

    nside_pix is meant to be at coarser resolution than the input mask or
    catalog object positions so that gaps from star holes, bleed trails,
    cosmic rays, etc. are filled in.

    Return the longitude and latitude of the random positions (deg) and the
    total area (deg^2).
    """
    input = numpy.array(input)
    if len(input.shape) == 1:
        if healpy.npix2nside(len(input)) < nside_pix:
            logger.warning('Expected coarser resolution nside_pix in skymap.randomPositions')
        # All the valid pixels in the mask at the NSIDE for the input mask
        subpix = numpy.nonzero(input)[0]
        lon, lat = pix2ang(healpy.npix2nside(len(input)), subpix)
    elif len(input.shape) == 2:
        lon, lat = input[0], input[1]  # All catalog object positions
    else:
        logger.warning('Unexpected input dimensions for skymap.randomPositions')
    pix = surveyPixel(lon, lat, nside_pix)

    # Area with which the random points are thrown
    area = len(pix) * healpy.nside2pixarea(nside_pix, degrees=True)

    # Create mask at the coarser resolution
    mask = numpy.tile(False, healpy.nside2npix(nside_pix))
    mask[pix] = True

    # Estimate the number of points that need to be thrown based off
    # coverage fraction of the HEALPix mask
    coverage_fraction = float(numpy.sum(mask)) / len(mask)
    n_throw = int(n / coverage_fraction)

    lon, lat = [], []
    count = 0
    while len(lon) < n:
        lon_throw = numpy.random.uniform(0., 360., n_throw)
        lat_throw = numpy.degrees(numpy.arcsin(numpy.random.uniform(-1., 1., n_throw)))

        pix_throw = ugali.utils.healpix.angToPix(nside_pix, lon_throw, lat_throw)
        cut = mask[pix_throw].astype(bool)

        lon = numpy.append(lon, lon_throw[cut])
        lat = numpy.append(lat, lat_throw[cut])

        count += 1
        if count > 10:
            raise RuntimeError('Too many loops...')

    return lon[0:n], lat[0:n], area
def __init__(self, input, default=None):
    """
    Initialize a configuration object from a filename or a dictionary.
    Provides functionality to merge with a default configuration.

    Parameters:
      input:   Either filename or dictionary (deep copied)
      default: Default configuration to merge
    Returns:
      config
    """
    self.update(self._load(default))
    self.update(self._load(input))

    # For back-compatibility...
    self.params = self

    # Possible filenames from this config (masked by existence)
    try:
        self.filenames = self.getFilenames()
        self._makeFilenames()
    except:
        exc_type,exc_value,exc_traceback = sys.exc_info()
        logger.warning("%s %s"%(exc_type,exc_value))
        logger.warning("Filenames could not be created for config.")
def download(self, pixel, outdir=None, force=False):
    import pyfits

    if outdir is None:
        outdir = './'
    else:
        mkdir(outdir)
    sqldir = mkdir(os.path.join(outdir,'sql'))
    self._setup_desdbi()

    basename = self.basename + "_%04d"%pixel['name']
    sqlname = os.path.join(sqldir,basename+'.sql')
    taskname = basename
    outfile = os.path.join(outdir,basename+".fits")
    # ADW: There should be a 'force' option here
    if os.path.exists(outfile) and not force:
        logger.warning("Found %s; skipping..."%(outfile))
        return

    logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)"%(pixel))
    logger.info("Working on "+sqlname)

    self.generate_query(pixel['ra_min'],pixel['ra_max'],pixel['dec_min'],pixel['dec_max'],sqlname,outfile)
    ret = self.query(self.release,taskname,sqlname)
    if ret != 0:
        msg = "Download failed to complete."
        raise Exception(msg)

    return outfile
def download(self, pixel, outdir=None, force=False):
    if outdir is None:
        outdir = './'
    else:
        mkdir(outdir)
    sqldir = mkdir(os.path.join(outdir, 'sql'))
    self._setup_casjobs()

    basename = self.basename + "_%04d" % pixel['name']
    sqlname = os.path.join(sqldir, basename + '.sql')
    dbname = basename + '_output'
    taskname = basename
    outfile = os.path.join(outdir, basename + ".fits")
    if os.path.exists(outfile) and not force:
        logger.warning("Found %s; skipping..." % (outfile))
        return

    logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)" % (pixel))
    logger.info("Working on " + sqlname)

    self.generate_query(pixel['ra_min'], pixel['ra_max'],
                        pixel['dec_min'], pixel['dec_max'],
                        sqlname, dbname)
    try:
        self.query(self.release, taskname, sqlname)
    except subprocess.CalledProcessError as e:
        logger.error(e.output)
        self.drop(dbname)
        raise e
def _parse(self, filename):
    """
    Reads an isochrone in the Dotter 2016 format and determines the age
    (Gyr), metallicity (Z), and creates arrays with the initial stellar
    mass and corresponding magnitudes for each step along the isochrone.
    """
    try:
        columns = self.columns[self.survey.lower()]
    except KeyError as e:
        logger.warning('Unrecognized survey: %s' % (self.survey))
        raise (e)

    kwargs = dict(comments='#', usecols=list(columns.keys()),
                  dtype=list(columns.values()))
    data = np.genfromtxt(filename, **kwargs)

    self.mass_init = data['mass_init']
    self.mass_act = data['mass_act']
    self.luminosity = 10**data['log_lum']
    self.mag_1 = data[self.band_1]
    self.mag_2 = data[self.band_2]
    self.stage = data['stage']

    # Check where post-AGB isochrone data points begin
    self.mass_init_upper_bound = np.max(self.mass_init)
    self.index = np.nonzero(self.stage >= 4)[0][0]

    self.mag = self.mag_1 if self.band_1_detection else self.mag_2
    self.color = self.mag_1 - self.mag_2
def simple_split(config,dirname='split',force=False):
    config = Config(config)
    filenames = config.getFilenames()
    healpix = filenames['pix'].compressed()

    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']

    release = config['data']['release'].lower()
    band_1 = config['catalog']['mag_1_band']
    band_2 = config['catalog']['mag_2_band']

    mangledir = config['mangle']['dirname']

    mangle_file_1 = join(mangledir,config['mangle']['filename_1'])
    logger.info("Reading %s..."%mangle_file_1)
    mangle_1 = healpy.read_map(mangle_file_1)

    mangle_file_2 = join(mangledir,config['mangle']['filename_2'])
    logger.info("Reading %s..."%mangle_file_2)
    mangle_2 = healpy.read_map(mangle_file_2)

    basedir,basename = os.path.split(config['mask']['dirname'])
    if basename == dirname:
        msg = "Input and output directory are the same."
        raise Exception(msg)
    outdir = mkdir(os.path.join(basedir,dirname))

    mask_1 = os.path.basename(config['mask']['basename_1'])
    mask_2 = os.path.basename(config['mask']['basename_2'])

    for band,mangle,base in [(band_1,mangle_1,mask_1),(band_2,mangle_2,mask_2)]:
        maglim = MAGLIMS[release][band]

        nside_mangle = healpy.npix2nside(len(mangle))
        if nside_mangle != nside_pixel:
            msg = "Mangle nside different from pixel nside"
            logger.warning(msg)
            #raise Exception(msg)

        pixels = np.nonzero((mangle>0)&(mangle>maglim))[0]
        print len(pixels)
        superpix = superpixel(pixels,nside_mangle,nside_catalog)
        print healpix
        for hpx in healpix:
            outfile = join(outdir,base)%hpx
            if os.path.exists(outfile) and not force:
                logger.warning("Found %s; skipping..."%outfile)
                continue

            pix = pixels[superpix == hpx]
            print hpx, len(pix)
            maglims = maglim*np.ones(len(pix))
            data = dict(MAGLIM=maglims)
            logger.info('Writing %s...'%outfile)
            ugali.utils.skymap.writeSparseHealpixMap(pix,data,nside_pixel,outfile)
def match_htm(data, radius=1.0):
    """ NOT IMPLEMENTED """
    logger.warning("'match_htm' not implemented")
    if True: return

    import esutil.htm
    htm = esutil.htm.HTM()
    kwargs = dict(radius=radius, maxmatch=-1)
    m = htm.match(data['RA'], data['DEC'], data['RA'], data['DEC'], **kwargs)
def writeSparseHealpixMap(pix, data_dict, nside, outfile,
                          distance_modulus_array = None,
                          coordsys = 'NULL', ordering = 'NULL',
                          header_dict = None):
    """
    Sparse HEALPix maps are used to efficiently store maps of the sky by only
    writing out the pixels that contain data.

    Three-dimensional data can be saved by supplying a distance modulus array
    which is stored in a separate extension.

    coordsys [gal, cel]
    ordering [ring, nest]
    """
    hdul = pyfits.HDUList()

    # Pixel data extension
    columns_array = [pyfits.Column(name = 'PIX', format = 'K', array = pix)]

    for key in data_dict.keys():
        if data_dict[key].shape[0] != len(pix):
            logger.warning('First dimension of column %s (%i) does not match number of pixels (%i).'%(key, data_dict[key].shape[0], len(pix)))

        if len(data_dict[key].shape) == 1:
            columns_array.append(pyfits.Column(name = key,
                                               format = 'E',
                                               array = data_dict[key]))
        elif len(data_dict[key].shape) == 2:
            columns_array.append(pyfits.Column(name = key,
                                               format = '%iE'%(data_dict[key].shape[1]),
                                               array = data_dict[key]))
        else:
            logger.warning('Unexpected number of data dimensions for column %s.'%(key))

    hdu_pix_data = pyfits.new_table(columns_array)
    hdu_pix_data.header.update('NSIDE', nside)
    hdu_pix_data.header.update('COORDSYS', coordsys.upper())
    hdu_pix_data.header.update('ORDERING', ordering.upper())
    hdu_pix_data.header.update(header_dict)
    hdu_pix_data.name = 'PIX_DATA'
    hdul.append(hdu_pix_data)

    # Distance modulus extension
    if distance_modulus_array is not None:
        hdu_distance_modulus = pyfits.new_table([pyfits.Column(name = 'DISTANCE_MODULUS',
                                                               format = 'E',
                                                               array = distance_modulus_array)])
        hdu_distance_modulus.name = 'DISTANCE_MODULUS'
        hdul.append(hdu_distance_modulus)

    hdul.writeto(outfile, clobber = True)
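# Hedged usage sketch for writeSparseHealpixMap() above (values are
# hypothetical); this mirrors the MAGLIM call made in simple_split():
#
#   import numpy as np
#
#   nside = 4096
#   pix = np.array([12, 13, 99])                      # occupied pixels
#   data = dict(MAGLIM=np.array([23.1, 23.0, 22.8]))  # one value per pixel
#   writeSparseHealpixMap(pix, data, nside, 'maglim_sparse.fits',
#                         coordsys='cel', ordering='ring')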
def randomPositions(input, nside_pix, n=1):
    """
    Generate n random positions within a full HEALPix mask of booleans, or a
    set of (lon, lat) coordinates.

    Parameters:
    -----------
    input :     (1) full HEALPix mask of booleans, or (2) a set of (lon, lat)
                coordinates for catalog objects that define the occupied pixels.
    nside_pix : nside_pix is meant to be at coarser resolution than the input
                mask or catalog object positions so that gaps from star holes,
                bleed trails, cosmic rays, etc. are filled in.

    Returns:
    --------
    lon,lat,area : Return the longitude and latitude of the random positions
                   (deg) and the total area (deg^2).
    """
    input = np.array(input)
    if len(input.shape) == 1:
        if hp.npix2nside(len(input)) < nside_pix:
            logger.warning('Expected coarser resolution nside_pix in skymap.randomPositions')
        # All the valid pixels in the mask at the NSIDE for the input mask
        subpix = np.nonzero(input)[0]
        lon, lat = pix2ang(hp.npix2nside(len(input)), subpix)
    elif len(input.shape) == 2:
        lon, lat = input[0], input[1]  # All catalog object positions
    else:
        logger.warning('Unexpected input dimensions for skymap.randomPositions')
    pix = surveyPixel(lon, lat, nside_pix)

    # Area with which the random points are thrown
    area = len(pix) * hp.nside2pixarea(nside_pix, degrees=True)

    # Create mask at the coarser resolution
    mask = np.tile(False, hp.nside2npix(nside_pix))
    mask[pix] = True

    # Estimate the number of points that need to be thrown based off
    # coverage fraction of the HEALPix mask
    coverage_fraction = float(np.sum(mask)) / len(mask)
    n_throw = int(n / coverage_fraction)

    lon, lat = [], []
    count = 0
    while len(lon) < n:
        lon_throw = np.random.uniform(0., 360., n_throw)
        lat_throw = np.degrees(np.arcsin(np.random.uniform(-1., 1., n_throw)))

        pix_throw = ugali.utils.healpix.angToPix(nside_pix, lon_throw, lat_throw)
        cut = mask[pix_throw].astype(bool)

        lon = np.append(lon, lon_throw[cut])
        lat = np.append(lat, lat_throw[cut])

        count += 1
        if count > 10:
            raise RuntimeError('Too many loops...')

    return lon[0:n], lat[0:n], area
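# Hedged usage sketch for randomPositions() above (nside values are
# hypothetical; assumes healpy is importable). The boolean mask should be
# at finer resolution than nside_pix, as the docstring requires:
#
#   import numpy as np
#   import healpy as hp
#
#   nside_mask, nside_pix = 4096, 256
#   mask = np.zeros(hp.nside2npix(nside_mask), dtype=bool)
#   mask[:10000] = True   # mark a small patch of valid pixels
#   lon, lat, area = randomPositions(mask, nside_pix, n=100)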
def submit_all(self, coords=None, queue=None, debug=False):
    """
    Submit likelihood analyses on a set of coordinates. If
    coords is `None`, submit all coordinates in the footprint.

    Inputs:
    coords : Array of target locations in Galactic coordinates.
    queue  : Overwrite submit queue.
    debug  : Don't run.
    """
    if coords is None:
        pixels = np.arange(hp.nside2npix(self.nside_likelihood))
    else:
        coords = np.asarray(coords)
        if coords.ndim == 1:
            coords = np.array([coords])
        if coords.shape[1] == 2:
            lon, lat = coords.T
            radius = np.zeros(len(lon))
        elif coords.shape[1] == 3:
            lon, lat, radius = coords.T
        else:
            raise Exception("Unrecognized coords shape:" + str(coords.shape))

        #ADW: targets is still in glon,glat
        if self.config['coords']['coordsys'].lower() == 'cel':
            lon, lat = gal2cel(lon, lat)

        vec = ang2vec(lon, lat)
        pixels = np.zeros(0, dtype=int)
        for v, r in zip(vec, radius):
            pix = query_disc(self.nside_likelihood, v, r,
                             inclusive=True, fact=32)
            pixels = np.hstack([pixels, pix])
        #pixels = np.unique(pixels)

    inside = ugali.utils.skymap.inFootprint(self.config, pixels)
    if inside.sum() != len(pixels):
        logger.warning("Ignoring pixels outside survey footprint:\n" + str(pixels[~inside]))
    if inside.sum() == 0:
        logger.warning("No pixels inside footprint.")
        return

    # Only write the configfile once
    outdir = mkdir(self.config['output']['likedir'])
    # Actually copy config instead of re-writing
    shutil.copy(self.config.filename, outdir)
    configfile = join(outdir, os.path.basename(self.config.filename))

    pixels = pixels[inside]
    self.submit(pixels, queue=queue, debug=debug, configfile=configfile)
def randomPositions(input, nside_pix, n=1):
    """
    Generate n random positions within a full HEALPix mask of booleans, or a
    set of (lon, lat) coordinates.

    nside_pix is meant to be at coarser resolution than the input mask or
    catalog object positions so that gaps from star holes, bleed trails,
    cosmic rays, etc. are filled in.

    Return the longitude and latitude of the random positions and the total
    area (deg^2).

    Probably there is a faster algorithm, but limited much more by the
    simulation and fitting time than by the time it takes to generate random
    positions within the mask.
    """
    input = numpy.array(input)
    if len(input.shape) == 1:
        # All the valid pixels in the mask at the NSIDE for the input mask
        subpix = numpy.nonzero(input)[0]
        lon, lat = pix2ang(healpy.npix2nside(len(input)), subpix)
    elif len(input.shape) == 2:
        lon, lat = input[0], input[1]  # All catalog object positions
    else:
        logger.warning('Unexpected input dimensions for skymap.randomPositions')
    pix = surveyPixel(lon, lat, nside_pix)

    # Area with which the random points are thrown
    area = len(pix) * healpy.nside2pixarea(nside_pix, degrees=True)

    lon = []
    lat = []
    for ii in range(0, n):
        # Choose an unmasked pixel at random, which is OK because HEALPix is an equal area scheme
        pix_ii = pix[numpy.random.randint(0, len(pix))]
        lon_ii, lat_ii = ugali.utils.projector.pixToAng(nside_pix, pix_ii)
        projector = ugali.utils.projector.Projector(lon_ii, lat_ii)

        inside = False
        while not inside:
            # Apply random offset
            arcminToDegree = 1 / 60.
            resolution = arcminToDegree * healpy.nside2resol(nside_pix, arcmin=True)
            x = 2. * (numpy.random.rand() - 0.5) * resolution  # Using factor 2 to be conservative
            y = 2. * (numpy.random.rand() - 0.5) * resolution

            lon_candidate, lat_candidate = projector.imageToSphere(x, y)

            # Make sure that the random position does indeed fall within the randomly selected pixel
            if ugali.utils.projector.angToPix(nside_pix, lon_candidate, lat_candidate) == pix_ii:
                inside = True

        lon.append(lon_candidate)
        lat.append(lat_candidate)

    return numpy.array(lon), numpy.array(lat), area
def run(self,grid=None,outdir=None,force=False):
    if grid is None:
        aa,zz = self.create_grid()
    else:
        aa,zz = grid

    for a,z in zip(aa,zz):
        try:
            self.download(a,z,outdir,force)
        except RuntimeError as msg:
            logger.warning(msg)
def get_cat_dir():
    """Get the ugali catalog directory."""
    dirname = os.path.join(get_ugali_dir(),'catalogs')

    if not os.path.exists(dirname):
        from ugali.utils.logger import logger
        msg = "Catalog directory not found:\n%s"%dirname
        logger.warning(msg)

    return dirname
def get_iso_dir():
    """Get the ugali isochrone directory."""
    dirname = os.path.join(get_ugali_dir(),'isochrones')

    if not os.path.exists(dirname):
        from ugali.utils.logger import logger
        msg = "Isochrone directory not found:\n%s"%dirname
        logger.warning(msg)

    return dirname
def do_plot(args):
    import ugali.utils.plotting
    import pylab as plt
    import triangle

    config, name, label, coord = args
    print args

    filenames = make_filenames(config, label)
    srcfile = filenames['srcfile']
    samfile = filenames['samfile']
    memfile = filenames['memfile']

    if not exists(srcfile):
        logger.warning("Couldn't find %s; skipping..." % srcfile)
        return
    if not exists(samfile):
        logger.warning("Couldn't find %s; skipping..." % samfile)
        return

    config = ugali.utils.config.Config(config)
    burn = config['mcmc']['nburn'] * config['mcmc']['nwalkers']

    source = ugali.analysis.source.Source()
    source.load(srcfile, section='source')

    outfile = samfile.replace('.npy', '.png')
    ugali.utils.plotting.plotTriangle(srcfile, samfile, burn=burn)
    logger.info(" Writing %s..." % outfile)
    plt.savefig(outfile, bbox_inches='tight', dpi=60)
    plt.close()

    plotter = ugali.utils.plotting.SourcePlotter(source, config, radius=0.5)

    data = pyfits.open(memfile)[1].data if exists(memfile) else None
    if data is not None:
        plt.figure()
        kernel, isochrone = source.kernel, source.isochrone
        ugali.utils.plotting.plotMembership(config, data, kernel, isochrone)
        outfile = samfile.replace('.npy', '_mem.png')
        logger.info(" Writing %s..." % outfile)
        plt.savefig(outfile, bbox_inches='tight', dpi=60)
        plt.close()

        plotter.plot6(data)
        outfile = samfile.replace('.npy', '_6panel.png')
        logger.info(" Writing %s..." % outfile)
        plt.savefig(outfile, bbox_inches='tight', dpi=60)
        plt.close()

    plotter.plot4()
    outfile = samfile.replace('.npy', '_4panel.png')
    logger.info(" Writing %s..." % outfile)
    plt.savefig(outfile, bbox_inches='tight', dpi=60)
    plt.close()
def spatialBin(self, roi):
    """
    Calculate indices of ROI pixels corresponding to object locations.
    """
    if hasattr(self,'pixel_roi_index') and hasattr(self,'pixel'):
        logger.warning('Catalog already spatially binned')
        return

    # ADW: Not safe to set index = -1 (since it will access last entry);
    # np.inf would be better...
    self.pixel = ang2pix(self.config['coords']['nside_pixel'],self.lon,self.lat)
    self.pixel_roi_index = roi.indexROI(self.lon,self.lat)

    logger.info("Found %i objects outside ROI"%(self.pixel_roi_index < 0).sum())
def _photometricErrors(self, n_per_bin=100, plot=False):
    """
    Realistic photometric errors estimated from catalog objects and mask.
    Extend below the magnitude threshold with a flat extrapolation.
    """
    self.catalog.spatialBin(self.roi)

    if len(self.catalog.mag_1) < n_per_bin:
        logger.warning("Catalog contains fewer objects than requested to calculate errors.")
        n_per_bin = int(len(self.catalog.mag_1) / 3)

    # Band 1
    mag_1_thresh = self.mask.mask_1.mask_roi_sparse[self.catalog.pixel_roi_index] - self.catalog.mag_1
    sorting_indices = numpy.argsort(mag_1_thresh)
    mag_1_thresh_sort = mag_1_thresh[sorting_indices]
    mag_err_1_sort = self.catalog.mag_err_1[sorting_indices]

    # ADW: Can't this be done with numpy.median(axis=?)
    mag_1_thresh_medians = []
    mag_err_1_medians = []
    for i in range(0, int(len(mag_1_thresh) / float(n_per_bin))):
        mag_1_thresh_medians.append(numpy.median(mag_1_thresh_sort[n_per_bin * i: n_per_bin * (i + 1)]))
        mag_err_1_medians.append(numpy.median(mag_err_1_sort[n_per_bin * i: n_per_bin * (i + 1)]))

    if mag_1_thresh_medians[0] > 0.:
        mag_1_thresh_medians = numpy.insert(mag_1_thresh_medians, 0, -99.)
        mag_err_1_medians = numpy.insert(mag_err_1_medians, 0, mag_err_1_medians[0])

    self.photo_err_1 = scipy.interpolate.interp1d(mag_1_thresh_medians, mag_err_1_medians,
                                                  bounds_error=False, fill_value=mag_err_1_medians[-1])

    # Band 2
    mag_2_thresh = self.mask.mask_2.mask_roi_sparse[self.catalog.pixel_roi_index] - self.catalog.mag_2
    sorting_indices = numpy.argsort(mag_2_thresh)
    mag_2_thresh_sort = mag_2_thresh[sorting_indices]
    mag_err_2_sort = self.catalog.mag_err_2[sorting_indices]

    mag_2_thresh_medians = []
    mag_err_2_medians = []
    for i in range(0, int(len(mag_2_thresh) / float(n_per_bin))):
        mag_2_thresh_medians.append(numpy.median(mag_2_thresh_sort[n_per_bin * i: n_per_bin * (i + 1)]))
        mag_err_2_medians.append(numpy.median(mag_err_2_sort[n_per_bin * i: n_per_bin * (i + 1)]))

    if mag_2_thresh_medians[0] > 0.:
        mag_2_thresh_medians = numpy.insert(mag_2_thresh_medians, 0, -99.)
        mag_err_2_medians = numpy.insert(mag_err_2_medians, 0, mag_err_2_medians[0])

    self.photo_err_2 = scipy.interpolate.interp1d(mag_2_thresh_medians, mag_err_2_medians,
                                                  bounds_error=False, fill_value=mag_err_2_medians[-1])
def _applySelection(self, selection=None):
    # ADW: This is a hack (eval is unsafe!)
    if selection is None:
        selection = self.config['catalog'].get('selection')

    if not selection:
        pass
    elif 'self.data' not in selection:
        msg = "Selection does not contain 'data'"
        raise Exception(msg)
    else:
        logger.warning('Evaluating selection: \n"%s"' % selection)
        sel = eval(selection)
        self.data = self.data[sel]
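# Example of the kind of selection string consumed by _applySelection()
# above (column names are hypothetical). The string must reference
# 'self.data' and is eval'd into a boolean mask:
#
#   selection = "(self.data['FLAGS_G'] == 0) & (self.data['MAG_PSF_G'] < 23.)"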
def readMangleFile(infile, lon, lat, index=None):
    """
    Mangle must be set up on your system.
    The index argument is a temporary file naming convention to avoid file conflicts.
    Coordinates must be given in the native coordinate system of the Mangle file.
    """
    if index is None:
        index = numpy.random.randint(0, 1.e10)

    coordinate_file = 'temp_coordinate_%010i.dat' % (index)
    maglim_file = 'temp_maglim_%010i.dat' % (index)

    writer = open(coordinate_file, 'w')
    for ii in range(0, len(lon)):
        writer.write('%12.5f%12.5f\n' % (lon[ii], lat[ii]))
    writer.close()

    os.system('polyid -W %s %s %s || exit' % (infile, coordinate_file, maglim_file))

    reader = open(maglim_file)
    lines = reader.readlines()
    reader.close()

    os.remove(maglim_file)
    os.remove(coordinate_file)

    maglim = []
    for ii in range(1, len(lines)):
        if len(lines[ii].split()) == 3:
            maglim.append(float(lines[ii].split()[2]))
        elif len(lines[ii].split()) == 2:
            maglim.append(0.)  # Coordinates outside of the MANGLE polygon
        elif len(lines[ii].split()) > 3:
            #print 'WARNING: coordinate inside multiple polygons, using weight from first polygon'
            #maglim.append(float(lines[ii].split()[2]))
            # Mask out the pixels inside multiple polygons
            logger.warning('Coordinate inside multiple polygons, masking that coordinate.')
            maglim.append(0.)
        else:
            logger.warning('Cannot parse maglim file, unexpected number of columns, stop reading now.')
            break

    maglim = numpy.array(maglim)
    return maglim
def readMangleFile(infile, lon, lat, index = None):
    """
    DEPRECATED: 2018-05-04

    Mangle must be set up on your system.
    The index argument is a temporary file naming convention to avoid file conflicts.
    Coordinates must be given in the native coordinate system of the Mangle file.
    """
    msg = "'mask.readMangleFile': ADW 2018-05-05"
    DeprecationWarning(msg)

    if index is None:
        index = np.random.randint(0, 1.e10)

    coordinate_file = 'temp_coordinate_%010i.dat'%(index)
    maglim_file = 'temp_maglim_%010i.dat'%(index)

    writer = open(coordinate_file, 'w')
    for ii in range(0, len(lon)):
        writer.write('%12.5f%12.5f\n'%(lon[ii], lat[ii]))
    writer.close()

    os.system('polyid -W %s %s %s || exit'%(infile, coordinate_file, maglim_file))

    reader = open(maglim_file)
    lines = reader.readlines()
    reader.close()

    os.remove(maglim_file)
    os.remove(coordinate_file)

    maglim = []
    for ii in range(1, len(lines)):
        if len(lines[ii].split()) == 3:
            maglim.append(float(lines[ii].split()[2]))
        elif len(lines[ii].split()) == 2:
            maglim.append(0.)  # Coordinates outside of the MANGLE polygon
        elif len(lines[ii].split()) > 3:
            msg = 'Coordinate inside multiple polygons, masking that coordinate.'
            logger.warning(msg)
            maglim.append(0.)
        else:
            msg = 'Cannot parse maglim file, unexpected number of columns.'
            logger.error(msg)
            break

    maglim = np.array(maglim)
    return maglim
def spatialBin(self, roi):
    """
    Calculate indices of ROI pixels corresponding to object locations.
    """
    if hasattr(self,'pixel_roi_index') and hasattr(self,'pixel'):
        logger.warning('Catalog already spatially binned')
        return

    # ADW: Not safe to set index = -1 (since it will access last entry);
    # np.inf would be better...
    self.pixel = ang2pix(self.config['coords']['nside_pixel'],self.lon,self.lat)
    self.pixel_roi_index = roi.indexROI(self.lon,self.lat)

    if numpy.any(self.pixel_roi_index < 0):
        logger.warning("Objects found outside ROI")
def project(self, projector = None):
    """
    Project coordinates on sphere to image plane using Projector class.
    """
    if projector is None:
        try:
            self.projector = ugali.utils.projector.Projector(self.config['coords']['reference'][0],
                                                             self.config['coords']['reference'][1])
        except KeyError:
            logger.warning('Projection reference point is median (lon, lat) of catalog objects')
            self.projector = ugali.utils.projector.Projector(numpy.median(self.lon), numpy.median(self.lat))
    else:
        self.projector = projector

    self.x, self.y = self.projector.sphereToImage(self.lon, self.lat)
def get_ugali_dir():
    """Get the path to the ugali data directory from the environment"""
    dirname = os.getenv('UGALIDIR')

    # Get the HOME directory
    if not dirname:
        dirname = os.path.join(os.getenv('HOME'),'.ugali')

    if not os.path.exists(dirname):
        from ugali.utils.logger import logger
        msg = "Creating UGALIDIR:\n%s"%dirname
        logger.warning(msg)

    return mkdir(dirname)
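# Hedged usage sketch for get_ugali_dir() above (the path shown is
# hypothetical). The lookup order is $UGALIDIR, then $HOME/.ugali, and the
# directory is created if it does not exist:
#
#   import os
#   os.environ['UGALIDIR'] = '/data/ugali'   # optional override
#   datadir = get_ugali_dir()                # -> '/data/ugali'
#   isodir = get_iso_dir()                   # -> '/data/ugali/isochrones'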
def readColorLUT(infile, distance_modulus, mag_1, mag_2, mag_err_1, mag_err_2):
    """
    Take in a color look-up table and return the signal color evaluated for each object.
    Consider making the argument a Catalog object rather than magnitudes and uncertainties.
    """
    reader = pyfits.open(infile)

    distance_modulus_array = reader['DISTANCE_MODULUS'].data.field('DISTANCE_MODULUS')
    if not numpy.any(numpy.fabs(distance_modulus_array - distance_modulus) < 1.e-3):
        logger.warning("Distance modulus %.2f not available in file %s" % (distance_modulus, infile))
        logger.warning(' available distance moduli:' + str(distance_modulus_array))
        return False

    distance_modulus_key = '%.2f' % (distance_modulus_array[numpy.argmin(numpy.fabs(distance_modulus_array - distance_modulus))])

    bins_mag_err = reader['BINS_MAG_ERR'].data.field('BINS_MAG_ERR')
    bins_mag_1 = reader['BINS_MAG_1'].data.field('BINS_MAG_1')
    bins_mag_2 = reader['BINS_MAG_2'].data.field('BINS_MAG_2')

    # Note that magnitude uncertainty is always assigned by rounding up, is this the right thing to do?
    index_mag_err_1 = numpy.clip(numpy.digitize(mag_err_1, bins_mag_err) - 1,
                                 0, len(bins_mag_err) - 2)
    index_mag_err_2 = numpy.clip(numpy.digitize(mag_err_2, bins_mag_err) - 1,
                                 0, len(bins_mag_err) - 2)

    u_color = numpy.zeros(len(mag_1))

    for index_mag_err_1_select in range(0, len(bins_mag_err) - 1):
        for index_mag_err_2_select in range(0, len(bins_mag_err) - 1):
            cut = numpy.logical_and(index_mag_err_1 == index_mag_err_1_select,
                                    index_mag_err_2 == index_mag_err_2_select)
            if numpy.sum(cut) < 1:
                continue

            histo = reader[distance_modulus_key].data.field('%i%i' % (index_mag_err_1_select, index_mag_err_2_select))
            u_color[cut] = ugali.utils.binning.take2D(histo,
                                                      mag_2[cut], mag_1[cut],
                                                      bins_mag_2, bins_mag_1)

    reader.close()
    return u_color
def bayes_factor(self,param,burn=None,clip=10.0,bins=50):
    # CAREFUL: Assumes a flat prior...
    try:
        data = self.samples.get(param,burn=burn,clip=clip)
    except ValueError as msg:
        logger.warning(msg)
        return ugali.utils.stats.interval(np.nan)

    bmin,bmax = self.source.params[param].bounds
    bins = np.linspace(bmin,bmax,bins)
    n,b = np.histogram(data,bins=bins,normed=True)
    prior = 1.0/(bmax-bmin)
    posterior = n[0]  # Excluding the null hypothesis
    bf = prior/posterior
    return ugali.utils.stats.interval(bf)
def readSparseHealpixMaps(infiles, field, extension='PIX_DATA', default_value=healpy.UNSEEN, construct_map=True):
    """
    Read multiple sparse healpix maps and output the results
    identically to a single file read.
    """
    if isinstance(infiles, str):
        infiles = [infiles]

    pix_array = []
    value_array = []

    # Create a map based on the first file in the list
    map = readSparseHealpixMap(infiles[0], field, extension=extension,
                               default_value=healpy.UNSEEN, construct_map=True)

    for ii in range(0, len(infiles)):
        logger.debug('(%i/%i) %s' % (ii + 1, len(infiles), infiles[ii]))
        pix_array_current, value_array_current = readSparseHealpixMap(infiles[ii], field,
                                                                      extension=extension,
                                                                      construct_map=False)
        pix_array.append(pix_array_current)
        value_array.append(value_array_current)
        map[pix_array[ii]] = value_array[ii]

    # Check to see whether there are any conflicts
    pix_master = numpy.concatenate(pix_array)
    value_master = numpy.concatenate(value_array)

    n_conflicting_pixels = len(pix_master) - len(numpy.unique(pix_master))
    if n_conflicting_pixels != 0:
        logger.warning('%i conflicting pixels during merge.' % (n_conflicting_pixels))

    if construct_map:
        return map
    else:
        if n_conflicting_pixels == 0:
            pix_master = numpy.sort(pix_master)
            return pix_master, map[pix_master]
        else:
            pix_valid = numpy.nonzero(map != default_value)[0]
            return pix_valid, map[pix_valid]
def project(self, projector = None):
    """
    Project coordinates on sphere to image plane using Projector class.
    """
    msg = "'%s.project': ADW 2018-05-05"%self.__class__.__name__
    DeprecationWarning(msg)

    if projector is None:
        try:
            self.projector = ugali.utils.projector.Projector(self.config['coords']['reference'][0],
                                                             self.config['coords']['reference'][1])
        except KeyError:
            logger.warning('Projection reference point is median (lon, lat) of catalog objects')
            self.projector = ugali.utils.projector.Projector(np.median(self.lon), np.median(self.lat))
    else:
        self.projector = projector

    self.x, self.y = self.projector.sphereToImage(self.lon, self.lat)
def do_results(args):
    """ Write the results output file """
    config,name,label,coord = args
    filenames = make_filenames(config,label)
    srcfile = filenames['srcfile']
    samples = filenames['samfile']

    if not exists(srcfile):
        logger.warning("Couldn't find %s; skipping..."%srcfile)
        return
    if not exists(samples):
        logger.warning("Couldn't find %s; skipping..."%samples)
        return

    logger.info("Writing %s..."%srcfile)
    ugali.analysis.mcmc.write_results(config,srcfile,samples,srcfile)
def submit_all(self, coords=None, queue=None, debug=False):
    """
    Submit likelihood analyses on a set of coordinates. If
    coords is `None`, submit all coordinates in the footprint.

    Inputs:
    coords : Array of target locations in Galactic coordinates.
    queue  : Overwrite submit queue.
    debug  : Don't run.
    """
    if coords is None:
        pixels = numpy.arange(healpy.nside2npix(self.nside_likelihood))
    else:
        coords = numpy.asarray(coords)
        if coords.ndim == 1:
            coords = numpy.array([coords])
        if coords.shape[1] == 2:
            glon,glat = coords.T
            radius = numpy.zeros(len(glon))
        elif coords.shape[1] == 3:
            glon,glat,radius = coords.T
        else:
            raise Exception("Unrecognized coords shape:"+str(coords.shape))

        vec = ang2vec(glon,glat)
        pixels = numpy.zeros(0, dtype=int)
        for v,r in zip(vec,radius):
            pix = query_disc(self.nside_likelihood,v,r,inclusive=True,fact=32)
            pixels = numpy.hstack([pixels, pix])
        #pixels = numpy.unique(pixels)

    inside = ugali.utils.skymap.inFootprint(self.config,pixels)
    if inside.sum() != len(pixels):
        logger.warning("Ignoring pixels outside survey footprint:\n"+str(pixels[~inside]))
    if inside.sum() == 0:
        logger.warning("No pixels inside footprint.")
        return

    # Only write the configfile once
    outdir = mkdir(self.config['output']['likedir'])
    configfile = '%s/config_queue.py'%(outdir)
    self.config.write(configfile)

    pixels = pixels[inside]
    self.submit(pixels,queue=queue,debug=debug,configfile=configfile)
def fit_richness(self, atol=1.e-3, maxiter=50):
    """
    Maximize the log-likelihood as a function of richness.

    ADW 2018-06-04: Does it make sense to set the richness to the mle?

    Parameters:
    -----------
    atol    : absolute tolerance for convergence
    maxiter : maximum number of iterations

    Returns:
    --------
    loglike, richness, parabola : the maximum loglike, the mle, and the parabola
    """
    # Check whether the signal probability for all objects is zero
    # This can occur for finite kernels on the edge of the survey footprint
    if np.isnan(self.u).any():
        logger.warning("NaN signal probability found")
        return 0., 0., None

    if not np.any(self.u):
        logger.warning("Signal probability is zero for all objects")
        return 0., 0., None

    if self.f == 0:
        logger.warning("Observable fraction is zero")
        return 0., 0., None

    # Richness corresponding to 0, 1, and 10 observable stars
    richness = np.array([0., 1./self.f, 10./self.f])
    loglike = np.array([self.value(richness=r) for r in richness])

    found_maximum = False
    iteration = 0
    while not found_maximum:
        parabola = ugali.utils.parabola.Parabola(richness, 2.*loglike)
        if parabola.vertex_x < 0.:
            found_maximum = True
        else:
            richness = np.append(richness, parabola.vertex_x)
            loglike = np.append(loglike, self.value(richness=richness[-1]))

            if np.fabs(loglike[-1] - np.max(loglike[0: -1])) < atol:
                found_maximum = True
        iteration += 1
        if iteration > maxiter:
            logger.warning("Maximum number of iterations reached")
            break

    index = np.argmax(loglike)
    return loglike[index], richness[index], parabola
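# Standalone toy sketch of the vertex-iteration idea used in fit_richness()
# above (this is not the ugali Parabola class): fit a parabola through the
# sampled (richness, loglike) points and move to its vertex until the
# log-likelihood improvement drops below atol.
#
#   import numpy as np
#
#   def loglike(r):                              # hypothetical concave loglike
#       return -0.5 * (r - 40.0)**2 / 25.0
#
#   richness = np.array([0., 10., 100.])
#   ll = np.array([loglike(r) for r in richness])
#   for _ in range(50):
#       a, b, c = np.polyfit(richness, ll, 2)    # quadratic fit
#       vertex = -b / (2.0 * a)                  # location of the maximum
#       richness = np.append(richness, vertex)
#       ll = np.append(ll, loglike(vertex))
#       if np.fabs(ll[-1] - np.max(ll[:-1])) < 1.e-3:
#           break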
def inFootprint(filename,ra,dec):
    """
    Check if set of ra,dec combinations are in footprint.
    Careful, input files must be in celestial coordinates.

    filename : Either healpix map or mangle polygon file
    ra,dec   : Celestial coordinates

    Returns:
    inside   : boolean array of coordinates in footprint
    """
    try:
        footprint = healpy.read_map(filename,verbose=False)
        nside = healpy.npix2nside(len(footprint))
        pix = ang2pix(nside,ra,dec)
        inside = (footprint[pix] > 0)
    except IOError:
        logger.warning("Failed to load healpix footprint; using MANGLE...")
        inside = inMangle(filename,ra,dec)
    return inside
def _parse(self, roi=None):
    """
    Helper function to parse a catalog file and return a pyfits table.

    CSV format not yet validated.

    !!! Careful, reading a large catalog is memory intensive !!!
    """
    filenames = self.config.getFilenames()

    if len(filenames['catalog'].compressed()) == 0:
        raise Exception("No catalog file found")
    elif roi is not None:
        pixels = roi.getCatalogPixels()
        self.data = readCatalogData(filenames['catalog'][pixels])
    elif len(filenames['catalog'].compressed()) == 1:
        file_type = filenames[0].split('.')[-1].strip().lower()
        if file_type == 'csv':
            self.data = numpy.recfromcsv(filenames[0], delimiter = ',')
        elif file_type in ['fit', 'fits']:
            self.data = pyfits.open(filenames[0])[1].data
        else:
            logger.warning('Unrecognized catalog file extension %s'%(file_type))
    else:
        self.data = readCatalogData(filenames['catalog'].compressed())

    # ADW: This is horrible and should never be done...
    selection = self.config['catalog'].get('selection')
    if not selection:
        pass
    elif 'self.data' not in selection:
        msg = "Selection does not contain 'data'"
        raise Exception(msg)
    else:
        logger.warning('Evaluating selection: \n"%s"'%selection)
        sel = eval(selection)
        self.data = self.data[sel]
def readSparseHealpixMaps(infiles, field, extension='PIX_DATA', default_value=healpy.UNSEEN, construct_map=True):
    """
    Read multiple sparse healpix maps and output the results
    identically to a single file read.
    """
    if isinstance(infiles,basestring):
        infiles = [infiles]

    pix_array = []
    value_array = []

    # Create a map based on the first file in the list
    map = readSparseHealpixMap(infiles[0], field, extension=extension,
                               default_value=healpy.UNSEEN, construct_map=True)

    for ii in range(0, len(infiles)):
        logger.debug('(%i/%i) %s'%(ii+1, len(infiles), infiles[ii]))
        pix_array_current, value_array_current = readSparseHealpixMap(infiles[ii], field,
                                                                      extension=extension,
                                                                      construct_map=False)
        pix_array.append(pix_array_current)
        value_array.append(value_array_current)
        map[pix_array[ii]] = value_array[ii]

    # Check to see whether there are any conflicts
    pix_master = numpy.concatenate(pix_array)
    value_master = numpy.concatenate(value_array)

    n_conflicting_pixels = len(pix_master) - len(numpy.unique(pix_master))
    if n_conflicting_pixels != 0:
        logger.warning('%i conflicting pixels during merge.'%(n_conflicting_pixels))

    if construct_map:
        return map
    else:
        if n_conflicting_pixels == 0:
            pix_master = numpy.sort(pix_master)
            return pix_master, map[pix_master]
        else:
            pix_valid = numpy.nonzero(map != default_value)[0]
            return pix_valid, map[pix_valid]
def createAssociations(self):
    objects = self.objects

    tol = self.config['search']['proximity']
    columns = odict()

    names = np.empty(len(objects),dtype=object)
    names.fill('')
    for i,refs in enumerate(self.config['search']['catalogs']):
        i += 1
        catalog = SourceCatalog()
        for ref in refs:
            print(ref)
            catalog += catalogFactory(ref)

        # String length (should be greater than longest name)
        length = len(max(catalog['name'],key=len)) + 1
        dtype = 'S%i'%length; fitstype='%iA'%length

        assoc = np.empty(len(objects),dtype=dtype)
        assoc.fill('')

        angsep = np.zeros(len(objects),dtype=np.float32)
        idx1,idx2,sep = catalog.match(objects['GLON'],objects['GLAT'],tol=tol)
        assoc[idx1] = catalog['name'][idx2].astype(dtype)
        angsep[idx1] = sep

        columns['ASSOC%i'%i] = assoc
        columns['ANGSEP%i'%i] = angsep

        if length > objects['NAME'].itemsize:
            logger.warning("Association name may not fit.")
        names = np.where(names=='',assoc,names)

    names = names.astype(objects['NAME'].dtype)
    objects['NAME'][:] = np.where(names=='',objects['NAME'],names)
    objects['NAME'][:] = np.char.replace(objects['NAME'],'_',' ')

    self.assocs = mlab.rec_append_fields(objects,columns.keys(),columns.values())
    self.assocs = self.assocs[self.assocs['NAME'].argsort()]