def download(self, pixel, outdir=None, force=False):
    if outdir is None:
        outdir = './'
    else:
        mkdir(outdir)
    sqldir = mkdir(os.path.join(outdir, 'sql'))
    self._setup_casjobs()

    basename = self.basename + "_%04d" % pixel['name']
    sqlname = os.path.join(sqldir, basename + '.sql')
    dbname = basename + '_output'
    taskname = basename
    outfile = os.path.join(outdir, basename + ".fits")
    if os.path.exists(outfile) and not force:
        logger.warning("Found %s; skipping..." % (outfile))
        return

    logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)" % (pixel))
    logger.info("Working on " + sqlname)
    self.generate_query(pixel['ra_min'], pixel['ra_max'],
                        pixel['dec_min'], pixel['dec_max'],
                        sqlname, dbname)
    try:
        self.query(self.release, taskname, sqlname)
    except subprocess.CalledProcessError as e:
        logger.error(e.output)
        self.drop(dbname)
        raise e
def download(self, pixel, outdir=None, force=False):
    if outdir is None:
        outdir = './'
    else:
        mkdir(outdir)
    sqldir = mkdir(os.path.join(outdir, 'sql'))
    self._setup_desdbi()

    basename = self.basename + "_%04d" % pixel['name']
    sqlname = os.path.join(sqldir, basename + '.sql')
    taskname = basename
    outfile = os.path.join(outdir, basename + ".fits")
    if os.path.exists(outfile) and not force:
        logger.warning("Found %s; skipping..." % (outfile))
        return

    logger.info("\nDownloading pixel: %(name)i (ra=%(ra_min)g:%(ra_max)g,dec=%(dec_min)g:%(dec_max)g)" % (pixel))
    logger.info("Working on " + sqlname)
    self.generate_query(pixel['ra_min'], pixel['ra_max'],
                        pixel['dec_min'], pixel['dec_max'],
                        sqlname, outfile)
    ret = self.query(self.release, taskname, sqlname)
    if ret != 0:
        msg = "Download failed to complete."
        raise Exception(msg)
    return outfile
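# A minimal usage sketch (hypothetical; `db` is assumed to be a configured
# instance of this downloader class, and the pixel record carries the
# HEALPix name plus its ra/dec bounds):
#
#   pixel = dict(name=42, ra_min=10.0, ra_max=12.0, dec_min=-2.0, dec_max=0.0)
#   outfile = db.download(pixel, outdir='raw', force=False)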
def run(self):
    search = CandidateSearch(self.config)
    self.search = search

    if 'label' in self.opts.run:
        logger.info("Running 'label'...")
        if exists(search.labelfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.labelfile)
        else:
            #search.createLabels3D()
            search.createLabels2D()
            search.writeLabels()
    if 'objects' in self.opts.run:
        logger.info("Running 'objects'...")
        if exists(search.objectfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.objectfile)
        else:
            search.loadLabels()
            search.createObjects()
            search.writeObjects()
    if 'associate' in self.opts.run:
        logger.info("Running 'associate'...")
        if exists(search.assocfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.assocfile)
        else:
            search.loadObjects()
            search.createAssociations()
            search.writeAssociations()
    if 'candidate' in self.opts.run:
        logger.info("Running 'candidate'...")
        if exists(search.candfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.candfile)
        else:
            search.loadAssociations()
            search.writeCandidates()
    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import fitsio

        threshold = self.config['search']['cand_threshold']
        outdir = mkdir(self.config['output']['plotdir'])
        logdir = mkdir(os.path.join(outdir, 'log'))

        # Eventually move this into 'plotting' module
        candidates = fitsio.read(self.config.candfile, lower=True, trim_strings=True)
        candidates = candidates[candidates['ts'] >= threshold]
        for i, c in enumerate(candidates):
            msg = "(%i/%i) Plotting %s (%.2f,%.2f)..." % (
                i, len(candidates), c['name'], c['ra'], c['dec'])
            logger.info(msg)
            params = (self.opts.config, outdir, c['name'], c['ra'],
                      c['dec'], 0.5, c['modulus'])
            cmd = 'ugali/scratch/PlotCandidate.py %s %s -n="%s" --cel %f %f --radius %s -m %.2f'
            cmd = cmd % params
            logger.info(cmd)
            jobname = c['name'].lower().replace(' ', '_')
            logfile = os.path.join(logdir, jobname + '.log')
            batch = self.config['search'].get('batch', self.config['batch'])
            self.batch.submit(cmd, jobname, logfile, **batch['opts'])
            time.sleep(3)
def run(self):
    if 'scan' in self.opts.run:
        logger.info("Running 'scan'...")
        farm = Farm(self.config, verbose=self.opts.verbose)
        farm.submit_all(coords=self.opts.coords, queue=self.opts.queue,
                        debug=self.opts.debug)
    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")
        mergefile = self.config.mergefile
        roifile = self.config.roifile
        filenames = self.config.likefile.split('_%')[0] + '_*.fits'
        infiles = sorted(glob.glob(filenames))
        if exists(mergefile) and not self.opts.force:
            logger.warn(" Found %s; skipping..." % mergefile)
        else:
            logger.info(" Merging likelihood files...")
            ugali.utils.healpix.merge_partial_maps(infiles, mergefile)
        if exists(roifile) and not self.opts.force:
            logger.warn(" Found %s; skipping..." % roifile)
        else:
            logger.info(" Merging likelihood headers...")
            ugali.utils.healpix.merge_likelihood_headers(infiles, roifile)
        #ugali.utils.skymap.mergeLikelihoodFiles(infiles,mergefile,roifile)
    if 'tar' in self.opts.run:
        logger.info("Running 'tar'...")
        outdir = mkdir(self.config['output']['likedir'])
        logdir = mkdir(join(outdir, 'log'))

        scanfile = self.config.likefile.split('_%')[0] + '_[0-9]*.fits'
        tarfile = join(self.config.likefile.split('_%')[0] + '_pixels.tar.gz')
        jobname = 'tar'
        logfile = os.path.join(logdir, 'scan_tar.log')
        cmd = 'tar --remove-files -cvzf %s %s' % (tarfile, scanfile)
        if exists(tarfile) and not self.opts.force:
            logger.warn(" Found %s; skipping..." % tarfile)
        else:
            logger.info(" Tarring likelihood files...")
            logger.info(cmd)
            self.batch.submit(cmd, jobname, logfile)
    if 'plot' in self.opts.run:
        # WARNING: Loading the full 3D healpix map is memory intensive.
        logger.info("Running 'plot'...")
        # Should do this in environment variable
        import matplotlib
        matplotlib.use('Agg')
        import pylab as plt
        import ugali.utils.plotting as plotting

        skymap = ugali.utils.skymap.readSparseHealpixMap(
            self.config.mergefile, 'LOG_LIKELIHOOD')[1]
        plotting.plotSkymap(skymap)
        outdir = mkdir(self.config['output']['plotdir'])
        basename = os.path.basename(self.config.mergefile.replace('.fits', '.png'))
        outfile = os.path.join(outdir, basename)
        plt.savefig(outfile)
def run(self):
    search = CandidateSearch(self.config)
    self.search = search

    if 'label' in self.opts.run:
        logger.info("Running 'label'...")
        if exists(search.labelfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.labelfile)
        else:
            #search.createLabels3D()
            search.createLabels2D()
            search.writeLabels()
    if 'objects' in self.opts.run:
        logger.info("Running 'objects'...")
        if exists(search.objectfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.objectfile)
        else:
            search.loadLabels()
            search.createObjects()
            search.writeObjects()
    if 'associate' in self.opts.run:
        logger.info("Running 'associate'...")
        if exists(search.assocfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.assocfile)
        else:
            search.loadObjects()
            search.createAssociations()
            search.writeAssociations()
    if 'candidate' in self.opts.run:
        logger.info("Running 'candidate'...")
        if exists(search.candfile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % search.candfile)
        else:
            search.loadAssociations()
            search.writeCandidates()
    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import pyfits

        threshold = self.config['search']['cand_threshold']
        outdir = mkdir(self.config['output']['plotdir'])
        logdir = mkdir(os.path.join(outdir, 'log'))

        # Eventually move this into 'plotting' module
        candidates = pyfits.open(self.config.candfile)[1].data
        candidates = candidates[candidates['TS'] >= threshold]
        for candidate in candidates:
            logger.info("Plotting %s (%.2f,%.2f)..." % (
                candidate['name'], candidate['glon'], candidate['glat']))
            params = (self.opts.config, outdir, candidate['name'], candidate['ra'],
                      candidate['dec'], 0.5, candidate['modulus'])
            cmd = 'ugali/scratch/PlotCandidate.py %s %s -n="%s" --cel %f %f --radius %s -m %.2f'
            cmd = cmd % params
            print(cmd)
            jobname = candidate['name'].lower().replace(' ', '_')
            logfile = os.path.join(logdir, jobname + '.log')
            self.batch.submit(cmd, jobname, logfile)
            time.sleep(5)
def run(self):
    if 'scan' in self.opts.run:
        logger.info("Running 'scan'...")
        farm = Farm(self.config, verbose=self.opts.verbose)
        farm.submit_all(coords=self.opts.coords, queue=self.opts.queue,
                        debug=self.opts.debug)
    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")
        mergefile = self.config.mergefile
        roifile = self.config.roifile
        filenames = self.config.likefile.split('_%')[0] + '_*'
        infiles = sorted(glob.glob(filenames))
        if exists(mergefile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % mergefile)
        else:
            logger.info(" Merging likelihood files...")
            ugali.utils.healpix.merge_partial_maps(infiles, mergefile)
        if exists(roifile) and not self.opts.force:
            logger.info(" Found %s; skipping..." % roifile)
        else:
            logger.info(" Merging likelihood headers...")
            ugali.utils.healpix.merge_likelihood_headers(infiles, roifile)
        #ugali.utils.skymap.mergeLikelihoodFiles(infiles,mergefile,roifile)
    if 'tar' in self.opts.run:
        logger.info("Running 'tar'...")
        outdir = mkdir(self.config['output']['likedir'])
        logdir = mkdir(join(outdir, 'log'))

        scanfile = self.config.likefile.split('_%')[0] + '_[0-9]*.fits'
        tarfile = join(self.config.likefile.split('_%')[0] + '_pixels.tar.gz')
        jobname = 'tar'
        logfile = os.path.join(logdir, 'scan_tar.log')
        cmd = 'tar --remove-files -cvzf %s %s' % (tarfile, scanfile)
        logger.info(cmd)
        self.batch.submit(cmd, jobname, logfile)
    if 'plot' in self.opts.run:
        # WARNING: Loading the full 3D healpix map is memory intensive.
        logger.info("Running 'plot'...")
        import matplotlib
        matplotlib.use('Agg')
        import pylab as plt
        import ugali.utils.plotting as plotting

        skymap = ugali.utils.skymap.readSparseHealpixMap(
            self.config.mergefile, 'LOG_LIKELIHOOD')[1]
        plotting.plotSkymap(skymap)
        outdir = mkdir(self.config['output']['plotdir'])
        basename = os.path.basename(self.config.mergefile.replace('.fits', '.png'))
        outfile = os.path.join(outdir, basename)
        plt.savefig(outfile)
def _config(self):
    self.nside = self.config['coords']['nside_pixel']
    self.threshold = self.config['search']['obj_threshold']  # = 10
    self.xsize = self.config['search']['xsize']  # = 10000
    self.minpix = self.config['search']['minpix']  # = 1

    self.mergefile = self.config.mergefile
    self.roifile = self.config.roifile
    self.labelfile = self.config.labelfile
    self.objectfile = self.config.objectfile
    self.assocfile = self.config.assocfile
    self.candfile = self.config.candfile

    mkdir(self.config['output']['searchdir'])
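# For reference, a hypothetical config fragment with the keys read by
# _config above (values are illustrative; the inline comments in the
# source suggest obj_threshold=10, xsize=10000, minpix=1):
#
#   coords:
#     nside_pixel: 4096
#   search:
#     obj_threshold: 10
#     xsize: 10000
#     minpix: 1
#   output:
#     searchdir: ./search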
def run(self, field=None, simple=False, force=False):
    """
    Loop through pixels containing catalog objects and calculate
    the magnitude limit. This gets a bit convoluted due to all
    the different pixel resolutions...
    """
    if field is None:
        fields = [1, 2]
    else:
        fields = [field]

    for filenames in self.filenames.compress(~self.filenames.mask['catalog']).data:
        infile = filenames['catalog']
        for f in fields:
            outfile = filenames['mask_%i' % f]
            if os.path.exists(outfile) and not force:
                logger.info("Found %s; skipping..." % outfile)
                continue

            pixels, maglims = self.calculate(infile, f, simple)
            logger.info("Creating %s" % outfile)
            outdir = mkdir(os.path.dirname(outfile))

            data = odict()
            data['PIXEL'] = pixels
            data['MAGLIM'] = maglims.astype('f4')
            ugali.utils.healpix.write_partial_map(outfile, data, self.nside_pixel)
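# Hedged usage sketch (assumes `mask` is an instance of the class that owns
# this run() method, already configured with catalog filenames):
#
#   mask.run(field=None, simple=True, force=False)  # both fields, simple maglims
#   mask.run(field=1, force=True)                   # recompute field 1 only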
def simple_maglims(config, dirname='simple', force=False):
    """
    Create simple, uniform magnitude limits based on nominal
    survey depth.
    """
    filenames = config.getFilenames()
    release = config['data']['release'].lower()

    #band_1 = config['isochrone']['mag_1_field']
    #band_2 = config['isochrone']['mag_2_field']
    band_1 = config['catalog']['mag_1_field']
    band_2 = config['catalog']['mag_2_field']
    mask_1 = filenames['mask_1'].compressed()
    mask_2 = filenames['mask_2'].compressed()

    basedir, basename = os.path.split(config['mask']['dirname'])
    if basename == dirname:
        raise Exception("Input and output directory are the same.")
    outdir = mkdir(os.path.join(basedir, dirname))

    for band, infiles in [(band_1, mask_1), (band_2, mask_2)]:
        maglim = MAGLIMS[release][band]
        for infile in infiles:
            basename = os.path.basename(infile)
            outfile = join(outdir, basename)
            logger.debug('Reading %s...' % infile)
            f = pyfits.open(infile)
            f[1].data['MAGLIM'][:] = maglim
            logger.debug('Writing %s...' % outfile)
            f.writeto(outfile, clobber=True)
def pixelizeDensity(config, nside=None, force=False):
    if nside is None:
        nside = config['coords']['nside_likelihood']
    coordsys = config['coords']['coordsys'].upper()
    filenames = config.getFilenames()
    infiles = filenames[~filenames['catalog'].mask]
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    for ii, f in enumerate(infiles.data):
        infile = f['catalog']
        pix = f['pix']
        logger.info('(%i/%i) %s' % (ii + 1, len(infiles), infile))

        outfile = config['data']['density'] % pix
        if os.path.exists(outfile) and not force:
            logger.info("Found %s; skipping..." % outfile)
            continue

        outdir = mkdir(os.path.dirname(outfile))
        pixels, density = stellarDensity(infile, nside,
                                         lon_field=lon_field, lat_field=lat_field)
        data = dict(PIXEL=pixels, DENSITY=density)
        healpix.write_partial_map(outfile, data, nside=nside, coord=coordsys[0])
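# Example invocation (a sketch; `config` is assumed to be a ugali Config
# object whose 'data'/'density' entry is a filename template with a
# pixel-number placeholder):
#
#   config = Config('config.yaml')
#   pixelizeDensity(config, nside=None, force=False)  # nside defaults to nside_likelihood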
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog into chunks by healpix pixel.

    Parameters:
    -----------
    infiles : List of input files
    config  : Configuration file
    force   : Overwrite existing files (deprecated)

    Returns:
    --------
    None
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()

    for i, filename in enumerate(infiles):
        logger.info('(%i/%i) %s' % (i + 1, len(infiles), filename))
        data = fitsio.read(filename)
        logger.info("%i objects found" % len(data))
        if not len(data):
            continue

        glon, glat = cel2gal(data['RA'], data['DEC'])
        cat_pix = ang2pix(nside_catalog, glon, glat)
        pix_pix = ang2pix(nside_pixel, glon, glat)
        cat_pix_name = 'PIX%i' % nside_catalog
        pix_pix_name = 'PIX%i' % nside_pixel

        data = mlab.rec_append_fields(
            data,
            names=['GLON', 'GLAT', cat_pix_name, pix_pix_name],
            arrs=[glon, glat, cat_pix, pix_pix],
            dtypes=['f4', 'f4', int, int])

        for pix in np.unique(cat_pix):
            logger.debug("Processing pixel %s" % pix)

            arr = data[cat_pix == pix]
            outfile = filenames.data['catalog'][pix]

            if not os.path.exists(outfile):
                logger.debug("Creating %s" % outfile)
                out = fitsio.FITS(outfile, mode='rw')
                out.write(arr)
                hdr = ugali.utils.healpix.header_odict(nside=nside_catalog, coord='G')
                for key in ['PIXTYPE', 'ORDERING', 'NSIDE', 'COORDSYS']:
                    out[1].write_key(*list(hdr[key].values()))
                out[1].write_key('PIX', pix, comment='HEALPIX pixel for this file')
            else:
                out = fitsio.FITS(outfile, mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s" % outfile)
            out.close()
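# Example (hypothetical paths; infiles are the raw catalog FITS files to be
# re-binned into per-pixel files at nside_catalog resolution):
#
#   infiles = sorted(glob.glob('raw/*.fits'))
#   pixelizeCatalog(infiles, config)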
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog up into a set of healpix files.
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()

    for ii, infile in enumerate(infiles):
        logger.info('(%i/%i) %s' % (ii + 1, len(infiles), infile))
        f = pyfits.open(infile)
        data = f[1].data
        header = f[1].header
        logger.info("%i objects found" % len(data))
        if not len(data):
            continue

        glon, glat = cel2gal(data['RA'], data['DEC'])
        catalog_pix = ang2pix(nside_catalog, glon, glat, coord='GAL')
        pixel_pix = ang2pix(nside_pixel, glon, glat, coord='GAL')
        names = [n.upper() for n in data.columns.names]
        ra_idx = names.index('RA'); dec_idx = names.index('DEC')
        idx = ra_idx if ra_idx > dec_idx else dec_idx
        catalog_pix_name = 'PIX%i' % nside_catalog
        pixel_pix_name = 'PIX%i' % nside_pixel

        coldefs = pyfits.ColDefs(
            [pyfits.Column(name='GLON', format='1D', array=glon),
             pyfits.Column(name='GLAT', format='1D', array=glat),
             pyfits.Column(name=catalog_pix_name, format='1J', array=catalog_pix),
             pyfits.Column(name=pixel_pix_name, format='1J', array=pixel_pix)])
        hdu = pyfits.new_table(data.columns[:idx + 1] + coldefs + data.columns[idx + 1:])
        table = hdu.data

        for pix in numpy.unique(catalog_pix):
            logger.debug("Processing pixel %s" % pix)
            outfile = filenames.data['catalog'][pix]
            if not os.path.exists(outfile):
                logger.debug("Creating %s" % outfile)
                names = [n.upper() for n in table.columns.names]
                formats = table.columns.formats
                columns = [pyfits.Column(n, f) for n, f in zip(names, formats)]
                out = pyfits.HDUList([pyfits.PrimaryHDU(), pyfits.new_table(columns)])
                out[1].header['NSIDE'] = nside_catalog
                out[1].header['PIX'] = pix
                out.writeto(outfile)
            hdulist = pyfits.open(outfile, mode='update')
            t1 = hdulist[1].data
            # Could we speed up with sorting and indexing?
            t2 = table[table[catalog_pix_name] == pix]
            nrows1 = t1.shape[0]
            nrows2 = t2.shape[0]
            nrows = nrows1 + nrows2
            out = pyfits.new_table(t1.columns, nrows=nrows)
            for name in t1.columns.names:
                out.data.field(name)[nrows1:] = t2.field(name)
            hdulist[1] = out
            logger.debug("Writing %s" % outfile)
            hdulist.flush()
            hdulist.close()
def download(self, age=None, metallicity=None, outdir=None, force=False):
    """
    Check valid parameter range and download isochrones from:
    http://stev.oapd.inaf.it/cgi-bin/cmd
    """
    try:
        from urllib.error import URLError
    except ImportError:
        from urllib2 import URLError

    if age is None:
        age = float(self.age)
    if metallicity is None:
        metallicity = float(self.metallicity)

    if outdir is None:
        outdir = './'
    basename = self.params2filename(age, metallicity)
    outfile = os.path.join(outdir, basename)

    if os.path.exists(outfile) and not force:
        try:
            self.verify(outfile, self.survey, age, metallicity)
            logger.info("Found %s; skipping..." % (outfile))
            return
        except Exception as e:
            msg = "Overwriting corrupted %s..." % (outfile)
            logger.warn(msg)
            os.remove(outfile)

    mkdir(outdir)

    self.print_info(age, metallicity)
    self.query_server(outfile, age, metallicity)

    if not os.path.exists(outfile):
        raise RuntimeError('Download failed')

    try:
        self.verify(outfile, self.survey, age, metallicity)
    except Exception as e:
        msg = "Output file is corrupted."
        logger.error(msg)
        msg = "Removing %s." % outfile
        logger.info(msg)
        os.remove(outfile)
        raise e

    return outfile
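# Sketch of a download grid (hypothetical values; age in Gyr and metallicity
# as mass fraction Z, matching the a=13.5, z=0.0001 convention used elsewhere
# in these scripts; `iso` is assumed to be an instance of this class):
#
#   for age in [10.0, 12.0, 13.5]:
#       for z in [0.0001, 0.0002]:
#           iso.download(age=age, metallicity=z, outdir='isochrones')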
def simple_split(config, dirname='split', force=False):
    config = Config(config)
    filenames = config.getFilenames()
    healpix = filenames['pix'].compressed()

    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']

    release = config['data']['release'].lower()
    band_1 = config['catalog']['mag_1_band']
    band_2 = config['catalog']['mag_2_band']

    mangledir = config['mangle']['dirname']

    mangle_file_1 = join(mangledir, config['mangle']['filename_1'])
    logger.info("Reading %s..." % mangle_file_1)
    mangle_1 = healpy.read_map(mangle_file_1)

    mangle_file_2 = join(mangledir, config['mangle']['filename_2'])
    logger.info("Reading %s..." % mangle_file_2)
    mangle_2 = healpy.read_map(mangle_file_2)

    basedir, basename = os.path.split(config['mask']['dirname'])
    if basename == dirname:
        msg = "Input and output directory are the same."
        raise Exception(msg)
    outdir = mkdir(os.path.join(basedir, dirname))

    mask_1 = os.path.basename(config['mask']['basename_1'])
    mask_2 = os.path.basename(config['mask']['basename_2'])

    for band, mangle, base in [(band_1, mangle_1, mask_1), (band_2, mangle_2, mask_2)]:
        maglim = MAGLIMS[release][band]

        nside_mangle = healpy.npix2nside(len(mangle))
        if nside_mangle != nside_pixel:
            msg = "Mangle nside different from pixel nside"
            logger.warning(msg)
            #raise Exception(msg)

        pixels = np.nonzero((mangle > 0) & (mangle > maglim))[0]
        print(len(pixels))
        superpix = superpixel(pixels, nside_mangle, nside_catalog)
        print(healpix)
        for hpx in healpix:
            outfile = join(outdir, base) % hpx
            if os.path.exists(outfile) and not force:
                logger.warning("Found %s; skipping..." % outfile)
                continue

            pix = pixels[superpix == hpx]
            print(hpx, len(pix))
            maglims = maglim * np.ones(len(pix))
            data = dict(MAGLIM=maglims)
            logger.info('Writing %s...' % outfile)
            ugali.utils.skymap.writeSparseHealpixMap(pix, data, nside_pixel, outfile)
def submit_all(self, coords=None, queue=None, debug=False):
    """
    Submit likelihood analyses on a set of coordinates. If
    coords is `None`, submit all coordinates in the footprint.

    Inputs:
    coords : Array of target locations in Galactic coordinates.
    queue  : Overwrite submit queue.
    debug  : Don't run.
    """
    if coords is None:
        pixels = np.arange(hp.nside2npix(self.nside_likelihood))
    else:
        coords = np.asarray(coords)
        if coords.ndim == 1:
            coords = np.array([coords])
        if coords.shape[1] == 2:
            lon, lat = coords.T
            radius = np.zeros(len(lon))
        elif coords.shape[1] == 3:
            lon, lat, radius = coords.T
        else:
            raise Exception("Unrecognized coords shape:" + str(coords.shape))

        #ADW: targets is still in glon,glat
        if self.config['coords']['coordsys'].lower() == 'cel':
            lon, lat = gal2cel(lon, lat)

        vec = ang2vec(lon, lat)
        pixels = np.zeros(0, dtype=int)
        for v, r in zip(vec, radius):
            pix = query_disc(self.nside_likelihood, v, r, inclusive=True, fact=32)
            pixels = np.hstack([pixels, pix])
        #pixels = np.unique(pixels)

    inside = ugali.utils.skymap.inFootprint(self.config, pixels)
    if inside.sum() != len(pixels):
        logger.warning("Ignoring pixels outside survey footprint:\n" + str(pixels[~inside]))
    if inside.sum() == 0:
        logger.warning("No pixels inside footprint.")
        return

    # Only write the configfile once
    outdir = mkdir(self.config['output']['likedir'])
    # Actually copy config instead of re-writing
    shutil.copy(self.config.filename, outdir)
    configfile = join(outdir, os.path.basename(self.config.filename))

    pixels = pixels[inside]
    self.submit(pixels, queue=queue, debug=debug, configfile=configfile)
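# The accepted coords shapes, as a hypothetical example: rows are (lon, lat)
# or (lon, lat, radius) in Galactic coordinates, with radius in the units
# expected by the query_disc wrapper:
#
#   coords = np.array([[53.92, -54.05, 1.0],
#                      [86.39, -38.47, 0.5]])
#   farm.submit_all(coords=coords, queue='local', debug=True)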
def download(self, age, metallicity, outdir=None, force=False):
    """
    Check valid parameter range and download isochrones from:
    http://stev.oapd.inaf.it/cgi-bin/cmd
    """
    if outdir is None:
        outdir = './'
    basename = self.isochrone.params2filename(age, metallicity)
    outfile = os.path.join(outdir, basename)

    if os.path.exists(outfile) and not force:
        try:
            self.verify(outfile, self.survey, age, metallicity)
            logger.info("Found %s; skipping..." % (outfile))
            return
        except Exception as e:
            msg = "Overwriting corrupted %s..." % (outfile)
            logger.warn(msg)
            #os.remove(outfile)

    mkdir(outdir)

    self.print_info(age, metallicity)

    try:
        self.query_server(outfile, age, metallicity)
    except Exception as e:
        logger.debug(str(e))
        raise RuntimeError('Bad server response')

    if not os.path.exists(outfile):
        raise RuntimeError('Download failed')

    try:
        self.verify(outfile, self.survey, age, metallicity)
    except Exception as e:
        msg = "Output file is corrupted."
        logger.error(msg)
        #os.remove(outfile)
        raise e

    return outfile
def run(self):
    #db = databaseFactory(self.config)
    #db.run(outdir=self.config['data']['dirname'])
    if 'data' in self.opts.run:
        outdir = self.config['data']['dirname']
        logdir = join(outdir, 'log')
        mkdir(logdir)

        jobname = 'download'
        logfile = join(logdir, jobname + '.log')
        script = self.config['data']['script']
        cmd = '%s %s %s' % (script, self.opts.config, self.opts.pixfile)
        self.batch.submit(cmd, jobname, logfile)
    if 'isochrone' in self.opts.run:
        jobname = 'isochrone'
        script = self.config['data']['script'].replace('database.py', 'padova.py')
        cmd = '%s %s' % (script, self.opts.config)
        self.batch.submit(cmd, jobname)
def submit_all(self, coords=None, queue=None, debug=False):
    """
    Submit likelihood analyses on a set of coordinates. If
    coords is `None`, submit all coordinates in the footprint.

    Inputs:
    coords : Array of target locations in Galactic coordinates.
    queue  : Overwrite submit queue.
    debug  : Don't run.
    """
    if coords is None:
        pixels = numpy.arange(healpy.nside2npix(self.nside_likelihood))
    else:
        coords = numpy.asarray(coords)
        if coords.ndim == 1:
            coords = numpy.array([coords])
        if coords.shape[1] == 2:
            glon, glat = coords.T
            radius = numpy.zeros(len(glon))
        elif coords.shape[1] == 3:
            glon, glat, radius = coords.T
        else:
            raise Exception("Unrecognized coords shape:" + str(coords.shape))

        vec = ang2vec(glon, glat)
        pixels = numpy.zeros(0, dtype=int)
        for v, r in zip(vec, radius):
            pix = query_disc(self.nside_likelihood, v, r, inclusive=True, fact=32)
            pixels = numpy.hstack([pixels, pix])
        #pixels = numpy.unique(pixels)

    inside = ugali.utils.skymap.inFootprint(self.config, pixels)
    if inside.sum() != len(pixels):
        logger.warning("Ignoring pixels outside survey footprint:\n" + str(pixels[~inside]))
    if inside.sum() == 0:
        logger.warning("No pixels inside footprint.")
        return

    # Only write the configfile once
    outdir = mkdir(self.config['output']['likedir'])
    configfile = '%s/config_queue.py' % (outdir)
    self.config.write(configfile)

    pixels = pixels[inside]
    self.submit(pixels, queue=queue, debug=debug, configfile=configfile)
def pixelizeDensity(config, nside=None, force=False):
    if nside is None:
        nside = config['coords']['nside_likelihood']

    filenames = config.getFilenames()
    infiles = filenames[~filenames['catalog'].mask]

    for ii, f in enumerate(infiles.data):
        infile = f['catalog']
        pix = f['pix']
        logger.info('(%i/%i) %s' % (ii + 1, len(infiles), infile))

        outfile = config['data']['density'] % pix
        if os.path.exists(outfile) and not force:
            logger.info("Found %s; skipping..." % outfile)
            continue

        outdir = mkdir(os.path.dirname(outfile))
        pixels, density = stellarDensity(infile, nside)
        data_dict = dict(DENSITY=density)
        logger.info("Writing %s..." % outfile)
        ugali.utils.skymap.writeSparseHealpixMap(pixels, data_dict, nside, outfile)
def run(self, field=None, simple=False, force=False):
    """
    Loop through pixels containing catalog objects and calculate
    the magnitude limit. This gets a bit convoluted due to all
    the different pixel resolutions...
    """
    if field is None:
        fields = [1, 2]
    else:
        fields = [field]

    for filenames in self.filenames.compress(~self.filenames.mask['catalog']).data:
        infile = filenames['catalog']
        for f in fields:
            outfile = filenames['mask_%i' % f]
            if os.path.exists(outfile) and not force:
                logger.info("Found %s; skipping..." % outfile)
                continue

            pixels, maglims = self.calculate(infile, f, simple)
            logger.info("Creating %s" % outfile)
            outdir = mkdir(os.path.dirname(outfile))
            data_dict = dict(MAGLIM=maglims)
            ugali.utils.skymap.writeSparseHealpixMap(pixels, data_dict,
                                                     self.nside_pixel, outfile)
def run(self):
    if self.opts.coords is not None:
        coords = self.opts.coords
        names = vars(self.opts).get('names', len(coords) * [''])
    else:
        names, coords = self.parser.parse_targets(self.config.candfile)
    labels = [n.lower().replace(' ', '_').replace('(', '').replace(')', '')
              for n in names]

    self.outdir = mkdir(self.config['output']['mcmcdir'])
    self.logdir = mkdir(join(self.outdir, 'log'))

    args = list(zip(len(names) * [self.opts.config], names, labels, coords))

    if 'mcmc' in self.opts.run:
        logger.info("Running 'mcmc'...")
        try:
            shutil.copy(self.opts.config, self.outdir)
        except Exception as e:
            logger.warn(str(e))
        for config, name, label, coord in args:
            glon, glat, radius = coord
            outfile = make_filenames(self.config, label)['samfile']
            base = splitext(basename(outfile))[0]
            logfile = join(self.logdir, base + '.log')
            jobname = base
            script = self.config['mcmc']['script']
            nthreads = self.config['mcmc']['nthreads']
            srcmdl = self.config['mcmc'].get('srcmdl')
            if srcmdl is not None:
                try:
                    shutil.copy(srcmdl, self.outdir)
                except Exception as e:
                    logger.warn(str(e))
                logger.info('%s (%s)' % (name, srcmdl))
                cmd = '%s %s --name %s --srcmdl %s %s' % (
                    script, self.opts.config, name, srcmdl, outfile)
            else:
                logger.info('%s (%.4f,%.4f)' % (name, glon, glat))
                cmd = '%s %s --name %s --gal %.4f %.4f --grid %s' % (
                    script, self.opts.config, name, glon, glat, outfile)
            logger.info(cmd)
            self.batch.submit(cmd, jobname, logfile, n=nthreads, a='mpirun')

    if 'results' in self.opts.run:
        logger.info("Running 'results'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_results, args)
        else:
            do_results(*args)

    if 'membership' in self.opts.run:
        logger.info("Running 'membership'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_membership, args)
        else:
            do_membership(*args)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_plot, args)
            #map(do_plot,args)
        else:
            do_plot(*args)

    if 'collect' in self.opts.run:
        logger.info("Running 'collect'...")
        results = odict()
        srcmdl = odict()
        params = odict()
        for config, name, label, coord in args:
            srcfile = make_filenames(self.config, name)['srcfile']
            results[name] = yaml.load(open(srcfile))['results']
            srcmdl[name] = yaml.load(open(srcfile))['source']
            params[name] = yaml.load(open(srcfile))['params']

        for base, output in [('results.yaml', results),
                             ('srcmdl.yaml', srcmdl),
                             ('params.yaml', params)]:
            outfile = join(self.outdir, base)
            out = open(outfile, 'w')
            out.write(yaml.dump(output))
            out.close()

    if 'scan' in self.opts.run:
        logger.info("Running 'scan'...")
        for config, name, label, coord in args:
            logdir = mkdir('plots/log')
            logfile = join(logdir, '%s_lnlscan.log')

            cmd = 'python lnlscan.py %s --name %s --xpar %s --xbins 45 --ypar %s --ybins 45' % (
                self.opts.config, name, 'age', 'metallicity')
            self.batch.submit(cmd, logfile=logfile)

            cmd = 'python lnlscan.py %s --name %s --xpar %s --xbins 45 --ypar %s --ybins 45' % (
                self.opts.config, name, 'metallicity', 'distance_modulus')
            self.batch.submit(cmd, logfile=logfile)

            cmd = 'python lnlscan.py %s --name %s --xpar %s --xbins 45 --ypar %s --ybins 45' % (
                self.opts.config, name, 'age', 'distance_modulus')
            self.batch.submit(cmd, logfile=logfile)
def split(config, dirname='split', force=False):
    """
    Take a pre-existing maglim map and divide it into chunks
    consistent with the catalog pixels.
    """
    config = Config(config)
    filenames = config.getFilenames()
    #healpix = filenames['pix'].compressed()

    # Check that things are ok
    basedir, basename = os.path.split(config['mask']['dirname'])
    #if basename == dirname:
    #    msg = "Input and output directory are the same."
    #    raise Exception(msg)
    outdir = mkdir(os.path.join(basedir, dirname))

    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']

    release = config['data']['release'].lower()
    band1 = config['catalog']['mag_1_band']
    band2 = config['catalog']['mag_2_band']

    # Read the magnitude limits
    maglimdir = config['maglim']['dirname']

    maglimfile_1 = join(maglimdir, config['maglim']['filename_1'])
    logger.info("Reading %s..." % maglimfile_1)
    maglim1 = read_map(maglimfile_1)

    maglimfile_2 = join(maglimdir, config['maglim']['filename_2'])
    logger.info("Reading %s..." % maglimfile_2)
    maglim2 = read_map(maglimfile_2)

    # Read the footprint
    footfile = config['data']['footprint']
    logger.info("Reading %s..." % footfile)
    footprint = read_map(footfile)

    # Output mask names
    mask1 = os.path.basename(config['mask']['basename_1'])
    mask2 = os.path.basename(config['mask']['basename_2'])

    for band, maglim, base in [(band1, maglim1, mask1), (band2, maglim2, mask2)]:
        nside_maglim = hp.npix2nside(len(maglim))
        if nside_maglim != nside_pixel:
            msg = "Mask nside different from pixel nside"
            logger.warning(msg)
            #raise Exception(msg)

        pixels = np.nonzero(maglim > 0)[0]
        superpix = superpixel(pixels, nside_maglim, nside_catalog)
        healpix = np.unique(superpix)
        for hpx in healpix:
            outfile = join(outdir, base) % hpx
            if os.path.exists(outfile) and not force:
                logger.warning("Found %s; skipping..." % outfile)
                continue

            pix = pixels[superpix == hpx]
            print(hpx, len(pix))
            logger.info('Writing %s...' % outfile)
            data = odict()
            data['PIXEL'] = pix
            data['MAGLIM'] = maglim[pix].astype('f4')
            data['FRACDET'] = footprint[pix].astype('f4')
            ugali.utils.healpix.write_partial_map(outfile, data, nside_pixel)
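# Example call (a sketch; reads the maglim and footprint maps named in the
# config and writes per-catalog-pixel mask files under <mask dirname>/../split):
#
#   split('config.yaml', dirname='split', force=False)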
def submit(self, pixels, queue=None, debug=False, configfile=None):
    """
    Submit the likelihood job for the given pixel(s).
    """
    queue = self.config['batch']['cluster'] if queue is None else queue
    local = (queue == 'local')

    # Need to develop some way to take command line arguments...
    self.batch = ugali.utils.batch.batchFactory(queue, **self.config['batch']['opts'])

    if numpy.isscalar(pixels):
        pixels = numpy.array([pixels])

    outdir = mkdir(self.config['output']['likedir'])
    logdir = mkdir(join(outdir, 'log'))
    subdir = mkdir(join(outdir, 'sub'))

    # Save the current configuration settings; avoid writing
    # file multiple times if configfile passed as argument.
    if configfile is None:
        if local:
            configfile = self.configfile
        else:
            configfile = '%s/config_queue.py' % (outdir)
            self.config.write(configfile)

    lon, lat = pix2ang(self.nside_likelihood, pixels)
    commands = []
    chunk = self.config['batch']['chunk']
    istart = 0
    logger.info('=== Submit Likelihood ===')
    for ii, pix in enumerate(pixels):
        logger.info(' (%i/%i) pixel=%i nside=%i; (lon, lat) = (%.2f, %.2f)' % (
            ii + 1, len(pixels), pix, self.nside_likelihood, lon[ii], lat[ii]))

        # Create outfile name
        outfile = self.config.likefile % (pix, self.config['coords']['coordsys'].lower())
        outbase = os.path.basename(outfile)
        jobname = self.config['batch']['jobname']

        # Submission command
        sub = not os.path.exists(outfile)
        cmd = self.command(outfile, configfile, pix)
        commands.append([ii, cmd, lon[ii], lat[ii], sub])

        if local or chunk == 0:
            # Not chunking
            command = cmd
            submit = sub
            logfile = join(logdir, os.path.splitext(outbase)[0] + '.log')
        elif (len(commands) % chunk == 0) or (ii + 1 == len(pixels)):
            # End of chunk, create submission script
            commands = np.array(commands, dtype=object)
            istart, iend = commands[0][0], commands[-1][0]
            subfile = join(subdir, 'submit_%08i_%08i.sh' % (istart, iend))
            logfile = join(logdir, 'submit_%08i_%08i.log' % (istart, iend))
            command = "sh %s" % subfile

            submit = np.any(commands[:, -1])
            if submit:
                self.write_script(subfile, commands)
        else:
            # Not end of chunk
            continue
        commands = []

        # Actual job submission
        if not submit:
            logger.info(self.skip)
            continue
        else:
            while True:
                njobs = self.batch.njobs()
                if njobs < self.config['batch']['max_jobs']:
                    break
                else:
                    logger.info('%i jobs already in queue, waiting...' % (njobs))
                    time.sleep(5 * chunk)

            job = self.batch.submit(command, jobname, logfile)
            logger.info(" " + job)
            time.sleep(0.5)
COLORS = [
    ('g-r', ['g', 'r']),
    ('r-i', ['r', 'i']),
    ('i-z', ['i', 'z']),
    ('z-Y', ['z', 'Y']),
]

if __name__ == "__main__":
    import argparse
    description = "python script"
    parser = argparse.ArgumentParser(description=description)
    opts = parser.parse_args()

    nside = 128
    HPX = 'HPX%i' % nside
    outdir = mkdir('release/calibration')

    gcmfile = 'y1a1_gcm_v0.fits'
    if not os.path.exists(gcmfile):
        query = download.gcm_query()
        download.download(gcmfile, query, section='dessci')

    print("Loading %s..." % gcmfile)
    gcm = fitsio.read(gcmfile)
    names = ['MAG_ZERO', HPX]
    values = [gcm['ZEROPOINT'] + 25, ang2pix(nside, gcm['RA'], gcm['DEC'])]
    gcm = recfn.append_fields(gcm, names, values, usemask=False, asrecarray=True)
import healpy

from ugali.utils.shell import mkdir
from utils import found

if __name__ == "__main__":
    from parser import Parser
    parser = Parser()
    parser.add_argument('-p', '--pix', default=None, action='append', type=int,
                        help='pixels to submit')
    args = parser.parse_args()

    force = '-f' if args.force else ''

    config = yaml.load(open(args.config))
    hpxdir = config['hpxdir']
    catdir = mkdir(config['catdir'])
    keydir = mkdir(config['keydir'])
    logdir = mkdir(os.path.join(catdir, 'log'))

    pixels = args.pix
    if pixels is None:
        pixels = np.arange(healpy.nside2npix(config['nside']))

    for pix in pixels:
        infiles = glob.glob(hpxdir + '/*/*%05d*.fits' % pix)
        basename = 'hpx_%05d.fits' % pix
        outfile = os.path.join(catdir, 'cat_' + basename)
        keyfile = os.path.join(keydir, 'key_' + basename)
        logfile = os.path.join(logdir, basename.replace('.fits', '.log'))
        if len(infiles) == 0:
            continue
def pixelizeCatalog(infiles, config, force=False):
    """
    Break catalog into chunks by healpix pixel.

    Parameters:
    -----------
    infiles : List of input files
    config  : Configuration file
    force   : Overwrite existing files (deprecated)

    Returns:
    --------
    None
    """
    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']
    coordsys = config['coords']['coordsys'].upper()
    outdir = mkdir(config['catalog']['dirname'])
    filenames = config.getFilenames()
    lon_field = config['catalog']['lon_field'].upper()
    lat_field = config['catalog']['lat_field'].upper()

    # ADW: It would probably be better (and more efficient) to do the
    # pixelizing and the new column insertion separately.
    for i, filename in enumerate(infiles):
        logger.info('(%i/%i) %s' % (i + 1, len(infiles), filename))
        data = fitsio.read(filename)
        logger.info("%i objects found" % len(data))
        if not len(data):
            continue

        columns = list(map(str.upper, data.dtype.names))
        names, arrs = [], []

        if (lon_field in columns) and (lat_field in columns):
            lon, lat = data[lon_field], data[lat_field]
        elif coordsys == 'GAL':
            msg = "Columns '%s' and '%s' not found." % (lon_field, lat_field)
            msg += "\nConverting from RA,DEC"
            logger.warning(msg)
            lon, lat = cel2gal(data['RA'], data['DEC'])
            names += [lon_field, lat_field]
            arrs += [lon, lat]
        elif coordsys == 'CEL':
            msg = "Columns '%s' and '%s' not found." % (lon_field, lat_field)
            msg += "\nConverting from GLON,GLAT"
            lon, lat = gal2cel(data['GLON'], data['GLAT'])
            names += [lon_field, lat_field]
            arrs += [lon, lat]

        cat_pix = ang2pix(nside_catalog, lon, lat)
        pix_pix = ang2pix(nside_pixel, lon, lat)
        cat_pix_name = 'PIX%i' % nside_catalog
        pix_pix_name = 'PIX%i' % nside_pixel

        try:
            names += [cat_pix_name, pix_pix_name]
            arrs += [cat_pix, pix_pix]
            data = mlab.rec_append_fields(data, names=names, arrs=arrs)
        except ValueError as e:
            logger.warn(str(e) + '; not adding column.')
            #data[cat_pix_name] = cat_pix
            #data[pix_pix_name] = pix_pix

        for pix in np.unique(cat_pix):
            logger.debug("Processing pixel %s" % pix)

            arr = data[cat_pix == pix]
            outfile = filenames.data['catalog'][pix]

            if not os.path.exists(outfile):
                logger.debug("Creating %s" % outfile)
                out = fitsio.FITS(outfile, mode='rw')
                out.write(arr)
                hdr = healpix.header_odict(nside=nside_catalog, coord=coordsys[0])
                for key in ['PIXTYPE', 'ORDERING', 'NSIDE', 'COORDSYS']:
                    out[1].write_key(*list(hdr[key].values()))
                out[1].write_key('PIX', pix, comment='HEALPIX pixel for this file')
            else:
                out = fitsio.FITS(outfile, mode='rw')
                out[1].append(arr)

            logger.debug("Writing %s" % outfile)
            out.close()
def run(self):
    if 'scan' in self.opts.run:
        logger.info("Running 'scan'...")
        farm = Farm(self.config, verbose=self.opts.verbose)
        farm.submit_all(coords=self.opts.coords, queue=self.opts.queue,
                        debug=self.opts.debug)

    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")
        mergefile = self.config.mergefile
        roifile = self.config.roifile
        filenames = self.config.likefile.split('_%')[0] + '_*.fits'
        infiles = np.array(sorted(glob.glob(filenames)))

        if 'mergedir' in self.config['output']:
            mkdir(self.config['output']['mergedir'])

        pixels = np.char.rpartition(np.char.rpartition(infiles, '_')[:, 0], '_')[:, -1]
        pixels = pixels.astype(int)
        superpixel = healpix.superpixel(pixels,
                                        self.config['coords']['nside_likelihood'],
                                        self.config['coords']['nside_merge'])

        for pix in np.unique(superpixel):
            outfile = mergefile % pix
            if exists(outfile) and not self.opts.force:
                logger.warn(" Found %s; skipping..." % outfile)
            else:
                healpix.merge_partial_maps(infiles[superpixel == pix],
                                           outfile, multiproc=8)

        if exists(roifile) and not self.opts.force:
            logger.warn(" Found %s; skipping..." % roifile)
        else:
            logger.info(" Merging likelihood headers...")
            healpix.merge_likelihood_headers(infiles, roifile)

    if 'tar' in self.opts.run:
        logger.info("Running 'tar'...")
        outdir = mkdir(self.config['output']['likedir'])
        logdir = mkdir(join(outdir, 'log'))

        scanfile = self.config.likefile.split('_%')[0] + '_[0-9]*.fits'
        tarfile = join(self.config.likefile.split('_%')[0] + '_pixels.tar.gz')
        jobname = 'tar'
        logfile = os.path.join(logdir, 'scan_tar.log')
        cmd = 'tar --remove-files -cvzf %s %s' % (tarfile, scanfile)
        if exists(tarfile) and not self.opts.force:
            logger.warn(" Found %s; skipping..." % tarfile)
        else:
            logger.info(" Tarring likelihood files...")
            logger.info(cmd)
            self.batch.submit(cmd, jobname, logfile)

    if 'plot' in self.opts.run:
        # WARNING: Loading the full 3D healpix map is memory intensive.
        logger.info("Running 'plot'...")
        # Should do this in environment variable
        import matplotlib
        matplotlib.use('Agg')
        import pylab as plt
        import ugali.utils.plotting as plotting

        skymap = ugali.utils.skymap.readSparseHealpixMap(
            self.config.mergefile, 'LOG_LIKELIHOOD')[1]
        plotting.plotSkymap(skymap)
        outdir = mkdir(self.config['output']['plotdir'])
        basename = os.path.basename(self.config.mergefile.replace('.fits', '.png'))
        outfile = os.path.join(outdir, basename)
        plt.savefig(outfile)

    if 'check' in self.opts.run:
        # Check the completion fraction
        logger.info("Running 'check'...")
        import fitsio
        import numpy as np
        import healpy as hp
        from ugali.utils.skymap import inFootprint

        # Load the ROI file
        roi = fitsio.read(self.config.roifile)
        done = roi['PIXEL']

        # Get all target pixels
        nside = self.config['coords']['nside_likelihood']
        pixels = np.arange(hp.nside2npix(nside))
        pixarea = hp.nside2pixarea(nside, degrees=True)
        foot = pixels[inFootprint(self.config, pixels)]

        # And find the pixels that haven't been processed
        undone = ~np.in1d(foot, done)
        hpxmap = np.zeros(len(pixels))
        hpxmap[foot[undone]] = True

        logger.info("Found %i incomplete pixels with an area of %.1f deg^2." %
                    (hpxmap.sum(), hpxmap.sum() * pixarea))
        hp.write_map('check.fits.gz', hpxmap)
def run(self):
    outdir = mkdir(self.config['output']['simdir'])
    logdir = mkdir(join(outdir, 'log'))

    # Actually copy config instead of re-writing
    shutil.copy(self.config.filename, outdir)
    configfile = join(outdir, os.path.basename(self.config.filename))

    if 'simulate' in self.opts.run:
        logger.info("Running 'simulate'...")

        if self.opts.num is None:
            self.opts.num = self.config['simulator']['njobs']
        for i in range(self.opts.num):
            outfile = join(outdir, self.config['output']['simfile'] % i)
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulator']['script']
            cmd = '%s %s %s --seed %i' % (script, configfile, outfile, i)
            #cmd='%s %s %s'%(script,self.opts.config,outfile)
            self.batch.submit(cmd, jobname, logfile)
            time.sleep(0.1)

    if 'analyze' in self.opts.run:
        logger.info("Running 'analyze'...")
        dirname = self.config['simulate']['dirname']
        catfiles = sorted(glob.glob(join(dirname, self.config['simulate']['catfile'])))
        popfile = join(dirname, self.config['simulate']['popfile'])
        batch = self.config['simulate']['batch']
        for i, catfile in enumerate(catfiles):
            basename = os.path.basename(catfile)
            outfile = join(outdir, basename)
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base

            if exists(outfile) and not self.opts.force:
                msg = "Found %s;" % outfile
                if exists(logfile) and len(self.batch.bfail(logfile)):
                    msg += " failed."
                    logger.info(msg)
                else:
                    msg += " skipping..."
                    logger.info(msg)
                    continue

            script = self.config['simulate']['script']
            cmd = '%s %s -m 0 --rerun -p %s -c %s -o %s' % (
                script, configfile, popfile, catfile, outfile)
            self.batch.max_jobs = batch.get('max_jobs', 200)
            opts = batch.get(self.opts.queue, dict())
            self.batch.submit(cmd, jobname, logfile, **opts)
            time.sleep(0.1)

    if 'sensitivity' in self.opts.run:
        logger.info("Running 'sensitivity'...")

    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")

        filenames = join(outdir, self.config['simulate']['catfile'])
        infiles = sorted(glob.glob(filenames))
        print("Reading %i files..." % len(infiles))
        data = np.concatenate([fitsio.read(f, ext=1) for f in infiles])
        hdr = fitsio.read_header(infiles[0], ext=1)

        outfile = "./merged_sims.fits"
        logger.info("Writing %s..." % outfile)
        fitsio.write(outfile, data, header=hdr, clobber=True)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import ugali.utils.plotting
        import pylab as plt

        plotdir = mkdir(self.config['output']['plotdir'])

        data = fitsio.read(join(outdir, "merged_sims.fits"))
        data = data[~np.isnan(data['ts'])]

        bigfig, bigax = plt.subplots()

        for dist in np.unique(data['fit_distance']):
            logger.info(' Plotting distance: %s' % dist)
            ts = data['ts'][data['fit_distance'] == dist]
            ugali.utils.plotting.drawChernoff(bigax, ts, bands='none', color='gray')

            fig, ax = plt.subplots(1, 2, figsize=(10, 5))
            ugali.utils.plotting.drawChernoff(ax[0], ts, bands='none', pdf=True)
            ugali.utils.plotting.drawChernoff(ax[1], ts)
            fig.suptitle(r'Chernoff ($\mu = %g$)' % dist)
            ax[0].annotate(r"$N=%i$" % len(ts), xy=(0.15, 0.85),
                           xycoords='axes fraction',
                           bbox={'boxstyle': "round", 'fc': '1'})
            basename = 'chernoff_u%g.png' % dist
            outfile = os.path.join(plotdir, basename)
            plt.savefig(outfile)

        bigfig.suptitle('Chernoff!')
        basename = 'chernoff_all.png'
        outfile = os.path.join(plotdir, basename)
        plt.savefig(outfile)

        #idx=np.random.randint(len(data['ts'])-1,size=400)
        #idx=slice(400)
        #ugali.utils.plotting.plotChernoff(data['ts'][idx])
        #ugali.utils.plotting.plotChernoff(data['fit_ts'])
        plt.ion()
help="Slow parameter") parser.add_argument('--ybins', default=10) parser.add_argument('--zpar', default='age', help="Slowest parameter") parser.add_argument('--zbins', default=10) parser.add_argument('--alpha', default=0.1) opts = parser.parse_args() alpha = opts.alpha config = opts.config dirname = 'mcmc_v01' srcmdl = 'srcmdl.yaml' if opts.name: names = [opts.name] else: names = opts.names outdir = mkdir('plots') a = 13.5 z = 0.0001 for name in names: if opts.name is not None: if name.lower() != opts.name.lower(): continue print(name) #ra,dec = params['ra'],params['dec'] #lon,lat = cel2gal(ra,dec) #params['lon'],params['lat'] = lon,lat #params['age'] = a #params['metallicity'] = z #srcmdl = join(dirname,'%s_mcmc.yaml'%name)
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('config')
parser.add_argument('-f', '--force', action='store_true')
parser.add_argument('-s', '--sleep', default=0, type=float)
parser.add_argument('-n', '--njobs', default=15, type=int)
parser.add_argument('-q', '--queue', default='condor')
args = parser.parse_args()

config = yaml.load(open(args.config))
hpxdir = config['hpxdir']
mjdfile = config.get('mjdfile', 'data/desdm_mjd.fits')

for band in config['bands']:
    indir = os.path.join(hpxdir, band)
    infiles = sorted(glob.glob(indir + '/*.fits'))
    logdir = mkdir(os.path.join(indir, 'log'))
    for infile in infiles:
        if not args.force:
            fits = fitsio.FITS(infile)
            colname = 'MJD_OBS'
            if colname in fits[1].get_colnames():
                print("Found column '%s'; skipping %s..." % (
                    colname, os.path.basename(infile)))
                continue

        logbase = ('mjd_' + os.path.basename(infile)).replace('.fits', '.log')
        logfile = os.path.join(logdir, logbase)
        params = dict(infile=infile, mjdfile='-m %s' % mjdfile,
def check_nan(data):
    print("Checking for NaNs...")
    for name in data.dtype.names:
        nans = np.isnan(data[name]).sum()
        if nans:
            print(name, nans)

if __name__ == "__main__":
    import argparse
    description = "python script"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-p', '--pix', default=2585, type=int)
    parser.add_argument('-s', '--skymap', action='store_true')
    opts = parser.parse_args()

    pltdir = mkdir('plots/')

    if opts.skymap:
        print("Creating skymap...")
        infiles = glob.glob('cat/*/cat_hpx*.fits')
        cts, im = plot_skymap(infiles)
        plt.savefig(pltdir + 'skymap.png', bbox_inches='tight')

    infiles = glob.glob('hpx/*/hpx*%i.fits' % opts.pix)
    print("Loading SE files: %s" % infiles)
    data = catalog.load_infiles(infiles, catalog.INPUT_COLS + ['EXPNUM'])
    good = catalog.good_objects(data)

    infile = glob.glob('cat/cat_hpx_*%i.fits' % opts.pix)[0]
    print("Loading COADD file: %s" % infile)
    coadd = fitsio.read(infile)
    return pixel, ret

if __name__ == "__main__":
    import argparse
    description = "python script"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('-p', '--pix', default=None, type=int, action='append')
    parser.add_argument('-n', '--nside', default=16, type=int)
    opts = parser.parse_args()

    nside = opts.nside
    npix = healpy.nside2npix(nside)

    outdir = mkdir('release/compare_gold')
    y1a1dir = 'y1a1/v1/hpx'
    y2q1dir = 'cat/'

    if opts.pix is not None:
        pixels = sorted([p for p in opts.pix
                         if len(glob.glob(y1a1dir + '/*%05d.fits' % p))])
    else:
        pixels = sorted([p for p in range(npix)
                         if len(glob.glob(y1a1dir + '/*%05d.fits' % p))
                         and len(glob.glob(y2q1dir + '/*%05d.fits' % p))])

    if len(pixels) == 0:
        msg = "Invalid pixel: %s" % opts.pix
def run(self):
    outdir = mkdir(self.config['output']['simdir'])
    logdir = mkdir(join(outdir, 'log'))

    if 'simulate' in self.opts.run:
        logger.info("Running 'simulate'...")

        if self.opts.num is None:
            self.opts.num = self.config['simulator']['njobs']
        for i in range(self.opts.num):
            outfile = join(outdir, self.config['output']['simfile'] % i)
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulator']['script']
            cmd = '%s %s %s --seed %i' % (script, self.opts.config, outfile, i)
            #cmd='%s %s %s'%(script,self.opts.config,outfile)
            self.batch.submit(cmd, jobname, logfile)
            time.sleep(0.1)

    if 'analyze' in self.opts.run:
        logger.info("Running 'analyze'...")
        dirname = self.config['simulate']['dirname']
        catfiles = sorted(glob.glob(join(dirname, self.config['simulate']['catfile'])))
        popfile = join(dirname, self.config['simulate']['popfile'])
        batch = self.config['simulate']['batch']
        for i, catfile in enumerate(catfiles):
            basename = os.path.basename(catfile)
            outfile = join(outdir, basename)
            if exists(outfile) and not self.opts.force:
                logger.info("Found %s; skipping..." % outfile)
                continue
            base = splitext(os.path.basename(outfile))[0]
            logfile = join(logdir, base + '.log')
            jobname = base
            script = self.config['simulate']['script']
            cmd = '%s %s -p %s -c %s -o %s' % (
                script, self.opts.config, popfile, catfile, outfile)
            self.batch.max_jobs = batch.get('max_jobs', 200)
            opts = batch.get(self.opts.queue, dict())
            self.batch.submit(cmd, jobname, logfile, **opts)
            time.sleep(0.1)

    if 'sensitivity' in self.opts.run:
        logger.info("Running 'sensitivity'...")

    if 'merge' in self.opts.run:
        logger.info("Running 'merge'...")

        filenames = join(outdir, self.config['output']['simfile']).split('_%')[0] + '_*'
        infiles = sorted(glob.glob(filenames))

        f = fitsio.read(infiles[0])
        table = np.empty(0, dtype=f.dtype)
        for filename in infiles:
            logger.debug("Reading %s..." % filename)
            d = fitsio.read(filename)
            t = d[~np.isnan(d['ts'])]
            table = recfuncs.stack_arrays([table, t], usemask=False, asrecarray=True)
        logger.info("Found %i simulations." % len(table))

        outfile = join(outdir, "merged_sims.fits")
        logger.info("Writing %s..." % outfile)
        fitsio.write(outfile, table, clobber=True)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        import ugali.utils.plotting
        import pylab as plt

        plotdir = mkdir(self.config['output']['plotdir'])

        data = fitsio.read(join(outdir, "merged_sims.fits"))
        data = data[~np.isnan(data['ts'])]

        bigfig, bigax = plt.subplots()

        for dist in np.unique(data['fit_distance']):
            logger.info(' Plotting distance: %s' % dist)
            ts = data['ts'][data['fit_distance'] == dist]
            ugali.utils.plotting.drawChernoff(bigax, ts, bands='none', color='gray')

            fig, ax = plt.subplots(1, 2, figsize=(10, 5))
            ugali.utils.plotting.drawChernoff(ax[0], ts, bands='none', pdf=True)
            ugali.utils.plotting.drawChernoff(ax[1], ts)
            fig.suptitle(r'Chernoff ($\mu = %g$)' % dist)
            ax[0].annotate(r"$N=%i$" % len(ts), xy=(0.15, 0.85),
                           xycoords='axes fraction',
                           bbox={'boxstyle': "round", 'fc': '1'})
            basename = 'chernoff_u%g.png' % dist
            outfile = os.path.join(plotdir, basename)
            plt.savefig(outfile)

        bigfig.suptitle('Chernoff!')
        basename = 'chernoff_all.png'
        outfile = os.path.join(plotdir, basename)
        plt.savefig(outfile)

        #idx=np.random.randint(len(data['ts'])-1,size=400)
        #idx=slice(400)
        #ugali.utils.plotting.plotChernoff(data['ts'][idx])
        #ugali.utils.plotting.plotChernoff(data['fit_ts'])
        plt.ion()
parser.add_argument('--ypar', default='distance_modulus', help="Slow parameter")
parser.add_argument('--ybins', default=10, type=int)
parser.add_argument('--zpar', default='age', help="Slowest parameter")
parser.add_argument('--zbins', default=10, type=int)
parser.add_argument('--alpha', default=0.1, type=float)
opts = parser.parse_args()

alpha = opts.alpha
config = opts.config
dirname = 'mcmc_v01'
srcmdl = 'srcmdl.yaml'

if opts.name:
    names = [opts.name]
else:
    names = opts.names

outdir = mkdir('plots')

a = 13.5    # age (Gyr)
z = 0.0001  # metallicity

for name in names:
    if opts.name is not None:
        if name.lower() != opts.name.lower():
            continue
    print(name)

    #ra,dec = params['ra'],params['dec']
    #lon,lat = cel2gal(ra,dec)
    #params['lon'],params['lat'] = lon,lat
    #params['age'] = a
    #params['metallicity'] = z
    #srcmdl = join(dirname,'%s_mcmc.yaml'%name)
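For context, the 'scan' stage of the MCMC pipeline below builds invocations of this script roughly as follows (a sketch inferred from the parser above; the config filename and target name are hypothetical placeholders):

# Hypothetical invocation matching the parser; 45x45 grids over two
# of the slow parameters, as in the pipeline 'scan' stage.
cmd = ('python lnlscan.py %s --name %s --xpar %s --xbins 45 '
       '--ypar %s --ybins 45' % ('config.yaml', 'NAME',
                                 'age', 'metallicity'))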
parser.add_argument('-o', '--outbase', default='photo')
parser.add_argument('-p', '--pix', default=None, type=int, action='append')
parser.add_argument('--type', choices=['gaia', 'rms'], default='gaia')
parser.add_argument('-v', '--verbose', action='store_true')
opts = parser.parse_args()

config = yaml.load(open(opts.config))
OBJECT_ID = config.get('objid', OBJECT_ID)
NSIDE = config['nside']
nside = opts.nside
npix = hp.nside2npix(NSIDE)
catdir = config['catdir']
#catdir = '/data/des40.b/data/des/y3a2/gold/v2.2/healpix'
outdir = mkdir('release/photometry')
outbase = opts.outbase
band = opts.band

hpxmaps = odict()
print('Calculating photometry for %s-band...' % band)

if opts.pix is not None:
    pixels = sorted([p for p in opts.pix
                     if len(glob.glob(catdir + '/*%05d.fits' % p))])
else:
    pixels = sorted([p for p in range(npix)
                     if len(glob.glob(catdir + '/*%05d.fits' % p))])

if len(pixels) == 0:
    msg = "Invalid pixel: %s" % opts.pix
    raise Exception(msg)
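A minimal sketch of the per-pixel read that would typically follow this setup (only fitsio.read and the filename convention are taken from the surrounding code; the loop body is an assumption):

import glob
import fitsio

for pix in pixels:
    # Each healpix pixel has one catalog file matching '*%05d.fits'.
    filename = glob.glob(catdir + '/*%05d.fits' % pix)[0]
    data = fitsio.read(filename)
    print(pix, len(data))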
def run(self):
    if self.opts.coords is not None:
        coords = self.opts.coords
        names = vars(self.opts).get('names', len(coords) * [''])
    else:
        names, coords = self.parser.parse_targets(self.config.candfile)
    labels = [n.lower().replace(' ', '_').replace('(', '').replace(')', '')
              for n in names]

    self.outdir = mkdir(self.config['output']['mcmcdir'])
    self.logdir = mkdir(join(self.outdir, 'log'))

    args = list(zip(len(names) * [self.opts.config], names, labels, coords))

    if 'mcmc' in self.opts.run:
        logger.info("Running 'mcmc'...")
        try:
            shutil.copy(self.opts.config, self.outdir)
        except Exception as e:
            # str(e) instead of e.message (removed in Python 3)
            logger.warning(str(e))
        for config, name, label, coord in args:
            glon, glat, radius = coord
            outfile = make_filenames(self.config, label)['samfile']
            base = splitext(basename(outfile))[0]
            logfile = join(self.logdir, base + '.log')
            jobname = base
            script = self.config['mcmc']['script']
            nthreads = self.config['mcmc']['nthreads']
            srcmdl = self.config['mcmc'].get('srcmdl')
            if srcmdl is not None:
                try:
                    shutil.copy(srcmdl, self.outdir)
                except Exception as e:
                    logger.warning(str(e))
                logger.info('%s (%s)' % (name, srcmdl))
                cmd = '%s %s --name %s --srcmdl %s %s' % (
                    script, self.opts.config, name, srcmdl, outfile)
            else:
                logger.info('%s (%.4f,%.4f)' % (name, glon, glat))
                cmd = '%s %s --name %s --gal %.4f %.4f --grid %s' % (
                    script, self.opts.config, name, glon, glat, outfile)
            logger.info(cmd)
            self.batch.submit(cmd, jobname, logfile, n=nthreads, a='mpirun')

    if 'results' in self.opts.run:
        logger.info("Running 'results'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_results, args)
        else:
            do_results(*args)

    if 'membership' in self.opts.run:
        logger.info("Running 'membership'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_membership, args)
        else:
            do_membership(*args)

    if 'plot' in self.opts.run:
        logger.info("Running 'plot'...")
        if len(args) > 1:
            pool = Pool(maxtasksperchild=1)
            pool.map(do_plot, args)
            #map(do_plot,args)
        else:
            do_plot(*args)

    if 'collect' in self.opts.run:
        logger.info("Running 'collect'...")
        results = odict()
        srcmdl = odict()
        params = odict()
        for config, name, label, coord in args:
            srcfile = make_filenames(self.config, name)['srcfile']
            # Load the source file once instead of three times.
            d = yaml.load(open(srcfile))
            results[name] = d['results']
            srcmdl[name] = d['source']
            params[name] = d['params']

        for base, output in [('results.yaml', results),
                             ('srcmdl.yaml', srcmdl),
                             ('params.yaml', params)]:
            outfile = join(self.outdir, base)
            out = open(outfile, 'w')
            out.write(yaml.dump(output))
            out.close()

    if 'scan' in self.opts.run:
        logger.info("Running 'scan'...")
        for config, name, label, coord in args:
            logdir = mkdir('plots/log')
            # Fill in the filename template (the '%' substitution was missing).
            logfile = join(logdir, '%s_lnlscan.log' % label)
            # One 2D likelihood scan per parameter pair.
            for xpar, ypar in [('age', 'metallicity'),
                               ('metallicity', 'distance_modulus'),
                               ('age', 'distance_modulus')]:
                cmd = ('python lnlscan.py %s --name %s --xpar %s --xbins 45 '
                       '--ypar %s --ybins 45' % (self.opts.config, name,
                                                 xpar, ypar))
                self.batch.submit(cmd, logfile=logfile)
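The Pool(maxtasksperchild=1) pattern above gives every task a fresh worker process, which helps when tasks leak resources (e.g. matplotlib figures in do_plot). A minimal standalone sketch (do_work and the argument tuples are hypothetical stand-ins):

from multiprocessing import Pool

def do_work(arg):
    # Stand-in for do_results/do_membership/do_plot; each call runs in
    # a worker that is recycled after a single task.
    config, name, label, coord = arg
    return name

if __name__ == '__main__':
    args = [('config.yaml', 'A', 'a', (0.0, 0.0, 0.5)),
            ('config.yaml', 'B', 'b', (1.0, 1.0, 0.5))]
    pool = Pool(maxtasksperchild=1)
    print(pool.map(do_work, args))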
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('config')
    parser.add_argument('-f', '--force', action='store_true')
    parser.add_argument('-s', '--sleep', default=1, type=float)
    parser.add_argument('-n', '--njobs', default=10, type=int)
    parser.add_argument('-q', '--queue', default='local')
    args = parser.parse_args()

    config = yaml.load(open(args.config))
    catdir = config['catdir']
    bands = config['bands']

    infiles = sorted(glob.glob(catdir + '/*.fits'))
    logdir = mkdir(os.path.join(catdir, 'log'))

    for col in COLUMNS:
        for b in bands:
            logbase = 'add_%s_sfd_%s.log' % (col.lower(), b.lower())
            logfile = os.path.join(logdir, logbase)
            params = dict(band=b.upper(),
                          infiles=' '.join(infiles),
                          force='-f' if args.force else '',
                          logfile=logfile,
                          column=col)
            params['outcol'] = '%(column)s_SFD_%(band)s' % params
            print("Adding %(outcol)s..." % params)
            # The nested use of single and double quotes is really nasty
            # with csub from the shell, and is (nearly?) impossible from
            # a python call to subprocess.
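One way around the quoting problem that the comment describes is to skip the shell entirely and hand subprocess an argument list, so no quoting layer is involved (the script name and flags below are hypothetical, not the actual csub invocation):

import subprocess

# Each list element arrives as a single argv entry, so embedded spaces
# and quotes in values need no escaping.
cmd = ['python', 'add_sfd_column.py', '--band', 'G',
       '--outcol', 'EBV_SFD_G']
subprocess.check_call(cmd)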