def __init__(self, strip, part='a', mode='full'):
    assert(part in ['a', 'b'])
    assert(mode in ['light', 'full'])
    self.strip = strip
    self.part = part
    self.mode = mode
    # Where are the input catalogues?
    self.datapath = os.path.join(constants.DESTINATION, 'seamed')
    # Where to write the output?
    self.destination = os.path.join(constants.DESTINATION, 'concatenated')
    # Setup the destination directory
    if mode == 'light':
        self.destination = os.path.join(self.destination, 'light')
    else:
        self.destination = os.path.join(self.destination, 'full')
    util.setup_dir(self.destination)
    util.setup_dir(self.destination+'-compressed')
    log.info('Reading data from {0}'.format(self.datapath))
    # Limits
    self.lon1 = strip
    self.lon2 = strip + constants.STRIPWIDTH
    self.fieldlist = self.get_fieldlist()
def prepare_images(clusterview):
    # Make sure the output directory exists
    util.setup_dir(constants.PATH_IMAGES)
    metadata = []
    for band in ['halpha', 'r', 'i']:
        log.info('Starting with band {0}'.format(band))
        # Retrieve the list of runs
        if band == 'halpha':
            idx_band = 'ha'
        else:
            idx_band = band
        # [constants.IPHASQC_COND_RELEASE]
        runs = constants.IPHASQC['run_'+idx_band]
        # Prepare each run
        result = clusterview.map(prepare_one, runs, block=True)
        metadata.extend(result)
    # Write the metadata to a table
    mycolumns = (str('filename'), str('run'), str('ccd'), str('in_dr2'),
                 str('ra'), str('dec'),
                 str('ra_min'), str('ra_max'),
                 str('dec_min'), str('dec_max'),
                 str('band'), str('utstart'), str('exptime'),
                 str('seeing'), str('elliptic'), str('skylevel'),
                 str('skynoise'), str('airmass'), str('photzp'),
                 str('confmap'))
    rows = list(itertools.chain.from_iterable(metadata))  # flatten list
    t = table.Table(rows, names=mycolumns)
    table_filename = os.path.join(constants.PATH_IMAGES, 'iphas-images.fits')
    t.write(table_filename, format='fits', overwrite=True)
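
# A quick-look sketch (not part of the pipeline itself): read back the
# iphas-images.fits metadata table written by prepare_images() above.
# The column selection is purely illustrative.
def example_inspect_image_metadata():
    """Hypothetical helper: print a few rows of the image metadata table."""
    from astropy import table
    t = table.Table.read(os.path.join(constants.PATH_IMAGES,
                                      'iphas-images.fits'))
    print(t['run', 'ccd', 'band'][0:5])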
def _spatial_plot(self, l, b, shifts, name, title=''):
    """Creates a spatial plot of l/b against shifts."""
    plotdir = os.path.join(CALIBDIR, 'plots')
    util.setup_dir(plotdir)
    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(0.06, 0.15, 0.97, 0.9)
    p = fig.add_subplot(111)
    p.set_title(title)
    scat = p.scatter(l, b, c=shifts, vmin=-0.13, vmax=+0.13,
                     edgecolors='none', s=7, marker='h')
    plt.colorbar(scat)
    p.set_xlim([28, 217])
    p.set_ylim([-5.2, +5.2])
    p.set_xlabel('l')
    p.set_ylabel('b')
    path = os.path.join(plotdir, self.band+'-'+name+'-without-anchors.png')
    fig.savefig(path, dpi=200)
    log.info('Wrote {0}'.format(path))
    # Indicate anchors
    p.scatter(IPHASQC['l'][IPHASQC_COND_RELEASE][self.anchors],
              IPHASQC['b'][IPHASQC_COND_RELEASE][self.anchors],
              edgecolors='black', facecolor='none',
              s=15, marker='x', alpha=0.9, lw=0.3)
    path = os.path.join(plotdir, self.band+'-'+name+'-with-anchors.png')
    fig.savefig(path, dpi=200)
    log.info('Wrote {0}'.format(path))
    plt.close()
    return fig
def save(self):
    """Save the ccd image to a new file."""
    directory = os.path.join(constants.PATH_IMAGES,
                             'r'+str(self.run)[0:3])
    util.setup_dir(directory)
    target = os.path.join(directory, self.output_filename)
    # checksum=True adds the CHECKSUM and DATASUM keywords
    self.hdu.writeto(target, clobber=True, checksum=True)
def __init__(self):
    self.datadir = constants.PATH_BANDMERGED
    self.outdir = constants.PATH_BANDMERGED_CALIBRATED
    util.setup_dir(self.outdir)
    # Read the calibration information into a dictionary
    self.calib = {}
    for band in constants.BANDS:
        calib_file = os.path.join(CALIBDIR,
                                  'calibration-{0}.csv'.format(band))
        self.calib[band] = ascii.read(calib_file)
def calibrate():
    """Calibrates all bands in the survey.

    Produces files called "calibration-{r,i,ha}.csv" which tabulate the
    zeropoint shifts to be *added* to each exposure.
    """
    # Make sure the output directory exists
    util.setup_dir(CALIBDIR)
    # Calibrate each band in the survey
    for band in constants.BANDS:
        calibrate_band(band)
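
# A hedged sketch of how the calibration-{band}.csv shifts might be consumed
# downstream: look up the shift for an exposure and *add* it to the
# magnitudes, as the docstring above specifies.  The column names 'run' and
# 'shift' are assumptions made for illustration only; the actual layout is
# defined by calibrate_band().  CALIBDIR is the module-level calibration
# directory used above.
def example_apply_shift(band, run, magnitudes, calibdir=CALIBDIR):
    """Hypothetical helper: return magnitudes with the run's shift added."""
    import os
    from astropy.io import ascii
    calib = ascii.read(os.path.join(calibdir,
                                    'calibration-{0}.csv'.format(band)))
    shifts = dict(zip(calib['run'], calib['shift']))  # assumed columns
    return [m + shifts[run] for m in magnitudes]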
def calibrate_multiprocessing():
    """Calibrates all bands in the survey.

    Produces files called "calibration-{r,i,ha}.csv" which tabulate the
    zeropoint shifts to be *added* to each exposure.
    """
    # Make sure the output directory exists
    util.setup_dir(CALIBDIR)
    # Calibrate each band in the survey
    from multiprocessing import Pool
    pool = Pool(2)
    pool.map(calibrate_band, ['r', 'i'])
    calibrate_band('ha')  # H-alpha depends on output of r
def plot_calibrated_fields():
    inputdir = constants.PATH_BANDMERGED
    outputdir = os.path.join(CALIBDIR, 'diagrams')
    util.setup_dir(outputdir)
    ca = CalibrationApplicator()
    args = []
    for i, field in enumerate(IPHASQC['id'][IPHASQC_COND_RELEASE]):
        args.append((field, inputdir, outputdir,
                     ca.get_field_shifts(field)))
    log.info('Starting to plot {0} fields'.format(len(args)))
    from multiprocessing import Pool
    mypool = Pool(4)
    mypool.map(plot_field, args)
def compute_offsets_band(clusterview, band,
                         destination=os.path.join(constants.DESTINATION,
                                                  'calibration')):
    """Computes magnitude offsets between all overlapping runs in a given band.

    The output is a file called offsets-{band}.csv which contains the columns

        run1   -- reference exposure (telescope run number)
        run2   -- comparison exposure (telescope run number)
        offset -- median(run1_magnitudes - run2_magnitudes)
        std    -- stdev(run1_magnitudes - run2_magnitudes)
        n      -- number of crossmatched stars used in computing offset/std.

    Parameters
    ----------
    clusterview : cluster view derived from e.g. IPython.parallel.Client()[:]
        Work will be spread across the nodes in this cluster view.

    band : string
        One of 'r', 'i', 'ha'.

    destination : string
        Directory where the output csv file will be written.
    """
    assert(band in constants.BANDS)
    log.info('Starting to compute offsets for band {0}'.format(band))

    # Write the results
    util.setup_dir(destination)
    filename = os.path.join(destination, 'offsets-{0}.csv'.format(band))
    out = open(filename, 'w')
    out.write('run1,run2,offset,std,n\n')

    # Distribute the work across the cluster
    runs = IPHASQC['run_'+str(band)][constants.IPHASQC_COND_RELEASE]
    np.random.shuffle(runs)  # Avoid one node getting all the crowded fields
    #runs = IPHASQC['run_'+str(band)]
    results = clusterview.imap(offsets_one, runs)

    # Write offsets to the CSV file as the results are returned
    i = 0
    for offsets in results:
        i += 1
        for row in offsets:
            if row is not None:
                out.write('{run1},{run2},{offset},{std},{n}\n'.format(**row))
        # Print a friendly status message once in a while
        if (i % 100) == 0:
            log.info('Completed run {0}/{1}'.format(i, len(runs)))
            out.flush()
    out.close()
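
# A usage sketch (not from the pipeline itself): compute_offsets_band expects
# an IPython.parallel cluster view, in the same way as the debugging block
# used for prepare_images() elsewhere in this package.  An already-running
# ipcluster is assumed; the driver function below is hypothetical.
def example_compute_all_offsets():
    """Hypothetical driver: compute offsets for every band."""
    from IPython.parallel import Client
    clusterview = Client()[:]
    for band in ['r', 'i', 'ha']:
        compute_offsets_band(clusterview, band)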
def bandmerge(clusterview):
    """Band-merge all fields."""
    util.setup_dir(MYDESTINATION)

    # Spread the work across the cluster
    field_ids = IPHASQC.field('id')
    results = clusterview.imap(bandmerge_one, field_ids)

    # Print a friendly message once in a while
    i = 0
    for status in results:
        i += 1
        if (i % 1000) == 0:
            log.info('Completed field {0}/{1}'.format(i, len(field_ids)))

    log.info('Bandmerging finished')
def plot_anchors():
    """Plots diagrams of the anchors."""
    # Setup output directory
    inputdir = constants.PATH_BANDMERGED
    outputdir = os.path.join(CALIBDIR, 'anchors')
    util.setup_dir(outputdir)

    # Which fields to plot?
    anchorlist = ascii.read(os.path.join(CALIBDIR, 'anchors-r-initial.csv'))
    anchor_runs = anchorlist['run'][anchorlist['is_anchor'] == 'True']
    fields = [util.run2field(myrun, 'r') for myrun in anchor_runs]

    # Distribute work
    log.info('Starting to plot {0} anchors'.format(len(fields)))
    from multiprocessing import Pool
    mypool = Pool(4)
    mypool.map(plot_field,
               zip(fields,
                   [inputdir]*len(fields),
                   [outputdir]*len(fields),
                   [{'r': 0.0, 'i': 0.0, 'ha': 0.0}]*len(fields)))
mycolumns = (str('filename'), str('band'), str('dr2'), str('run'),
             str('ccd'), str('field'), str('ra'), str('dec'),
             str('ra1'), str('ra2'), str('dec1'), str('dec2'),
             str('photzp'), str('exptime'), str('time'))
rows = list(itertools.chain.from_iterable(metadata))  # flatten list
t = table.Table(rows, names=mycolumns)
table_filename = os.path.join(constants.PATH_IMAGES, 'iphas-images.fits')
t.write(table_filename, format='fits', overwrite=True)


################################
# MAIN EXECUTION (FOR DEBUGGING)
################################

if __name__ == '__main__':
    """
    from IPython.parallel import client
    client = client.client.Client()
    with client[:].sync_imports():
        from dr2.images import SurveyImage
        from dr2 import constants
        from dr2 import util
        from astropy import log
        from astropy.io import fits
        import os
    prepare_images(client[:])
    """
    util.setup_dir(os.path.join(constants.PATH_IMAGES, 'halpha'))
    prepare_one(571408)
            conserves[result['id']] = console_props

        hnodes_with_consoles = {}
        results = transactional.get_hnodes_with_consoles(db)
        for result in results:
            hnodes_with_consoles[result['handle']] = \
                {'conserver': conserves[result['id']],
                 'port': result['console_port']}
    except (RuntimeError, SystemExit):
        raise
    except Exception, e:
        logger.error("generate: failed pulling data - %s", e)
        raise

    try:
        util.setup_dir(write_loc)
    except (RuntimeError, SystemExit):
        raise
    except Exception, e:
        logger.error("generate: failed ensuring write directory %r: %s",
                     write_loc, e)
        raise

    try:
        fp = open(os.path.join(write_loc, 'conserver.cf'), 'w')
        fp.write(COMMON)
        for k, v in conserves.items():
            print >> fp, 'default %s {' % v['fqdn'].split('.', 1)[0]
            print >> fp, '\tinclude %s' % v['include']
            print >> fp, '\thost %s' % v['host']
            print >> fp, '\tbaud %s' % v['baud']
            print >> fp, '}'
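
# For reference, each stanza emitted by the loop above has this shape in the
# generated conserver.cf (the values shown are illustrative placeholders,
# not real hosts):
#
#   default console01 {
#       include standard-settings
#       host ts01.example.com
#       baud 9600
#   }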