def scramble(expnums, ccd, version='p', dry_run=False):
    """Scramble the EXPNUM/MJD-OBS headers among a triplet of exposures and
    store the results as the 's' version of each image."""
    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out the values to replace in the headers; copy the values,
        # as otherwise we get pointers into the original headers.
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [0, 2, 1]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'],
                                            expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        if dry_run:
            continue
        storage.copy(fname, uri)
    return
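# Usage sketch for scramble() above; the exposure numbers and CCD are
# hypothetical stand-ins for a processed MegaPrime triplet. With
# dry_run=True the scrambled 's' FITS files are written locally but not
# copied back to VOSpace:
#
#     scramble([1616681, 1616692, 1616703], ccd=22, version='p', dry_run=True)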
def mkpsf(expnum, ccd):
    """Run the OSSOS makepsf script."""
    ## get image from the vospace storage area
    filename = storage.get_image(expnum, ccd, version='p')
    logging.info("Running mkpsf on %s %d" % (expnum, ccd))
    ## launch the makepsf script
    util.exec_prog(['jmpmakepsf.csh', './', filename, 'no'])

    ## place the results into VOSpace
    basename = os.path.splitext(filename)[0]

    ## confirm destination directory exists.
    destdir = os.path.dirname(
        storage.dbimages_uri(expnum, ccd, version='p', ext='fits'))
    logging.info("Checking that destination directories exist")
    storage.mkdir(destdir)

    for ext in ('mopheader', 'psf.fits',
                'zeropoint.used', 'apcor', 'fwhm', 'phot'):
        dest = storage.dbimages_uri(expnum, ccd, version='p', ext=ext)
        source = basename + "." + ext
        storage.copy(source, dest)
    return
def cutout(member, object_name, image, ra, dec, radius, ra_rate, dec_rate):
    """Cut a postage stamp around (ra, dec) and copy it to VOSpace."""
    global vos_dir
    # Planned to sort stamps on VOS into groups, but this didn't happen,
    # so currently everything is saved to /hasfamily.
    if member == 'none':
        vos_dir = '{}/nofamily'.format(_VOS_PATH)
    else:
        vos_dir = '{}/hasfamily'.format(_VOS_PATH)

    radius = radius * units.deg
    expnum = image.strip('p')  # only use processed images
    target = storage.get_uri(expnum)
    coord = SkyCoord(unit="deg", ra=ra, dec=dec)

    hdulist = storage.ra_dec_cutout(target, coord, radius)

    # Put image info into the header for later use.
    hdulist[0].header['OBJNUM'] = (object_name, 'object')
    hdulist[0].header['RA'] = (ra, 'degrees')
    hdulist[0].header['DEC'] = (dec, 'degrees')
    hdulist[0].header['RARATE'] = (ra_rate, 'arcsec/hr')
    hdulist[0].header['DECRATE'] = (dec_rate, 'arcsec/hr')

    postage_stamp_filename = "{}_{}_{:8f}_{:8f}.fits".format(
        object_name, image, float(ra), float(dec))
    print(postage_stamp_filename)

    hdulist.writeto("{}/{}".format(_STAMPS_DIR, postage_stamp_filename),
                    output_verify='warn', clobber=True)
    del hdulist

    try:
        storage.copy('{}/{}'.format(_STAMPS_DIR, postage_stamp_filename),
                     '{}/{}'.format(vos_dir, postage_stamp_filename))
    except Exception as e:
        print(e)
def cutout(obj, obj_dir, radius, username, password):
    """Pull a postage stamp for each observation of obj via the VOSpace cutout service."""
    for obs in obj.mpc_observations:  # FIXME: TESTING ONLY
        if obs.null_observation:
            continue
        expnum = obs.comment.frame.split('p')[0]  # only want calibrated images
        # Using the WCS rather than the X/Y, as the X/Y can be unreliable
        # on a long-term basis.
        this_cutout = "CIRCLE ICRS {} {} {}".format(obs.coordinate.ra.degree,
                                                    obs.coordinate.dec.degree,
                                                    radius)
        print(this_cutout)

        # FIXME: should be able to use the line below, but a bug in VOSpace
        # requires the direct-access workaround, for now.
        # postage_stamp = storage.get_image(expnum, cutout=cutout)
        target = storage.vospace.fixURI(storage.get_uri(expnum))
        direction = "pullFromVoSpace"
        protocol = "ivo://ivoa.net/vospace/core#httpget"
        view = "cutout"
        params = {"TARGET": target,
                  "PROTOCOL": protocol,
                  "DIRECTION": direction,
                  "cutout": this_cutout,
                  "view": view}
        r = requests.get(BASEURL, params=params, auth=(username, password))
        r.raise_for_status()  # confirm the connection worked as hoped

        postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(
            obj.provisional_name,
            obs.date.mjd,
            obs.coordinate.ra.degree,
            obs.coordinate.dec.degree)
        logging.info("{}".format(postage_stamp_filename))

        with open(postage_stamp_filename, 'w') as tmp_file:
            tmp_file.write(r.content)
        storage.copy(postage_stamp_filename,
                     obj_dir + "/" + postage_stamp_filename)
        os.unlink(postage_stamp_filename)  # easier not to have them hanging around
def mkpsf(expnum, ccd, version, dry_run=False, prefix=""):
    """Run the OSSOS jmpmakepsf script."""
    ## confirm destination directory exists.
    destdir = os.path.dirname(
        storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version,
                             ext='fits'))
    if not dry_run:
        storage.mkdir(destdir)

    ## get image from the vospace storage area
    logging.info("Getting file from VOSpace")
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    logging.info("Running mkpsf on %s %d" % (expnum, ccd))

    ## launch the makepsf script
    logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename,
                                 'yes', 'yes']))
    if dry_run:
        return

    ## place the results into VOSpace
    basename = os.path.splitext(filename)[0]
    for ext in ('mopheader', 'psf.fits',
                'zeropoint.used', 'apcor', 'fwhm', 'phot'):
        dest = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                    version=version, ext=ext)
        source = basename + "." + str(ext)
        storage.copy(source, dest)
    return
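# Usage sketch for the dry_run/prefix variant of mkpsf(); the values are
# hypothetical. A dry run fetches the image and builds the PSF products
# locally without copying anything back to VOSpace:
#
#     mkpsf(1616681, ccd=22, version='p', dry_run=True, prefix='')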
def mkpsf(expnum, ccd, fversion):
    """Run the OSSOS makepsf script."""
    ## get image from the vospace storage area
    filename = storage.get_image(expnum, ccd, version=fversion)
    logging.info("Running mkpsf on %s %d" % (expnum, ccd))
    ## launch the makepsf script
    util.exec_prog(['jmpmakepsf.csh', './', filename, 'no'])

    ## place the results into VOSpace
    basename = os.path.splitext(filename)[0]

    ## confirm destination directory exists.
    destdir = os.path.dirname(
        storage.dbimages_uri(expnum, ccd, version=fversion, ext='fits'))
    logging.info("Checking that destination directories exist")
    storage.mkdir(destdir)

    for ext in ('mopheader', 'psf.fits',
                'zeropoint.used', 'apcor', 'fwhm', 'phot'):
        dest = storage.dbimages_uri(expnum, ccd, version=fversion, ext=ext)
        source = basename + "." + ext
        logging.info("Copying %s -> %s" % (source, dest))
        storage.remove(dest)
        storage.copy(source, dest)
    return
def mk_mopheader(expnum, ccd, version, dry_run=False, prefix=""):
    """Run the OSSOS mopheader script."""
    ## confirm destination directory exists.
    destdir = os.path.dirname(
        storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version,
                             ext='fits'))
    if not dry_run:
        storage.mkdir(destdir)

    ## get image from the vospace storage area
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    logging.info("Running mopheader on %s %d" % (expnum, ccd))

    ## launch the mopheader script
    # strip the .fits extension to get the exposure base name
    # (os.path.splitext avoids str.strip's character-set pitfall)
    expname = os.path.splitext(os.path.basename(filename))[0]
    logging.info(util.exec_prog(['stepZjmp', '-f', expname]))

    mopheader_filename = expname + ".mopheader"
    # mopheader_filename = mopheader.main(filename)
    if dry_run:
        return

    destination = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                       version=version, ext='mopheader')
    source = mopheader_filename
    storage.copy(source, destination)
    return
def fits_data(object_name, expnum, family_name):
    """Creates local copy of fits file from VOSpace."""
    if family_name == 'none':
        vos_dir = '{}/none'.format(_VOS_DIR)
    else:
        vos_dir = '{}/all'.format(_VOS_DIR)
    assert storage.exists(vos_dir), \
        'Vos directory does not exist, or permissions have expired'

    for fits_file in client.listdir(vos_dir):
        # images named with convention: object_expnum_RA_DEC.fits
        if fits_file.endswith('.fits'):
            objectname_file = fits_file.split('_')[0]
            expnum_file = fits_file.split('_')[1]
            if (expnum_file == expnum) and (objectname_file == object_name):
                file_path = '{}/{}'.format(_STAMPS_DIR, fits_file)
                storage.copy('{}/{}'.format(vos_dir, fits_file), file_path)
                with fits.open(file_path) as hdulist:
                    data = hdulist[0].data
                    header = hdulist[0].header
                os.unlink(file_path)
                return header, data, fits_file
def step1(expnum, ccd, prefix='', version='p', fwhm=4,
          sex_thresh=1.3, wave_thresh=2.7, maxcount=30000):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image, in pixels
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    """
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    mopheader = storage.get_image(expnum, ccd, version=version,
                                  ext='mopheader', prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]

    outfile = util.exec_prog(['step1jmp',
                              '-f', basename,
                              '-t', str(wave_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"
    storage.copy(obj_filename, obj_uri)

    ## for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT', '06Bm02.flat.r.36.01.fits')
    flat_name = flat_name[0:-5]
    flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                                      subdir='calibrators', rescale=False)
    if not os.access('weight.fits', os.R_OK):
        os.symlink(flat_filename, 'weight.fits')

    outfile = util.exec_prog(['step1matt',
                              '-f', basename,
                              '-t', str(sex_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"
    storage.copy(obj_filename, obj_uri)

    return True
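# Usage sketch for step1() above (hypothetical exposure and CCD). Note that
# the fwhm argument is overwritten by the stored .fwhm value, so only the
# thresholds and saturation level normally need tuning:
#
#     step1(1616681, ccd=22, version='p', sex_thresh=1.3, wave_thresh=2.7)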
def main(expnum, ccd):
    header = storage.get_astheader(expnum, ccd)
    datasec = storage.datasec_to_list(header.get('DATASEC',
                                                 '[80:2080,30:4160]'))
    try:
        fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
    except Exception:
        fwhm = 'unknown'
    for keyword in del_keyword_list:
        try:
            del header[keyword]
        except KeyError:
            pass
    header['FWHM'] = (fwhm, 'FWHM in pixels')
    header['EXTNAME'] = 'header'
    primary_hdu = fits.PrimaryHDU(header=header)
    hdu_list = fits.HDUList([primary_hdu, ])
    for ext in ['jmp', 'matt']:
        extension = 'obj.' + ext
        name = "{}p{:02d}.{}".format(expnum, ccd, extension)
        try:
            os.unlink(name)
            os.unlink(name + ".fits")
        except OSError:
            pass
        logging.info("Retrieving {}".format(name))
        obj_file = mop_file.Parser(expnum, ccd, extension)
        obj_file.parse()
        # keep only the sources that fall inside the DATASEC region
        t = numpy.all([datasec[0] < obj_file.data['X'],
                       obj_file.data['X'] < datasec[1],
                       datasec[2] < obj_file.data['Y'],
                       obj_file.data['Y'] < datasec[3]], axis=0)
        logging.info("Sources remaining after datasec cut: {} of {}".format(
            len(obj_file.data[t]['X']), len(t)))
        table_hdu = fits.table_to_hdu(obj_file.data[t])
        table_hdu.header['CATALOG'] = name
        table_hdu.header['EXTNAME'] = ext
        hdu_list.append(table_hdu)
        del table_hdu
        del obj_file
        os.unlink(name)

    name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
    if os.access(name, os.F_OK):
        os.unlink(name)
    hdu_list.writeto(name)
    uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
    logging.info(name + " -> " + uri)
    count = 0
    while True:
        print("Copy attempt {}".format(count))
        try:
            storage.copy(name, uri)
            os.unlink(name)
            break
        except Exception as ex:
            if count > 10:
                raise ex
            count += 1
def create_ascii_table(observation_table, outfile):
    """Given a table of observations create an ascii log file for easy parsing.

    Store the result in outfile (could/should be a vospace dataNode).

    observation_table: astropy.votable.array object
    outfile: str (name of the vospace dataNode to store the result to)
    """
    logging.info("writing text log to %s" % outfile)

    stamp = "#\n# Last Updated: " + time.asctime() + "\n#\n"
    header = "| %20s | %20s | %20s | %20s | %20s | %20s | %20s |\n" % (
        "EXPNUM", "OBS-DATE", "FIELD", "EXPTIME(s)", "RA", "DEC", "RUNID")
    bar = "=" * (len(header) - 1) + "\n"

    if outfile[0:4] == "vos:":
        temp_file = tempfile.NamedTemporaryFile(suffix='.txt')
        fout = temp_file
    else:
        fout = open(outfile, 'w')

    t2 = None
    fout.write(bar + stamp + bar + header)

    populated = storage.list_dbimages()
    for i in range(len(observation_table) - 1, -1, -1):
        row = observation_table.data[i]
        if row['dataset_name'] not in populated:
            storage.populate(row['dataset_name'])
        str_date = str(ephem.date(row.StartDate +
                                  2400000.5 -
                                  ephem.julian_date(ephem.date(0))))[:20]
        t1 = time.strptime(str_date, "%Y/%m/%d %H:%M:%S")
        if t2 is None or math.fabs(time.mktime(t2) - time.mktime(t1)) > 3 * 3600.0:
            fout.write(bar)
        t2 = t1
        ra = str(ephem.hours(math.radians(row.RA)))
        dec = str(ephem.degrees(math.radians(row.DEC)))
        line = "| %20s | %20s | %20s | %20.1f | %20s | %20s | %20s |\n" % (
            str(row.dataset_name),
            str(ephem.date(row.StartDate + 2400000.5 -
                           ephem.julian_date(ephem.date(0))))[:20],
            row.TargetName[:20],
            row.ExposureTime,
            ra[:20],
            dec[:20],
            row.ProposalID[:20])
        fout.write(line)
    fout.write(bar)

    if outfile[0:4] == "vos:":
        fout.flush()
        storage.copy(fout.name, outfile)
    fout.close()
    return
def step3(expnums, ccd, version, rate_min, rate_max, angle, width,
          field=None, prefix=None):
    '''run the actual step3 on the given exp/ccd combo'''
    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
            filename = storage.get_image(expnum,
                                         ccd=ccd,
                                         version=version,
                                         ext=ext,
                                         prefix=prefix)
        image = os.path.splitext(
            os.path.splitext(os.path.basename(filename))[0])[0]
        cmd_args.append('-f%d' % idx)
        cmd_args.append(image)

    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)
    matt_args.extend(cmd_args)
    util.exec_prog(jmp_args)
    util.exec_prog(matt_args)

    if field is None:
        field = str(expnums[0])
    storage.mkdir(os.path.dirname(
        storage.get_uri(field, ccd=ccd, version=version,
                        ext=ext, prefix=prefix)))
    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field, ccd=ccd, version=version,
                              ext=ext, prefix=prefix)
        filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                    str(ccd).zfill(2), ext)
        storage.copy(filename, uri)
    return
def step3(expnums, ccd, version, rate_min, rate_max, angle, width,
          field=None, prefix=None, dry_run=False,
          maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0):
    """run the actual step3 on the given exp/ccd combo"""
    jmp_args = ['step3jmp']
    matt_args = ['step3jjk']

    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        for ext in ['unid.jmp', 'unid.matt']:
            storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                             prefix=prefix)
        image = os.path.splitext(
            os.path.basename(
                storage.get_uri(expnum, ccd, version=version,
                                prefix=prefix)))[0]
        cmd_args.append('-f%d' % idx)
        cmd_args.append(image)

    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)
    # Add some extra arguments for the ISO search.
    cmd_args.extend(['-fr', str(maximum_flux_ratio),
                     '-ma', str(minimum_area),
                     '-mf', str(minimum_median_flux)])
    matt_args.extend(cmd_args)

    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))

    if dry_run:
        return

    if field is None:
        field = str(expnums[0])
    storage.mkdir(os.path.dirname(
        storage.get_uri(field, ccd=ccd, version=version, prefix=prefix)))
    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field, ccd=ccd, version=version,
                              ext=ext, prefix=prefix)
        filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                    str(ccd).zfill(2), ext)
        storage.copy(filename, uri)
    return
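# Usage sketch for the ISO-search variant of step3() above. The triplet,
# CCD, rates (''/hour) and angle/width values are hypothetical:
#
#     step3([1616681, 1616692, 1616703], ccd=22, version='s',
#           rate_min=0.5, rate_max=15.0, angle=20.0, width=30.0,
#           dry_run=True)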
def copy_unconsidered(names):
    """Copy discovery files whose names match one of `names` to a local directory."""
    local_path = '/Users/michele/measure3/2013A-E/genuine_reals_run/tracking/discoveries/'
    discovery_files = storage.listdir(DISCOVERIES)
    for fn in discovery_files:
        for name in names:
            if name in fn:
                print(fn)
                storage.copy(DISCOVERIES + fn, local_path + fn)
    return
def plant(expnums, ccd, rmin, rmax, ang, width, number=10,
          mmin=21.0, mmax=25.5, version='s', dry_run=False):
    """Plant artificial sources into the list of images provided.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    :param ccd: which ccd to work on.
    :param rmin: The minimum rate of motion to add sources at (''/hour)
    :param rmax: The maximum rate of motion to add sources at (''/hour)
    :param ang: The mean angle of motion to add sources
    :param width: The +/- range of angles of motion
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """
    # Construct a list of artificial KBOs with positions in the image
    # and rates of motion within the bounds given by the caller.
    filename = storage.get_image(expnums[0], ccd=ccd, version=version)
    header = fits.open(filename)[0].header
    bounds = util.get_pixel_bounds_from_datasec_keyword(
        header.get('DATASEC', '[33:2080,1:4612]'))

    # generate a set of artificial KBOs to add to the image.
    kbos = KBOGenerator.get_kbos(n=number,
                                 rate=(rmin, rmax),
                                 angle=(ang - width, ang + width),
                                 mag=(mmin, mmax),
                                 x=(bounds[0][0], bounds[0][1]),
                                 y=(bounds[1][0], bounds[1][1]),
                                 filename='Object.planted')

    for expnum in expnums:
        filename = storage.get_image(expnum, ccd, version)
        psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
        plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk")

    if dry_run:
        return

    uri = storage.get_uri('Object', ext='planted', version='',
                          subdir=str(expnums[0]) +
                                 "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy('Object.planted', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version,
                              ext='fits', prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)
    return
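# Usage sketch for plant() above (hypothetical triplet and CCD): plant ten
# artificial KBOs between 21.0 and 25.5 mag, moving 0.5-15 ''/hour at an
# angle of 20 +/- 30, keeping the resulting 'fk' images local:
#
#     plant([1616681, 1616692, 1616703], ccd=22, rmin=0.5, rmax=15.0,
#           ang=20.0, width=30.0, dry_run=True)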
def cutout(obj, obj_dir, radius):
    cutout_listing = storage.listdir(obj_dir, force=True)
    for obs in obj.mpc_observations:
        if obs.null_observation:
            logging.debug('skipping: {}'.format(obs))
            continue
        if obs.date > parameters.SURVEY_START:
            # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no
            # obs.comment exists
            try:
                parts = storage.frame2expnum(obs.comment.frame)
            except Exception as ex:
                logging.warning(f"Skipping: {obs}")
                logging.debug(f"Failed to map comment.frame to expnum: {ex}")
                continue
            uri = storage.get_uri(parts['expnum'], version=parts['version'])
            sky_coord = obs.coordinate
            # Using the WCS rather than the X/Y
            # (X/Y can be unreliable over the whole survey)
            postage_stamp_filename = f"{obj.provisional_name}_" \
                                     f"{obs.date.mjd:11.5f}_" \
                                     f"{obs.coordinate.ra.degree:09.5f}_" \
                                     f"{obs.coordinate.dec.degree:09.5f}.fits"
            if postage_stamp_filename in cutout_listing:
                # skipping existing cutouts
                continue
            # ast_header = storage._get_sghead(parts['expnum'])
            while True:
                try:
                    hdulist = storage.ra_dec_cutout(uri, sky_coord, radius,
                                                    update_wcs=True)
                    with open(postage_stamp_filename, 'w') as tmp_file:
                        hdulist.writeto(tmp_file, overwrite=True,
                                        output_verify='fix+ignore')
                    storage.copy(postage_stamp_filename,
                                 obj_dir + "/" + postage_stamp_filename)
                    # easier not to have them hanging around
                    os.unlink(postage_stamp_filename)
                except OSError as e:
                    # occasionally the node is not found:
                    # report and move on for later cleanup
                    logging.error("OSError: -> " + str(e))
                except Exception as e:
                    logging.error("Exception: -> " + str(e))
                    continue
                break
def update_vos_with_local_files(user_id, vos_dir, dir_to_scan):
    uploaded_count = 0
    donefiles = glob(dir_to_scan + '*.reals.astrom')
    for fname in donefiles:
        fn = fname.rsplit('/')[-1]
        vo_reals = vos_dir + fn
        mv_file = vos_dir + fn.replace('reals', 'cands')
        # check if the file's .cands.astrom equivalent in VOSpace has a #done tag
        wasdone = storage.get_property(mv_file, 'done')
        if not wasdone:
            # shouldn't possibly be there, but let's just make sure
            if not storage.exists(vo_reals):
                storage.copy(fname, vo_reals)
                # set the .cands.astrom #done tag to the user ID.
                storage.set_property(mv_file, 'done', user_id)
                uploaded_count += 1
        else:
            print(fn, wasdone)
    print('Added unique files:', uploaded_count)
    return
def cutout(obj, obj_dir, radius):
    print(len([n for n in obj.mpc_observations if not n.null_observation]))
    for obs in obj.mpc_observations:
        print('starting analysis of {}'.format(str(obs)))
        if obs.null_observation:
            print('skipping')
            continue
        if obs.date > parameters.SURVEY_START:
            # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no obs.comment exists
            try:
                # only want calibrated images
                expnum = obs.comment.frame.split('p')[0].strip(' ')
            except AttributeError:
                print('No comment in this MPC line!')
                continue
            if not expnum.isdigit():
                print('expnum {} parsed from comment line invalid. '
                      'Check comment parsing.\n{}'.format(expnum, str(obs.comment)))
                continue
            uri = storage.get_uri(expnum)
            sky_coord = obs.coordinate
            # Using the WCS rather than the X/Y (X/Y can be unreliable over the whole survey)
            print('Trying {} on {} on {}...'.format(obj.provisional_name,
                                                    obs.date, expnum))
            try:
                hdulist = storage.ra_dec_cutout(uri, sky_coord, radius)
                # if 'ASTLEVEL' not in hdulist[1].header:
                #     # FIXME: activate once all headers are retro-fitted with ASTLEVEL
                #     logging.info('Cutout invalid for use. Skipping inclusion.\n')
                #     continue
                postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(
                    obj.provisional_name,
                    obs.date.mjd,
                    obs.coordinate.ra.degree,
                    obs.coordinate.dec.degree)
                print("{}".format(postage_stamp_filename))
                with open(postage_stamp_filename, 'w') as tmp_file:
                    hdulist.writeto(tmp_file, clobber=True)
                storage.copy(postage_stamp_filename,
                             obj_dir + "/" + postage_stamp_filename)
                os.unlink(postage_stamp_filename)  # easier not to have them hanging around
            except OSError as e:
                # occasionally the node is not found: report and move on for later cleanup
                print(e)
                continue
def cutout(obj, obj_dir, radius):
    cutout_listing = storage.listdir(obj_dir, force=True)
    for obs in obj.mpc_observations:
        if obs.null_observation:
            logging.debug('skipping: {}'.format(obs))
            continue
        if obs.date > parameters.SURVEY_START:
            # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no obs.comment exists
            try:
                parts = storage.frame2expnum(obs.comment.frame)
            except Exception as ex:
                logging.error("Skipping: {}\nFailed to map comment.frame "
                              "to expnum: {}".format(obs, ex))
                continue
            uri = storage.get_uri(parts['expnum'], version=parts['version'])
            sky_coord = obs.coordinate
            # Using the WCS rather than the X/Y (X/Y can be unreliable over the whole survey)
            postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(
                obj.provisional_name,
                obs.date.mjd,
                obs.coordinate.ra.degree,
                obs.coordinate.dec.degree)
            if postage_stamp_filename in cutout_listing:
                # skipping existing cutouts
                continue
            # ast_header = storage._get_sghead(parts['expnum'])
            while True:
                try:
                    hdulist = storage.ra_dec_cutout(uri, sky_coord, radius,
                                                    update_wcs=True)
                    with open(postage_stamp_filename, 'w') as tmp_file:
                        hdulist.writeto(tmp_file, overwrite=True,
                                        output_verify='fix+ignore')
                    storage.copy(postage_stamp_filename,
                                 obj_dir + "/" + postage_stamp_filename)
                    os.unlink(postage_stamp_filename)  # easier not to have them hanging around
                except OSError as e:
                    # occasionally the node is not found: report and move on for later cleanup
                    logging.error("OSError: ->" + str(e))
                except Exception as e:
                    logging.error("Exception: ->" + str(e))
                    continue
                break
def step2(expnums, ccd, version, prefix=None):
    '''run the actual step2 on the given exp/ccd combo'''
    jmp_args = ['step2jmp']
    matt_args = ['step2matt_jmp']

    idx = 0
    for expnum in expnums:
        jmp_args.append(
            storage.get_image(expnum,
                              ccd=ccd,
                              version=version,
                              ext='obj.jmp',
                              prefix=prefix)[0:-8])
        idx += 1
        matt_args.append('-f%d' % idx)
        matt_args.append(
            storage.get_image(expnum,
                              ccd=ccd,
                              version=version,
                              ext='obj.matt',
                              prefix=prefix)[0:-9])

    util.exec_prog(jmp_args)
    util.exec_prog(matt_args)

    for expnum in expnums:
        for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
            uri = storage.dbimages_uri(expnum, ccd=ccd, version=version,
                                       ext=ext, prefix=prefix)
            filename = os.path.basename(uri)
            storage.copy(filename, uri)
    return
def plant(expnums, ccd, rmin, rmax, ang, width, version='s'):
    '''run the plant script on this combination of exposures'''
    ptf = open('proc-these-files', 'w')
    ptf.write("# Files to be planted and searched\n")
    ptf.write("# image fwhm plant\n")
    for expnum in expnums:
        fwhm = storage.get_fwhm(expnum, ccd)
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
        # pre-fetch the support files that plant.csh will need
        for ext in ['apcor', 'obj.jmp', 'trans.jmp', 'psf.fits',
                    'mopheader', 'phot', 'zeropoint.used']:
            apcor = storage.get_image(expnum, ccd=ccd, version='s', ext=ext)
    ptf.close()

    cmd_args = ['plant.csh', os.curdir,
                str(rmin), str(rmax), str(ang), str(width)]
    util.exec_prog(cmd_args)

    if args.dryrun:
        # Don't push back to VOSpace (args is the module-level argparse namespace)
        return

    uri = storage.get_uri('Object', ext='planted', version='',
                          subdir=str(expnums[0]) +
                                 "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy('Object.planted', uri)
    uri = os.path.join(os.path.dirname(uri), 'shifts')
    storage.copy('shifts', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version,
                              ext='fits', prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

        for ext in ['mopheader', 'psf.fits', 'fwhm',
                    'apcor', 'zeropoint.used', 'trans.jmp']:
            storage.delete(expnum, ccd, 's', ext, prefix='fk')
            storage.vlink(expnum, ccd, 'p', ext,
                          expnum, ccd, 's', ext, l_prefix='fk')
    return
def get_fits_data(object_name, expnum_p, family_name, ap, th):
    """Find the image in VOSpace, determine the number of extensions, pass
    the aperture and threshold parameters to the photometry method, and
    return the photometry measurements and header values."""
    try:
        if family_name == 'none':
            vos_dir = '{}/none'.format(_VOS_DIR)
        else:
            vos_dir = '{}/all'.format(_VOS_DIR)
        assert storage.exists(vos_dir)
        for fits_file in client.listdir(vos_dir):
            # images named with convention: object_expnum_RA_DEC.fits
            if fits_file.endswith('.fits'):
                objectname_file = fits_file.split('_')[0]
                expnum_file = fits_file.split('_')[1]
                if (expnum_file == expnum_p) and (objectname_file == object_name):
                    file_path = '{}/{}'.format(_STAMPS_DIR, fits_file)
                    storage.copy('{}/{}'.format(vos_dir, fits_file), file_path)
                    data = fits.getdata(file_path)
                    header = fits.getheader(file_path)
                    objs = sep_phot(data, ap, th)
                    os.unlink(file_path)
                    return objs, header, data
    except TypeError:
        print("WARNING: Image does not exist for {} {}".format(object_name,
                                                               expnum_p))
        raise
    except Exception as e:
        print('Error retrieving fits data: {}'.format(e))
        write_to_error_file(object_name, expnum_p,
                            out_filename=_OUTPUT_PHOT_ERR,
                            family_name=family_name)
        raise
def scramble(expnums, ccd, version='p'):
    '''Scramble the EXPNUM/MJD-OBS headers among a triplet of exposures and
    store the results as the 's' version of each image.'''
    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out the values to replace in the headers; copy the values,
        # as otherwise we get pointers into the original headers.
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [1, 0, 2]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'],
                                            expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        storage.copy(fname, uri)

        # now make a link between files that the plant system will need
        for ext in ['apcor', 'obj.jmp', 'mopheader', 'phot',
                    'psf.fits', 'trans.jmp', 'zeropoint.used', 'fwhm']:
            storage.delete(expnums[order[idx]], ccd, 's', ext)
            storage.vlink(expnums[idx], ccd, 'p', ext,
                          expnums[order[idx]], ccd, 's', ext)
    return
def get_star_data(asteroid_id, mag, expnum, header):
    """From the OSSOS PSF-fitted image, calculate the mean flux of each row
    of the rotated PSF."""
    # calculate mean psf
    uri = storage.get_uri(expnum.strip('p'), header[_CCD].split('d')[1])
    ossos_psf = '{}.psf.fits'.format(uri.strip('.fits'))
    local_psf = '{}{}.psf.fits'.format(expnum, header[_CCD].split('d')[1])
    local_file_path = '{}/{}'.format(_STAMPS_DIR, local_psf)
    storage.copy(ossos_psf, local_file_path)

    # pvwcs = wcs.WCS(header)
    # x, y = pvwcs.sky2xy(asteroid_id['ra'].values, asteroid_id['dec'].values)
    x = asteroid_id[_XMID_HEADER].values[0]
    y = asteroid_id[_YMID_HEADER].values[0]

    # run seepsf on the mean psf image
    iraf.set(uparm="./")
    iraf.digiphot(_doprint=0)
    iraf.apphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.seepsf(local_file_path, local_psf, xpsf=x, ypsf=y, magnitude=mag)

    with fits.open(local_psf) as hdulist:
        data = hdulist[0].data

    th = math.degrees(asteroid_id[_THETA_HEADER].values[0])
    data_rot = rotate(data, th)
    data_rot = np.ma.masked_where(data_rot == 0, data_rot)

    data_mean = np.ma.mean(data_rot, axis=1)

    os.unlink(local_psf)
    os.unlink(local_file_path)

    return data_mean[np.nonzero(np.ma.fix_invalid(data_mean, fill_value=0))[0]]
def mkpsf(expnum, ccd, version, dry_run=False, prefix=""):
    """Run the OSSOS jmpmakepsf script."""
    ## confirm destination directory exists.
    destdir = os.path.dirname(
        storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version,
                             ext='fits'))
    if not dry_run:
        storage.mkdir(destdir)

    ## get image from the vospace storage area
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    logging.info("Running mkpsf on %s %d" % (expnum, ccd))

    ## launch the makepsf script
    logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename, 'no']))
    if dry_run:
        return

    ## place the results into VOSpace
    basename = os.path.splitext(filename)[0]
    for ext in ('mopheader', 'psf.fits',
                'zeropoint.used', 'apcor', 'fwhm', 'phot'):
        dest = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                    version=version, ext=ext)
        source = basename + "." + str(ext)
        storage.copy(source, dest)
    return
def scramble(expnums, ccd, version='p'):
    '''Scramble the EXPNUM/MJD-OBS headers among a triplet of exposures and
    store the results as the 's' version of each image.'''
    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out the values to replace in the headers; copy the values,
        # as otherwise we get pointers into the original headers.
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [1, 0, 2]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'],
                                            expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        storage.copy(fname, uri)

        # now make a link between files that the plant system will need
        for ext in ['apcor', 'obj.jmp', 'mopheader', 'phot',
                    'psf.fits', 'trans.jmp', 'zeropoint.used', 'fwhm']:
            if storage.exists(storage.get_uri(expnums[order[idx]], ccd, 's', ext)):
                storage.delete(expnums[order[idx]], ccd, 's', ext)
            storage.vlink(expnums[idx], ccd, 'p', ext,
                          expnums[order[idx]], ccd, 's', ext)
    return
def plant(expnums, ccd, rmin, rmax, ang, width, version="s"):
    """run the plant script on this combination of exposures"""
    ptf = open("proc-these-files", "w")
    ptf.write("# Files to be planted and searched\n")
    ptf.write("# image fwhm plant\n")
    for expnum in expnums:
        fwhm = storage.get_fwhm(expnum, ccd, version=version)
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
        # pre-fetch the support files that plant.csh will need
        for ext in ["apcor", "obj.jmp", "trans.jmp", "psf.fits",
                    "mopheader", "phot", "zeropoint.used"]:
            apcor = storage.get_image(expnum, ccd=ccd, version=version, ext=ext)
    ptf.close()

    cmd_args = ["plant.csh", os.curdir,
                str(rmin), str(rmax), str(ang), str(width)]
    util.exec_prog(cmd_args)

    if args.dryrun:
        # Don't push back to VOSpace (args is the module-level argparse namespace)
        return

    uri = storage.get_uri("Object", ext="planted", version="",
                          subdir=str(expnums[0]) +
                                 "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy("Object.planted", uri)
    uri = os.path.join(os.path.dirname(uri), "plant.shifts")
    storage.copy("shifts", uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version,
                              ext="fits", prefix="fk")
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

        for ext in ["mopheader", "psf.fits", "fwhm",
                    "apcor", "zeropoint.used", "trans.jmp"]:
            storage.delete(expnum, ccd, "s", ext, prefix="fk")
            storage.vlink(expnum, ccd, "s", ext,
                          expnum, ccd, "s", ext, l_prefix="fk")
    return
def plant(expnums, ccd, rmin, rmax, ang, width, number=10,
          mmin=21.0, mmax=25.5, version='s', dry_run=False, force=True):
    """Plant artificial sources into the list of images provided.

    @param dry_run: don't push results to VOSpace.
    @param width: The +/- range of angles of motion
    @param ang: The mean angle of motion to add sources
    @param rmax: The maximum rate of motion to add sources at (''/hour)
    @param rmin: The minimum rate of motion to add sources at (''/hour)
    @param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    @param ccd: which ccd to work on.
    @param mmax: Maximum magnitude to plant sources at
    @param version: Add sources to the 'o', 'p' or 's' images
    @param mmin: Minimum magnitude to plant sources at
    @param number: number of sources to plant.
    @param force: Run, even if we already succeeded at making a fk image.
    """
    message = storage.SUCCESS

    if storage.get_status(task, "", expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, "", expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, "", expnums[0], ccd, version, dry_run):
        try:
            # Construct a list of artificial KBOs with positions in the image
            # and rates of motion within the bounds given by the caller.
            filename = storage.get_image(expnums[0], ccd=ccd, version=version)
            header = fits.open(filename)[0].header
            bounds = util.get_pixel_bounds_from_datasec_keyword(
                header.get('DATASEC', '[33:2080,1:4612]'))

            # generate a set of artificial KBOs to add to the image.
            kbos = KBOGenerator.get_kbos(n=number,
                                         rate=(rmin, rmax),
                                         angle=(ang - width, ang + width),
                                         mag=(mmin, mmax),
                                         x=(bounds[0][0], bounds[0][1]),
                                         y=(bounds[1][0], bounds[1][1]),
                                         filename='Object.planted')

            for expnum in expnums:
                filename = storage.get_image(expnum, ccd, version)
                psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
                plant_kbos(filename, psf, kbos,
                           get_shifts(expnum, ccd, version), "fk")

            if dry_run:
                return

            uri = storage.get_uri('Object', ext='planted', version='',
                                  subdir=f"{expnums[0]}/ccd{int(ccd):02d}")
            storage.copy('Object.planted', uri)
            for expnum in expnums:
                uri = storage.get_uri(expnum, ccd=ccd, version=version,
                                      ext='fits', prefix='fk')
                filename = os.path.basename(uri)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task, "", expnums[0], version, ccd, status=message)
    return
def align(expnums, ccd, version='s', dry_run=False):
    """Create a 'shifts' file that transforms the space/flux/time scale of all
    images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used
    files for inputs. The scaling we are computing here is for use in planting
    sources into the image at the same sky/flux locations while accounting
    for motions of sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to,
        the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """
    # Get the images and supporting files that we need from the VOSpace area;
    # get_image and get_file check if the image/file is already on disk.
    # Re-compute fluxes from the PSF stars, then recompute the x/y/flux scaling.

    # some dictionaries to hold the various scale values
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        zmag[expnum] = storage.get_zeropoint(expnum, ccd, prefix=None,
                                             version=version)
        mjdates[expnum] = float(fits.open(filename)[0].header.get('MJD-OBS'))
        apcor[expnum] = [
            float(x) for x in
            open(storage.get_file(expnum, ccd=ccd, version=version,
                                  ext=storage.APCOR_EXT)).read().split()
        ]
        keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
        # load the .trans.jmp values into a 'wcs' like dictionary.
        # .trans.jmp maps current frame to reference frame in pixel coordinates.
        # the reference frame of all the frames supplied must be the same.
        shifts = dict(zip(keys, [
            float(x) for x in
            open(storage.get_file(expnum, ccd=ccd, version=version,
                                  ext='trans.jmp')).read().split()
        ]))
        shifts['crpix1'] = 0.0
        shifts['crpix2'] = 0.0
        # now create a wcs object based on those transforms; this wcs links the
        # current frame's pixel coordinates to the reference frame's pixel
        # coordinates.
        w = get_wcs(shifts)

        # get the PHOT file that was produced by the mkpsf routine
        phot = ascii.read(storage.get_file(expnum, ccd=ccd, version=version,
                                           ext='phot'), format='daophot')

        # compute the small-aperture magnitudes of the stars used in the PSF
        mags[expnum] = daophot.phot(filename,
                                    phot['XCENTER'],
                                    phot['YCENTER'],
                                    aperture=apcor[expnum][0],
                                    sky=apcor[expnum][1] + 1,
                                    swidth=apcor[expnum][0],
                                    zmag=zmag[expnum])

        # convert the x/y positions to positions in Frame 1 based on the
        # trans.jmp values.
        (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                 mags[expnum]["YCENTER"], 1)
        pos[expnum] = numpy.transpose([x, y])

        # match this exposure's PSF star positions against those in the first
        # image of the set.
        idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

        # compute the magnitude offset between the current frame and the
        # reference.
        dmags = numpy.ma.array(mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                               (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                               mask=idx1.mask)
        dmags.sort()

        # compute the median and determine if that shift is small compared to
        # the scatter.
        dmag = dmags[int(len(dmags) / 2.0)]
        if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
            logging.warning(
                "Magnitude shift {} between {} and {} is large compared to "
                "the scatter {}".format(dmag, expnums[0], expnum, dmags.std()))
        shifts['dmag'] = dmag
        shifts['emag'] = dmags.std()
        shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
        shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
        shift_file = os.path.basename(
            storage.get_uri(expnum, ccd, version, '.shifts'))
        with open(shift_file, 'w') as fh:
            fh.write(json.dumps(shifts, sort_keys=True, indent=4,
                                separators=(',', ': ')))
            fh.write('\n')
        if not dry_run:
            storage.copy(shift_file,
                         storage.get_uri(expnum, ccd, version, '.shifts'))
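# Usage sketch for align() above (hypothetical triplet and CCD): compute the
# per-exposure .shifts JSON locally, with the first exposure as the reference
# frame, without copying the files back to VOSpace:
#
#     align([1616681, 1616692, 1616703], ccd=22, version='s', dry_run=True)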
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):
    message = 'success'
    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task=task, prefix=prefix, expnum=expnum,
                                ccd=ccd, version=version, dry_run=dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))
            header = storage.get_astheader(expnum, ccd)
            datasec = storage.datasec_to_list(
                header.get('DATASEC', '[80:2080,30:4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except Exception:
                fwhm = 'unknown'
            for keyword in del_keyword_list:
                try:
                    del header[keyword]
                except KeyError:
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([primary_hdu, ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.' + ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                try:
                    os.unlink(name)
                    os.unlink(name + ".fits")
                except OSError:
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()
                # keep only the sources that fall inside the DATASEC region
                t = numpy.all([datasec[0] < obj_file.data['X'],
                               obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'],
                               obj_file.data['Y'] < datasec[3]], axis=0)
                logging.info("Sources remaining after datasec cut: {} of {}".format(
                    len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)

            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name + " -> " + uri)
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        if count > 10:
                            raise ex
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version=version,
                               ccd=ccd, status=message)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('field')
    parser.add_argument('ccd')
    parser.add_argument('--expnum', default=None,
                        help="Which exposure is the lead for this astrom file?")
    parser.add_argument('--astrom-filename', default=None,
                        help="Give the astrom file directly instead of "
                             "looking-up using the field/ccd naming scheme.")
    parser.add_argument('--reals', action='store_true', default=False)
    parser.add_argument('--type', choices=['o', 'p', 's'],
                        help="Which type of image.", default='s')
    parser.add_argument('--measure3',
                        default='vos:OSSOS/measure3/2013B-L_redo/')
    parser.add_argument('--dbimages', default=None)
    parser.add_argument('--dry-run', action='store_true', default=False)
    parser.add_argument('--force', action='store_true', default=False)
    parser.add_argument('--object-planted', default=OBJECT_PLANTED,
                        help="Name of the file containing the list of "
                             "planted objects.")
    parser.add_argument('--bright-limit', default=BRIGHT_LIMIT,
                        help="Sources brighter than this limit {} are used "
                             "to diagnose planting issues.".format(BRIGHT_LIMIT))
    parser.add_argument('--minimum-bright-detections',
                        default=MINIMUM_BRIGHT_DETECTIONS,
                        help="required number of detections with mag brighter "
                             "than bright-limit.")
    parser.add_argument('--minimum-bright-fraction',
                        default=MINIMUM_BRIGHT_FRACTION,
                        help="minimum fraction of objects above the bright "
                             "limit that should be found.")

    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)
    prefix = 'fk'
    ext = 'reals' if args.reals else 'cands'

    storage.MEASURE3 = args.measure3
    if args.dbimages is not None:
        storage.DBIMAGES = args.dbimages
        astrom.DATASET_ROOT = args.dbimages

    astrom_uri = storage.get_cands_uri(args.field,
                                       ccd=args.ccd,
                                       version=args.type,
                                       prefix=prefix,
                                       ext="measure3.{}.astrom".format(ext))

    if args.astrom_filename is None:
        astrom_filename = os.path.basename(astrom_uri)
    else:
        astrom_filename = args.astrom_filename

    if not os.access(astrom_filename, os.F_OK):
        astrom_filename = os.path.dirname(astrom_uri) + "/" + astrom_filename

    # Load the list of astrometric observations that will be looked at.
    fk_candidate_observations = astrom.parse(astrom_filename)
    if args.expnum is None:
        expnum = fk_candidate_observations.observations[0].expnum
    else:
        expnum = args.expnum

    storage.set_logger(os.path.splitext(os.path.basename(sys.argv[0]))[0],
                       prefix, expnum, "", ext, args.dry_run)

    match_filename = os.path.splitext(os.path.basename(astrom_filename))[0] + '.match'

    exit_status = 0
    status = storage.SUCCESS
    try:
        if (not storage.get_status(expnum, ccd=args.ccd,
                                   program='astrom_mag_check',
                                   version='')) or args.force:
            message = match_planted(fk_candidate_observations,
                                    match_filename=match_filename,
                                    object_planted=args.object_planted,
                                    bright_limit=args.bright_limit,
                                    minimum_bright_detections=args.minimum_bright_detections,
                                    bright_fraction=args.minimum_bright_fraction)
            match_uri = storage.get_cands_uri(args.field,
                                              ccd=args.ccd,
                                              version=args.type,
                                              prefix=prefix,
                                              ext="measure3.{}.match".format(ext))
            if not args.dry_run:
                storage.copy(match_filename, match_uri)
                uri = os.path.dirname(astrom_uri)
                keys = [storage.tag_uri(os.path.basename(astrom_uri))]
                values = [message]
                storage.set_tags_on_uri(uri, keys, values)
    except Exception as err:
        sys.stderr.write(str(err))
        status = str(err)
        exit_status = str(err)

    if not args.dry_run:
        storage.set_status(expnum, args.ccd, 'astrom_mag_check',
                           version='', status=status)

    return exit_status
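# Command-line sketch for main() above. The field name, CCD number, and the
# script filename are all hypothetical; substitute whichever module exposes
# this main():
#
#     python astrom_mag_check.py 13BL_f01 22 --reals --dry-run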
def run(expnums, ccd, version, rate_min, rate_max, angle, width,
        field=None, prefix=None, dry_run=False, force=False):
    """run the actual step3 on the given exp/ccd combo"""
    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    if storage.get_status(task, prefix, expnums[0], version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnums[0],
                                      version=version, ccd=ccd):
                raise IOError(
                    35, "Cannot start {} as {} not yet completed {}{}{}{:02d}".
                    format(task, dependency, prefix, expnums[0], version, ccd))
            # Default message is success; it gets overwritten with failure
            # messages.
            message = storage.SUCCESS

            idx = 0
            cmd_args = []
            for expnum in expnums:
                idx += 1
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext=ext, prefix=prefix)
                image = os.path.splitext(
                    os.path.basename(
                        storage.get_uri(expnum, ccd, version=version,
                                        prefix=prefix)))[0]
                cmd_args.append('-f%d' % idx)
                cmd_args.append(image)

            cmd_args.extend(['-rn', str(rate_min),
                             '-rx', str(rate_max),
                             '-a', str(angle),
                             '-w', str(width)])
            jmp_args.extend(cmd_args)
            matt_args.extend(cmd_args)
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            if dry_run:
                return

            if field is None:
                field = str(expnums[0])

            # Make sure a dbimages destination exists for this file.
            storage.mkdir(os.path.dirname(
                storage.get_uri(field, ccd=ccd, version=version,
                                prefix=prefix)))

            for ext in ['moving.jmp', 'moving.matt']:
                uri = storage.get_uri(field, ccd=ccd, version=version,
                                      ext=ext, prefix=prefix)
                filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                            str(ccd).zfill(2), ext)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task, prefix, expnums[0], version=version,
                           ccd=ccd, status=message)
    return
def create_vofile(self, destination):
    # Just copy a prototype file until I figure out how to do this properly.
    storage.copy(self.get_abs_path(PROTOTYPE_FILE), destination)
def scramble(expnums, ccd, version='p', dry_run=False, force=False, prefix=''):
    """Scramble the observation times of a set of exposures and save the
    results as the 's' version of each image.

    @param expnums: list of exposure numbers to scramble the time on
    @param ccd: which CCD in (assumes this is a CFHT MegaCam MEF)
    @param version: should we scramble the 'p' or 'o' images?
    @param dry_run: if dry run then don't save back to VOSpace.
    @param force: if true then create scramble set, even if already exists.
    @param prefix: a string that will be pre-pended to the EXPNUM to get the
        filename, sometimes 'fk'.
    @return: None
    """
    # Get a list of the MJD values and then write a re-ordering of those into
    # files with 's' as their type instead of 'p' or 'o'.
    mjds = []
    fobjs = []
    message = storage.SUCCESS

    if not (force or dry_run) and storage.get_status(task, prefix, expnums[0],
                                                     version='s', ccd=ccd):
        logging.info("{} recorded as complete for {} ccd {}".format(
            task, expnums, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                fobjs.append(fits.open(filename))
                # Pull out the values to replace in the headers; copy the
                # values, as otherwise we get pointers into the originals.
                mjds.append(fobjs[-1][0].header['MJD-OBS'])

            order = [0, 2, 1]
            for idx in range(len(fobjs)):
                logging.info("Flipping %d to %d" %
                             (fobjs[idx][0].header['EXPNUM'],
                              expnums[order[idx]]))
                fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
                fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
                uri = storage.get_uri(expnums[order[idx]],
                                      ccd=ccd,
                                      version='s',
                                      ext='fits')
                scramble_file_name = os.path.basename(uri)
                if os.access(scramble_file_name, os.F_OK):
                    os.unlink(scramble_file_name)
                fobjs[idx].writeto(scramble_file_name)
                if not dry_run:
                    storage.copy(scramble_file_name, uri)
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnums[0], version, ccd,
                               status=message)
    return
def create_sky_plot(observation_table, outfile):
    """Given a VOTable that describes the observation coverage provide a PDF
    of the sky coverage.

    observation_table: votable.array
    outfile: name of file to write results to.
    """
    # camera dimensions
    width = 0.98
    height = 0.98
    ax = None

    if outfile[0:4] == 'vos:':
        temp_file = tempfile.NamedTemporaryFile(suffix='.pdf')
        pdf = PdfPages(temp_file.name)
    else:
        temp_file = None
        pdf = PdfPages(outfile)

    saturn = ephem.Saturn()
    uranus = ephem.Uranus()
    t2 = None
    fig = None
    proposal_id = None
    limits = {'13A': (245, 200, -20, 0),
              '13B': (0, 45, 0, 20)}
    for row in reversed(observation_table.data):
        date = ephem.date(row.StartDate + 2400000.5 -
                          ephem.julian_date(ephem.date(0)))
        str_date = str(date)
        # Saturn only a problem in 2013A fields
        saturn.compute(date)
        sra = math.degrees(saturn.ra)
        sdec = math.degrees(saturn.dec)
        uranus.compute(date)
        ura = math.degrees(uranus.ra)
        udec = math.degrees(uranus.dec)
        t1 = time.strptime(str_date, "%Y/%m/%d %H:%M:%S")
        if t2 is None \
                or (math.fabs(time.mktime(t2) - time.mktime(t1)) > 3 * 3600.0
                    and opt.stack) \
                or proposal_id is None \
                or proposal_id != row.ProposalID:
            if fig is not None:
                pdf.savefig()
                close()
            proposal_id = row.ProposalID
            fig = figure(figsize=(7, 2))
            ax = fig.add_subplot(111, aspect='equal')
            ax.set_title("Data taken on %s-%s-%s" %
                         (t1.tm_year, t1.tm_mon, t1.tm_mday),
                         fontdict={'fontsize': 8})
            # appropriate only for 2013A fields
            ax.axis(limits.get(row.ProposalID[0:3], (0, 20, 0, 20)))
            ax.grid()
            ax.set_xlabel("RA (deg)", fontdict={'fontsize': 8})
            ax.set_ylabel("DEC (deg)", fontdict={'fontsize': 8})
        t2 = t1
        ra = row.RA - width / 2.0
        dec = row.DEC - height / 2.0
        color = 'b'
        if 'W' in row['TargetName']:
            color = 'g'
        ax.add_artist(Rectangle(xy=(ra, dec), height=height, width=width,
                                edgecolor=color, facecolor=color,
                                lw=0.5, fill='g', alpha=0.33))
        ax.add_artist(Rectangle(xy=(sra, sdec), height=0.3, width=0.3,
                                edgecolor='r', facecolor='r',
                                lw=0.5, fill='k', alpha=0.33))
        ax.add_artist(Rectangle(xy=(ura, udec), height=0.3, width=0.3,
                                edgecolor='b', facecolor='b',
                                lw=0.5, fill='b', alpha=0.33))

    if ax is not None:
        ax.axis((270, 215, -20, 0))
        pdf.savefig()
        close()
    pdf.close()
    if outfile[0:4] == "vos:":
        storage.copy(temp_file.name, outfile)
    return
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT,
        dry_run=False, force=True, ignore=False):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd) \
                    and not ignore:
                raise IOError(35, "Cannot start {} as {} not yet completed "
                                  "for {}{}{}{:02d}".format(task, dependency,
                                                            prefix, expnum,
                                                            version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version,
                             ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version,
                                         prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix,
                                    version=version, default=3.5)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version,
                                              ext=ext, prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(
                                    count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd,
                               status=message)
def combine(expnum, ccd, prefix=None, type='p', field=None, measure3=MEASURE3):
    if field is None:
        field = str(expnum)
    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % (prefix, field)
    field += "_%s" % (str(ccd))

    for ext in ['moving.matt', 'moving.jmp']:
        fname = storage.get_image(expnum, ccd=ccd, prefix=prefix,
                                  version=type, ext=ext)

    if prefix is not None and len(prefix) > 0:
        planted = storage.get_image('Object',
                                    subdir=str(expnum) +
                                           "/ccd%s" % (str(ccd).zfill(2)),
                                    version='',
                                    ext='planted')
    else:
        prefix = ''

    base_image = os.path.basename(
        storage.get_uri(expnum, ccd=ccd, prefix=prefix, version=type,
                        ext=None))

    cmd_args = ['comb-list', prefix + str(expnum) + type + str(ccd).zfill(2)]
    util.exec_prog(cmd_args)

    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0:
        ext_list.extend(['jmp.missed', 'matt.missed',
                         'jmp.found', 'matt.found',
                         'comb.missed', 'comb.found'])

    for ext in ext_list:
        uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                              version=type, ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % (filename))
            continue
        vospace_name = "%s.%s" % (field, ext)
        logging.info("%s -> %s" % (filename,
                                   os.path.join(measure3, vospace_name)))
        storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix + str(expnum) + type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    if not os.access(cands_file, os.R_OK):
        nocands_file = (prefix + str(expnum) + type +
                        str(ccd).zfill(2) + '.no_candidates')
        open(nocands_file, 'w').close()
        vospace_name = "%s.no_candidates" % (field)
        storage.copy(nocands_file, os.path.join(measure3, vospace_name))
        return storage.SUCCESS

    # get the images we need to compute x/y ra/dec transforms
    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd, version=type, prefix=prefix, ext='fits')

    cmd_args = ['measure3', prefix + str(expnum) + type + str(ccd).zfill(2)]
    logging.info("Running measure3")
    util.exec_prog(cmd_args)

    filename = base_name + ".measure3.cands.astrom"
    vospace_filename = "%s.measure3.cands.astrom" % (field)
    storage.copy(filename, os.path.join(measure3, vospace_filename))
    return storage.SUCCESS
def step1(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
          wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT, dry_run=False):
    """Run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image, in pixels
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    """
    storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]

    logging.info(util.exec_prog(['step1jmp',
                                 '-f', basename,
                                 '-t', str(wave_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"

    if not dry_run:
        storage.copy(obj_filename, obj_uri)

    # for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT', 'weight.fits')
    parts = os.path.splitext(flat_name)
    if parts[1] == '.fz':
        flat_name = os.path.splitext(parts[0])[0]
    else:
        flat_name = parts[0]
    try:
        flat_filename = storage.get_image(flat_name, ccd, version='',
                                          ext='fits', subdir='calibrators')
    except Exception:
        flat_filename = storage.get_image(flat_name, ccd, version='',
                                          ext='fits', subdir='old_calibrators')
    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')
    if not os.access('weight.fits', os.R_OK):
        os.symlink(flat_filename, 'weight.fits')

    logging.info(util.exec_prog(['step1matt',
                                 '-f', basename,
                                 '-t', str(sex_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))

    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"

    if not dry_run:
        storage.copy(obj_filename, obj_uri)

    return True
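# The run() variants call _get_weight_map(filename, ccd), which is defined
# elsewhere. A sketch reconstructed from the inline weight-image handling in
# step1() above; treat the details (header fallback, calibrator subdirectory
# names) as assumptions, and note that `storage` is the same VOSpace helper
# module used throughout this file.
import os

from astropy.io import fits


def _get_weight_map(filename, ccd):
    """Resolve the flat named in the FLAT header keyword and link it to
    weight.fits, mirroring the inline step1() logic."""
    flat_name = fits.open(filename)[0].header.get('FLAT', 'weight.fits')
    parts = os.path.splitext(flat_name)
    if parts[1] == '.fz':
        flat_name = os.path.splitext(parts[0])[0]
    else:
        flat_name = parts[0]
    try:
        flat_filename = storage.get_image(flat_name, ccd, version='',
                                          ext='fits', subdir='calibrators')
    except Exception:
        flat_filename = storage.get_image(flat_name, ccd, version='',
                                          ext='fits', subdir='old_calibrators')
    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')
    os.symlink(flat_filename, 'weight.fits')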
def combine(expnum, ccd, prefix=None, file_type='p', field=None,
            measure3=MEASURE3, dry_run=False):
    if field is None:
        field = str(expnum)
    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % (prefix, field)
    field += "_%s%s" % (str(file_type), str(ccd))

    logging.info("Doing combine on field {}".format(field))

    for ext in ['moving.matt', 'moving.jmp']:
        storage.get_file(expnum, ccd=ccd, version=file_type, ext=ext,
                         prefix=prefix)

    if prefix is not None and len(prefix) > 0:
        storage.get_file('Object', version='', ext='planted',
                         subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)))
    else:
        prefix = ''

    cmd_args = ['comb-list', prefix + str(expnum) + file_type + str(ccd).zfill(2)]
    logging.info(str(cmd_args))
    logging.info(util.exec_prog(cmd_args))

    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0:
        ext_list.extend(['jmp.missed', 'matt.missed',
                         'jmp.found', 'matt.found',
                         'comb.missed', 'comb.found'])

    for ext in ext_list:
        uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                              version=file_type, ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % filename)
            continue
        vospace_name = "%s.%s" % (field, ext)
        if not dry_run:
            logging.info("%s -> %s" % (filename,
                                       os.path.join(measure3, vospace_name)))
            storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix + str(expnum) + file_type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    if not os.access(cands_file, os.R_OK):
        no_cands_file = (prefix + str(expnum) + file_type +
                         str(ccd).zfill(2) + '.no_candidates')
        open(no_cands_file, 'w').close()
        if not dry_run:
            vospace_name = "%s.no_candidates" % field
            storage.copy(no_cands_file, os.path.join(measure3, vospace_name))
        return storage.SUCCESS

    # get the images we need to compute x/y ra/dec transforms
    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd, version=file_type, ext='fits', prefix=prefix)

    cmd_args = ['measure3', prefix + str(expnum) + file_type + str(ccd).zfill(2)]
    logging.info("Running measure3")
    logging.info(util.exec_prog(cmd_args))

    if not dry_run:
        filename = base_name + ".measure3.cands.astrom"
        vospace_filename = "%s.measure3.cands.astrom" % field
        storage.copy(filename, os.path.join(measure3, vospace_filename))
    return storage.SUCCESS
def align(expnums, ccd, version='s', prefix='', dry_run=False, force=True):
    """Create a 'shifts' file that transforms the space/flux/time scale of all
    images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used
    files for inputs. The scaling we are computing here is for use in planting
    sources into the image at the same sky/flux locations while accounting for
    motions of sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs
                    to; the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param prefix: put this string in front of expnum when looking for the
                   exposure, normally '' or 'fk'.
    :param force: when True, run the task even if it is recorded as having
                  succeeded.
    :param version: add sources to the 'o', 'p' or 's' images.
    :param dry_run: don't push results to VOSpace.
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnums[0], version, ccd))
        return

    # Get the images and supporting files that we need from the VOSpace area;
    # get_image and get_file check if the image/file is already on disk.
    # Re-compute fluxes from the PSF stars and then recompute the x/y/flux
    # scaling.

    # some dictionaries to hold the various scales
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version, dry_run):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                zmag[expnum] = storage.get_zeropoint(expnum, ccd, prefix=None,
                                                     version=version)
                mjdates[expnum] = float(
                    fits.open(filename)[0].header.get('MJD-OBS'))
                apcor[expnum] = [
                    float(x) for x in
                    open(storage.get_file(expnum, ccd=ccd, version=version,
                                          ext=storage.APCOR_EXT)).read().split()
                ]

                keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
                # Load the .trans.jmp values into a 'wcs'-like dictionary.
                # .trans.jmp maps the current frame to the reference frame in
                # pixel coordinates; the reference frame of all the frames
                # supplied must be the same.
                shifts = dict(list(zip(keys, [
                    float(x) for x in
                    open(storage.get_file(expnum, ccd=ccd, version=version,
                                          ext='trans.jmp')).read().split()
                ])))
                shifts['crpix1'] = 0.0
                shifts['crpix2'] = 0.0

                # Now create a WCS object based on those transforms; this WCS
                # links the current frame's pixel coordinates to the reference
                # frame's pixel coordinates.
                w = get_wcs(shifts)

                # get the PHOT file that was produced by the mkpsf routine
                logging.debug("Reading .phot file {}".format(expnum))
                phot = ascii.read(storage.get_file(expnum, ccd=ccd,
                                                   version=version, ext='phot'),
                                  format='daophot')

                # compute the small-aperture magnitudes of the stars used in the PSF
                logging.debug("Running phot on {}".format(filename))
                mags[expnum] = daophot.phot(filename,
                                            phot['XCENTER'],
                                            phot['YCENTER'],
                                            aperture=apcor[expnum][0],
                                            sky=apcor[expnum][1] + 1,
                                            swidth=apcor[expnum][0],
                                            zmag=zmag[expnum])

                # Convert the x/y positions to positions in frame 1 based on
                # the trans.jmp values.
                logging.debug("Doing the XY translation to reference frame: {}".format(w))
                (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                         mags[expnum]["YCENTER"], 1)
                pos[expnum] = numpy.transpose([x, y])

                # Match this exposure's PSF star positions against those in
                # the first image of the set.
                logging.debug("Matching lists")
                idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

                # Compute the magnitude offset between the current frame and
                # the reference.
                dmags = numpy.ma.array(
                    mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                    (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                    mask=idx1.mask)
                dmags.sort()
                # logging.debug("Computed dmags between input and reference: {}".format(dmags))
                error_count = 0
                error_count += 1
                logging.debug("{}".format(error_count))

                # Compute the median and determine if that shift is small
                # compared to the scatter.
                try:
                    midx = int(numpy.sum(numpy.any([~dmags.mask], axis=0)) / 2.0)
                    dmag = float(dmags[midx])
                    logging.debug("Computed a mag delta of: {}".format(dmag))
                except Exception as e:
                    logging.error(str(e))
                    logging.error("Failed to compute mag offset between plant "
                                  "and found using: {}".format(dmags))
                    dmag = 99.99

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
                        logging.warning(
                            "Magnitude shift {} between {} and {} is large: {}".format(
                                dmag, expnums[0], expnum, shifts))
                except Exception as e:
                    logging.error(str(e))

                error_count += 1
                logging.debug("{}".format(error_count))

                shifts['dmag'] = dmag
                shifts['emag'] = dmags.std()
                shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
                shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
                shift_file = os.path.basename(
                    storage.get_uri(expnum, ccd, version, '.shifts'))

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    fh = open(shift_file, 'w')
                    fh.write(json.dumps(shifts, sort_keys=True, indent=4,
                                        separators=(',', ': '), cls=NpEncoder))
                    fh.write('\n')
                    fh.close()
                except Exception as e:
                    logging.error("Creation of SHIFTS file failed while "
                                  "trying to write: {}".format(shifts))
                    raise e

                error_count += 1
                logging.debug("{}".format(error_count))

                if not dry_run:
                    storage.copy(shift_file,
                                 storage.get_uri(expnum, ccd, version, '.shifts'))
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            # record the status against the reference exposure, matching the
            # get_status check at the top of this function
            storage.set_status(task, prefix, expnums[0], version, ccd,
                               status=message)
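# align() depends on two helpers that are defined elsewhere: get_wcs(), which
# wraps the .trans.jmp linear terms, and NpEncoder, which lets json.dumps
# serialize numpy values. Minimal sketches of both follow, assuming the
# .shifts file only ever holds numpy scalars/arrays and that the transform is
# purely linear; the real implementations may differ.
import json

import numpy
from astropy import wcs


class NpEncoder(json.JSONEncoder):
    """JSON encoder that downcasts numpy scalars and arrays to plain types."""

    def default(self, obj):
        if isinstance(obj, numpy.integer):
            return int(obj)
        if isinstance(obj, numpy.floating):
            return float(obj)
        if isinstance(obj, numpy.ndarray):
            return obj.tolist()
        return super(NpEncoder, self).default(obj)


def get_wcs(shifts):
    """Build a linear WCS whose 'world' frame is the reference image's pixel
    grid, from the six .trans.jmp terms plus the zeroed crpix values."""
    w = wcs.WCS(naxis=2)
    w.wcs.crpix = [shifts['crpix1'], shifts['crpix2']]
    w.wcs.crval = [shifts['crval1'], shifts['crval2']]
    w.wcs.cd = [[shifts['cd1_1'], shifts['cd1_2']],
                [shifts['cd2_1'], shifts['cd2_2']]]
    return w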
def main(expnum, ccd):
    header = storage.get_astheader(expnum, ccd)
    # DATASEC is '[x1:x2,y1:y2]'; the fallback below fixes the malformed
    # '[80:2080,30,4160]' default that appeared here previously.
    datasec = storage.datasec_to_list(
        header.get('DATASEC', '[80:2080,30:4160]'))
    try:
        fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
    except Exception:
        fwhm = 'unknown'
    for keyword in del_keyword_list:
        try:
            del header[keyword]
        except KeyError:
            pass
    header['FWHM'] = (fwhm, 'FWHM in pixels')
    header['EXTNAME'] = 'header'
    primary_hdu = fits.PrimaryHDU(header=header)
    hdu_list = fits.HDUList([primary_hdu, ])
    for ext in ['jmp', 'matt']:
        extension = 'obj.' + ext
        name = "{}p{:02d}.{}".format(expnum, ccd, extension)
        try:
            os.unlink(name)
            os.unlink(name + ".fits")
        except OSError:
            pass
        logging.info("Retrieving {}".format(name))
        obj_file = mop_file.Parser(expnum, ccd, extension)
        obj_file.parse()

        # keep only the sources that fall inside the data section
        t = numpy.all([datasec[0] < obj_file.data['X'],
                       obj_file.data['X'] < datasec[1],
                       datasec[2] < obj_file.data['Y'],
                       obj_file.data['Y'] < datasec[3]], axis=0)
        logging.info("Sources remaining after datasec cut: {} of {}".format(
            len(obj_file.data[t]['X']), len(t)))
        table_hdu = fits.table_to_hdu(obj_file.data[t])
        table_hdu.header['CATALOG'] = name
        table_hdu.header['EXTNAME'] = ext
        hdu_list.append(table_hdu)
        del table_hdu
        del obj_file
        os.unlink(name)

    name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
    if os.access(name, os.F_OK):
        os.unlink(name)
    hdu_list.writeto(name)
    uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
    logging.info(name + " -> " + uri)
    count = 0
    while True:
        logging.info("Copy attempt {}".format(count))
        try:
            storage.copy(name, uri)
            os.unlink(name)
            break
        except Exception as ex:
            if count > 10:
                raise ex
            count += 1
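# storage.datasec_to_list() is used above but defined elsewhere. One plausible
# sketch, assuming DATASEC always looks like '[x1:x2,y1:y2]' with no embedded
# whitespace:
import re


def datasec_to_list(datasec):
    """Parse '[80:2080,30:4160]' into [80, 2080, 30, 4160]."""
    match = re.match(r'\[(\d+):(\d+),(\d+):(\d+)\]', datasec)
    if match is None:
        raise ValueError("Cannot parse DATASEC: {}".format(datasec))
    return [int(value) for value in match.groups()]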
def step1(expnum, ccd, prefix='', version='p', fwhm=4,
          sex_thresh=1.3, wave_thresh=2.7, maxcount=30000):
    """Run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image, in pixels
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    """
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    mopheader = storage.get_image(expnum, ccd, version=version,
                                  ext='mopheader', prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]

    outfile = util.exec_prog(['step1jmp',
                              '-f', basename,
                              '-t', str(wave_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"
    storage.copy(obj_filename, obj_uri)

    # for step1matt we need the weight image
    flat_name = fits.open(filename)[0].header['FLAT']
    flat_name = flat_name[0:-5]  # strip the '.fits' suffix
    flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                                      subdir='calibrators', rescale=False)
    if not os.access('weight.fits', os.R_OK):
        os.symlink(flat_filename, 'weight.fits')

    outfile = util.exec_prog(['step1matt',
                              '-f', basename,
                              '-t', str(sex_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"
    storage.copy(obj_filename, obj_uri)

    return True
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT,
        dry_run=False, force=True):
    """Run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image, in pixels
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version,
                                              ext=ext, prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(
                                    count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
def run(expnum, ccd, prefix=None, version='p', field=None,
        measure3_dir=storage.MEASURE3, dry_run=False, force=False):
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed "
                                  "for {}{}{}{:02d}".format(
                                      task, dependency, prefix, expnum,
                                      version, ccd))
            if field is None:
                field = str(expnum)
            if prefix is not None and len(prefix) > 0:
                field = "%s_%s" % (prefix, field)
            field += "_%s%s" % (str(version), str(ccd))
            logging.info("Doing combine on field {}".format(field))

            for ext in ['moving.matt', 'moving.jmp']:
                storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                                 prefix=prefix)

            # Get the list of objects planted into the field if prefix='fk'
            if prefix == 'fk':
                storage.get_file('Object', version='', ext='planted',
                                 subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)))
            else:
                prefix = ''

            cmd_args = ['comb-list',
                        prefix + str(expnum) + version + str(ccd).zfill(2)]
            logging.info(str(cmd_args))
            logging.info(util.exec_prog(cmd_args))

            # things to copy back to VOSpace; if this is an 'fk' image
            # then we have missed and found files too.
            ext_list = ['cands.comb']
            if prefix == 'fk':
                ext_list.extend(['jmp.missed', 'matt.missed',
                                 'jmp.found', 'matt.found',
                                 'comb.missed', 'comb.found'])

            for ext in ext_list:
                uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                                      version=version, ext=ext)
                filename = os.path.basename(uri)
                if not os.access(filename, os.R_OK):
                    logging.critical("No %s file" % filename)
                    continue
                vospace_name = "%s.%s" % (field, ext)
                if not dry_run:
                    logging.info("%s -> %s" % (
                        filename, os.path.join(measure3_dir, vospace_name)))
                    storage.copy(filename,
                                 os.path.join(measure3_dir, vospace_name))

            base_name = prefix + str(expnum) + version + str(ccd).zfill(2)
            cands_file = base_name + '.cands.comb'

            if not os.access(cands_file, os.R_OK):
                no_cands_file = (prefix + str(expnum) + version +
                                 str(ccd).zfill(2) + '.no_candidates')
                open(no_cands_file, 'w').close()
                if not dry_run:
                    vospace_name = "%s.no_candidates" % field
                    storage.copy(no_cands_file,
                                 os.path.join(measure3_dir, vospace_name))
            else:
                measure3.run(base_name, storage.DBIMAGES)
                if not dry_run:
                    filename = base_name + ".measure3.cands.astrom"
                    vospace_filename = "%s.measure3.cands.astrom" % field
                    storage.copy(filename,
                                 os.path.join(measure3_dir, vospace_filename))
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
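# Every combine/run variant rebuilds the MOP base name inline as
# prefix + expnum + version + zero-padded ccd. A tiny helper makes the naming
# scheme explicit; the function name and the example exposure number are
# illustrative only.
def mop_basename(expnum, ccd, version='p', prefix=''):
    """e.g. mop_basename(1616681, 3, 'p', 'fk') -> 'fk1616681p03'"""
    return "{}{}{}{:02d}".format(prefix, expnum, version, int(ccd))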
# ... (truncated fragment: the opening of this print-based variant of the
# obj.fits builder above is not included) ...
            print(("Sources remaining after datasec cut: {} of {}".format(
                len(obj_file.data[t]['X']), len(t))))
            table_hdu = fits.table_to_hdu(obj_file.data[t])
            table_hdu.header['CATALOG'] = name
            table_hdu.header['EXTNAME'] = ext
            hdu_list.append(table_hdu)
            del table_hdu
            del obj_file
            os.unlink(name)

        name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
        if os.access(name, os.F_OK):
            os.unlink(name)
        hdu_list.writeto(name)
        uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
        print((name + " -> " + uri))
        count = 0
        while count < 10:
            print(("Copy attempt {}".format(count)))
            try:
                storage.copy(name, uri)
                os.unlink(name)
                break
            except Exception as ex:
                print(ex)
                count += 1
                continue
    except Exception as ex:
        print(ex)
        sys.exit(0)
def do_synchronize(self, local_path):
    remote_uri = self.get_remote_uri(local_path)
    logger.info("Syncing %s to %s." % (local_path, remote_uri))
    storage.copy(local_path, remote_uri)
def run(expnums, ccd, version, prefix=None, dry_run=False, default="WCS",
        force=False):
    """Run the actual step2 on the given exp/ccd combo."""
    jmp_trans = ['step2ajmp']
    jmp_args = ['step2bjmp']
    matt_args = ['step2matt_jmp']

    if storage.get_status(task, prefix, expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version, dry_run):
        try:
            for expnum in expnums:
                if not storage.get_status(dependency, prefix, expnum,
                                          version=version, ccd=ccd):
                    raise IOError(35, "Cannot start {} as {} not yet completed "
                                      "for {}{}{}{:02d}".format(
                                          task, dependency, prefix, expnum,
                                          version, ccd))
            message = storage.SUCCESS

            idx = 0
            logging.info("Retrieving catalog files to do matching.")
            for expnum in expnums:
                jmp_args.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.jmp', prefix=prefix)[0:-8])
                jmp_trans.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.jmp', prefix=prefix)[0:-8])
                idx += 1
                matt_args.append('-f%d' % idx)
                matt_args.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.matt', prefix=prefix)[0:-9])

            logging.info("Computing the catalog alignment using sources in catalogs.")
            try:
                logging.info(util.exec_prog(jmp_trans))
                if default == "WCS":
                    logging.info("Comparing computed transform to WCS values")
                    logging.info(compute_trans(expnums, ccd, version, prefix,
                                               default=default))
            except Exception as ex:
                logging.info("JMP Trans failed: {}".format(ex))
                logging.info(compute_trans(expnums, ccd, version, prefix,
                                           default="WCS"))

            logging.info("Using transform to match catalogs for three images.")
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            # check that the shifts from step2 are rational by matching the
            # bright star lists.
            logging.info("Using checktrans to ensure that transforms were reasonable.")
            check_args = ['checktrans']
            if os.access('proc-these-files', os.R_OK):
                os.unlink('proc-these-files')
            ptf = open('proc-these-files', 'w')
            ptf.write("# A dummy file that is created so checktrans could run.\n")
            ptf.write("# Frame FWHM PSF?\n")
            for expnum in expnums:
                filename = os.path.splitext(
                    storage.get_image(expnum, ccd, version=version,
                                      prefix=prefix))[0]
                if not os.access(filename + ".bright.psf", os.R_OK):
                    os.link(filename + ".bright.jmp", filename + ".bright.psf")
                if not os.access(filename + ".obj.psf", os.R_OK):
                    os.link(filename + ".obj.jmp", filename + ".obj.psf")
                ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(filename, _FWHM, "NO"))
            ptf.close()
            if os.access('BAD_TRANS', os.F_OK):
                os.unlink('BAD_TRANS')

            logging.info(util.exec_prog(check_args))

            if os.access('BAD_TRANS', os.F_OK):
                raise OSError(errno.EBADMSG, 'BAD_TRANS')

            if os.access('proc-these-files', os.F_OK):
                os.unlink('proc-these-files')

            if dry_run:
                return

            for expnum in expnums:
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    uri = storage.dbimages_uri(expnum, ccd=ccd, version=version,
                                               ext=ext, prefix=prefix)
                    filename = os.path.basename(uri)
                    storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task, prefix, expnums[0], version, ccd, status=message)
    return
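# step2 leaves a .trans.jmp file per frame holding six numbers that align()
# later reads in the order [crval1, cd1_1, cd1_2, crval2, cd2_1, cd2_2] and
# applies with crpix = 0. The mapping itself is just an affine transform onto
# the reference frame's pixel grid; a sketch (the helper name is illustrative):
def apply_trans_jmp(x, y, coeffs):
    """Map pixel (x, y) into the reference frame using the six .trans.jmp terms."""
    x0, dxx, dxy, y0, dyx, dyy = coeffs
    return (x0 + dxx * x + dxy * y,
            y0 + dyx * x + dyy * y)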
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script."""
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))

            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                     version=version, ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)

            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum, ccd, version=version,
                                         prefix=prefix)

            # get mopheader from the vospace storage area
            logging.info("Getting mopheader from VOSpace")
            mopheader_filename = storage.get_file(expnum, ccd, version=version,
                                                  prefix=prefix, ext='mopheader')

            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename,
                                         'yes', 'yes']))

            if dry_run:
                return

            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]
            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor',
                        'fwhm', 'phot'):
                dest = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                            version=version, ext=ext)
                source = basename + "." + str(ext)
                count = 0
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info("Attempt {} to copy {} -> {}".format(
                                count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex

            # set some data parameters associated with the image, determined
            # in this step.
            storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd,
                               status=str(storage.get_fwhm(
                                   expnum, ccd=ccd, prefix=prefix,
                                   version=version)))
            storage.set_status('zeropoint', prefix, expnum, version=version,
                               ccd=ccd,
                               status=str(storage.get_zeropoint(
                                   expnum, ccd=ccd, prefix=prefix,
                                   version=version)))
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)

        storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message)
    return
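# mkpsf caches the FWHM and zeropoint it measured via set_status, and later
# tasks read them back with storage.get_fwhm/get_zeropoint. Those getters are
# not shown in this file; judging by how the .apcor file is parsed elsewhere
# in this module, the .fwhm and .zeropoint.used files plausibly hold a single
# number each, so a reader could be as simple as the sketch below (the helper
# name and the one-number file format are assumptions):
def read_scalar_file(expnum, ccd, ext, version='p', prefix=''):
    """Fetch a one-number MOP support file (e.g. ext='fwhm') and parse it."""
    filename = storage.get_file(expnum, ccd, version=version,
                                prefix=prefix, ext=ext)
    with open(filename) as fobj:
        return float(fobj.read().strip())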
def main():
    """Do the script."""
    parser = argparse.ArgumentParser(description='replace image header')
    parser.add_argument('--extname',
                        help='name of the extension to update in the header')
    parser.add_argument('expnum', type=str, help='exposure to update')
    parser.add_argument('-r', '--replace', action='store_true',
                        help='store modified image back to VOSpace?')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--force', action='store_true',
                        help="Re-run even if previous success recorded")
    parser.add_argument('--dbimages', help="VOSpace DATA storage area.",
                        default="vos:OSSOS/dbimages")

    args = parser.parse_args()
    task = util.task()
    dependency = 'preproc'
    prefix = ""

    storage.DBIMAGES = args.dbimages

    level = logging.CRITICAL
    message_format = "%(message)s"
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
        message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
    logging.basicConfig(level=level, format=message_format)
    storage.set_logger(task, prefix, args.expnum, None, None, False)

    message = storage.SUCCESS
    expnum = args.expnum

    exit_status = 0
    try:
        # skip if already succeeded and not in force mode
        if storage.get_status(task, prefix, expnum, "p", 36) and not args.force:
            logging.info("Already updated, skipping")
            sys.exit(0)
        image_hdulist = storage.get_image(args.expnum, return_file=False)
        ast_hdulist = storage.get_astheader(expnum, ccd=None)
        run_update_header(image_hdulist, ast_hdulist)
        image_filename = os.path.basename(storage.get_uri(expnum))
        image_hdulist.writeto(image_filename)
        if args.replace:
            dest = storage.dbimages_uri(expnum)
            storage.copy(image_filename, dest)
            storage.set_status('update_header', "", expnum, 'p', 36, message)
    except Exception as e:
        message = str(e)
        if args.replace:
            storage.set_status(task, prefix, expnum, 'p', 36, message)
        exit_status = message
        logging.error(message)

    return exit_status
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):
    message = 'success'

    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task=task, prefix=prefix, expnum=expnum,
                                ccd=ccd, version=version, dry_run=dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))
            header = storage.get_astheader(expnum, ccd)
            # DATASEC is '[x1:x2,y1:y2]'; the fallback below fixes the
            # malformed '[80:2080,30,4160]' default that appeared here
            # previously.
            datasec = storage.datasec_to_list(
                header.get('DATASEC', '[80:2080,30:4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except Exception:
                fwhm = 'unknown'
            for keyword in del_keyword_list:
                try:
                    del header[keyword]
                except KeyError:
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([primary_hdu, ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.' + ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                try:
                    os.unlink(name)
                    os.unlink(name + ".fits")
                except OSError:
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()

                # keep only the sources that fall inside the data section
                t = numpy.all([datasec[0] < obj_file.data['X'],
                               obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'],
                               obj_file.data['Y'] < datasec[3]], axis=0)
                logging.info("Sources remaining after datasec cut: {} of {}".format(
                    len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)

            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name + " -> " + uri)
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        if count > 10:
                            raise ex
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version=version, ccd=ccd,
                               status=message)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('field')
    parser.add_argument('ccd')
    parser.add_argument('--expnum', default=None,
                        help="Which exposure is the lead for this astrom file?")
    parser.add_argument('--astrom-filename', default=None,
                        help="Give the astrom file directly instead of "
                             "looking it up using the field/ccd naming scheme.")
    parser.add_argument('--reals', action='store_true', default=False)
    parser.add_argument('--type', choices=['o', 'p', 's'],
                        help="Which type of image.", default='s')
    parser.add_argument('--measure3',
                        default='vos:OSSOS/measure3/2013B-L_redo/')
    parser.add_argument('--dbimages', default=None)
    parser.add_argument('--dry-run', action='store_true', default=False)
    parser.add_argument('--force', action='store_true', default=False)
    parser.add_argument("--fk", action="store_true", default=False,
                        help="Do fakes?")
    parser.add_argument('--object-planted', default=OBJECT_PLANTED,
                        help="Name of the file containing the list of planted objects.")
    parser.add_argument('--bright-limit', default=BRIGHT_LIMIT, type=float,
                        help="Sources brighter than this limit {} are used to "
                             "diagnose planting issues.".format(BRIGHT_LIMIT))
    parser.add_argument('--minimum-bright-detections',
                        default=MINIMUM_BRIGHT_DETECTIONS, type=int,
                        help="required number of detections with mag brighter "
                             "than bright-limit.")
    parser.add_argument('--minimum-bright-fraction',
                        default=MINIMUM_BRIGHT_FRACTION, type=float,
                        help="minimum fraction of objects above the bright "
                             "limit that should be found.")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--verbose", "-v", action="store_true")

    cmd_line = " ".join(sys.argv)
    args = parser.parse_args()

    util.set_logger(args)
    logging.info("Starting {}".format(cmd_line))

    prefix = (args.fk and "fk") or ""
    version = args.type
    ext = 'cands'
    if args.reals:
        ext = 'reals'

    if args.dbimages is not None:
        storage.DBIMAGES = args.dbimages
        astrom.DATASET_ROOT = args.dbimages
    storage.MEASURE3 = args.measure3

    astrom_uri = storage.get_cands_uri(args.field, ccd=args.ccd,
                                       version=args.type, prefix=prefix,
                                       ext="measure3.{}.astrom".format(ext))
    if args.astrom_filename is None:
        astrom_filename = os.path.basename(astrom_uri)
    else:
        astrom_filename = args.astrom_filename

    if not os.access(astrom_filename, os.F_OK):
        astrom_filename = os.path.dirname(astrom_uri) + "/" + astrom_filename

    # Load the list of astrometric observations that will be looked at.
    fk_candidate_observations = astrom.parse(astrom_filename)
    if args.expnum is None:
        expnum = fk_candidate_observations.observations[0].expnum
    else:
        expnum = args.expnum

    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version='', ccd=args.ccd) and not args.force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, '', args.ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, args.ccd, version,
                                args.dry_run):
        try:
            match_filename = os.path.splitext(
                os.path.basename(astrom_filename))[0] + '.match'
            logging.info(f'Got back: {match_filename}')
            match_uri = storage.get_cands_uri(args.field, ccd=args.ccd,
                                              version=args.type, prefix=prefix,
                                              ext="measure3.{}.match".format(ext),
                                              block=args.field)
            try:
                storage.copy(match_uri, match_filename)
            except NotFoundException as ex:
                logging.warning(f'No match file found at {match_uri}, creating one.')
                logging.debug(f'{ex}')

            logging.info("Comparing planted and measured magnitudes "
                         "for sources in {} and {}\n".format(
                             args.object_planted, astrom_filename))
            result = match_planted(
                fk_candidate_observations,
                match_filename=match_filename,
                object_planted=args.object_planted,
                bright_limit=args.bright_limit,
                minimum_bright_detections=args.minimum_bright_detections,
                bright_fraction=args.minimum_bright_fraction)
            if not args.dry_run:
                storage.copy(match_filename, match_uri)
                uri = os.path.dirname(astrom_uri)
                keys = [storage.tag_uri(os.path.basename(astrom_uri))]
                values = [result]
                storage.set_tags_on_uri(uri, keys, values)
            logger.info(message)
        except Exception as err:
            import traceback
            traceback.print_exc()
            message = str(err)
            logging.error(message)

        if not args.dry_run:
            storage.set_status(task, prefix, expnum, version='', ccd=args.ccd,
                               status=message)
    return
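# The match-file handling above is a fetch-or-create round trip: pull the
# remote file if it exists, let the matcher rewrite it, then push it back.
# The generic shape of that pattern, using the same storage.copy calls as the
# code above (round_trip and update_func are illustrative names):
def round_trip(remote_uri, local_filename, update_func):
    """Pull remote_uri if present, update the local copy, push it back."""
    try:
        storage.copy(remote_uri, local_filename)
    except Exception:
        # no existing remote file; update_func starts from scratch
        pass
    update_func(local_filename)
    storage.copy(local_filename, remote_uri)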
def combine(expnum, ccd, prefix=None, type='p'):
    for ext in ['moving.matt', 'moving.jmp']:
        fname = storage.get_image(expnum, ccd=ccd, prefix=prefix,
                                  version=type, ext=ext)

    if prefix is not None and len(prefix) > 0:
        planted = storage.get_image('Object',
                                    subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)),
                                    version='', ext='planted')
    else:
        prefix = ''

    base_image = os.path.basename(
        storage.get_uri(expnum, ccd=ccd, prefix=prefix, version=type, ext=None))

    cmd_args = ['comb-list', prefix + str(expnum) + type + str(ccd).zfill(2)]
    util.exec_prog(cmd_args)

    for ext in ['cands.comb', 'comb.found', 'comb.missed',
                'jmp.found', 'jmp.missed', 'matt.found', 'matt.missed']:
        uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                              version=type, ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % filename)
            continue
        storage.copy(filename, uri)

    base_name = prefix + str(expnum) + type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    if not os.access(cands_file, os.R_OK):
        nocands_file = (prefix + str(expnum) + type + str(ccd).zfill(2) +
                        '.no_candidates')
        open(nocands_file, 'w').close()
        storage.copy(nocands_file, 'vos:OSSOS/measure3/' + nocands_file)
        return storage.SUCCESS

    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd, version=type, prefix=prefix, ext='fits')

    cmd_args = ['measure3', prefix + str(expnum) + type + str(ccd).zfill(2)]
    util.exec_prog(cmd_args)

    filename = base_name + ".measure3.cands.astrom"
    storage.copy(filename, 'vos:OSSOS/measure3/' + filename)
    return storage.SUCCESS
logging.basicConfig(level=logging.INFO, format="%(message)s")

message = storage.SUCCESS

try:
    image = (os.access(args.expnum, os.W_OK) and args.expnum) or \
            (storage.get_image(args.expnum))
    header = (args.header is not None and
              ((os.access(args.header, os.W_OK) and args.header) or
               (storage.get_image(args.header, ext='head')))) or \
             (storage.get_image(args.expnum, ext='head'))

    logging.info("Swapping header of %s for the contents of %s\n" % (image, header))
    run_update_header(image, header)

    if args.replace:
        expnum = args.expnum or fits.open(image)[0].header['EXPNUM']
        dest = storage.dbimages_uri(expnum)
        storage.copy(image, dest)
        storage.set_status(args.expnum, 36, 'update_header', message)
    sys.exit(0)
except Exception as e:
    logging.error("Error replacing header for %s" % args.expnum)
    logging.error(str(e))
    message = str(e)
    storage.set_status(args.expnum, 36, 'update_header', message)
    sys.exit(2)