def get_iq_and_zeropoint(image, header_extract):
    """Fill header_extract with the image quality ('iq_ossos') and zeropoint ('zeropt').

    Each value is looked up first as a VOSpace tag on the image and, failing
    that, from the per-exposure measurement file; None is stored when neither
    source exists yet.
    """
    # Options: it can be in the image tag, it can be in the file if that's been created.
    fwhm = storage.get_tag(image, 'fwhm_22')  # 22 is the standard chip: mid lower
    if fwhm is None:
        # not in tag, maybe in the file: does that exist?
        sys.stdout.write('...no fwhm vtag. Instead trying file. ')
        try:
            fwhm = storage.get_fwhm(image, ccd=22)
        except Exception:  # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
            # no file yet then either
            fwhm = None
            sys.stdout.write('...fwhm not yet measured. ')
    if fwhm is not None:
        iq = float(fwhm) * 0.1850  # plate scale is 0.1850 arcsec/pixel
        header_extract['iq_ossos'] = iq
    else:
        header_extract['iq_ossos'] = None  # This will need to be updated in the future, then.
    sys.stdout.write('Storing IQ: %s\n' % str(header_extract['iq_ossos']))
    zeropt = storage.get_tag(image, 'zeropoint_22')
    if zeropt is None:
        sys.stdout.write('...no zeropoint vtag. Instead trying file. ')
        try:
            zeropt = storage.get_zeropoint(image, ccd=22)
        except Exception:  # no file yet then either
            zeropt = None
            sys.stdout.write('...zeropoint not yet measured. ')
    header_extract['zeropt'] = zeropt
    sys.stdout.write('Storing zeropoint: %s\n' % str(header_extract['zeropt']))
    return header_extract
def get_iq_and_zeropoint(image, header_extract):
    """Fill header_extract with the image quality ('iq_ossos') and zeropoint ('zeropt').

    Each value is looked up first as a VOSpace tag on the image and, failing
    that, from the per-exposure measurement file; None is stored when neither
    source exists yet.
    """
    # Options: it can be in the image tag, it can be in the file if that's been created.
    fwhm = storage.get_tag(image, 'fwhm_22')  # 22 is the standard chip: mid lower
    if fwhm is None:
        # not in tag, maybe in the file: does that exist?
        sys.stdout.write('...no fwhm vtag. Instead trying file. ')
        try:
            fwhm = storage.get_fwhm(image, ccd=22)
        except Exception:  # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
            # no file yet then either
            fwhm = None
            sys.stdout.write('...fwhm not yet measured. ')
    if fwhm is not None:
        iq = float(fwhm) * 0.1850  # plate scale is 0.1850 arcsec/pixel
        header_extract['iq_ossos'] = iq
    else:
        header_extract['iq_ossos'] = None  # This will need to be updated in the future, then.
    sys.stdout.write('Storing IQ: %s\n' % str(header_extract['iq_ossos']))
    zeropt = storage.get_tag(image, 'zeropoint_22')
    if zeropt is None:
        sys.stdout.write('...no zeropoint vtag. Instead trying file. ')
        try:
            zeropt = storage.get_zeropoint(image, ccd=22)
        except Exception:  # no file yet then either
            zeropt = None
            sys.stdout.write('...zeropoint not yet measured. ')
    header_extract['zeropt'] = zeropt
    sys.stdout.write('Storing zeropoint: %s\n' % str(header_extract['zeropt']))
    return header_extract
def main(expnum, ccd):
    """Bundle the step1 'obj.jmp'/'obj.matt' catalogs for one CCD into a single
    MEF 'obj.fits' file and copy it to VOSpace (with retries).
    """
    header = storage.get_astheader(expnum, ccd)
    datasec = storage.datasec_to_list(header.get('DATASEC', '[80:2080,30,4160]'))
    try:
        fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
    except Exception:  # was a bare 'except:'; fwhm may simply not be measured yet
        fwhm = 'unknown'
    for keyword in del_keyword_list:
        try:
            del header[keyword]
        except KeyError:  # was a bare 'except:'; keyword simply absent from header
            pass
    header['FWHM'] = (fwhm, 'FWHM in pixels')
    header['EXTNAME'] = 'header'
    primary_hdu = fits.PrimaryHDU(header=header)
    hdu_list = fits.HDUList([primary_hdu, ])
    for ext in ['jmp', 'matt']:
        extension = 'obj.' + ext
        name = "{}p{:02d}.{}".format(expnum, ccd, extension)
        # clear out any stale local copies before retrieving fresh ones
        try:
            os.unlink(name)
            os.unlink(name + ".fits")
        except OSError:  # was a bare 'except:'; files may not exist
            pass
        logging.info("Retrieving {}".format(name))
        obj_file = mop_file.Parser(expnum, ccd, extension)
        obj_file.parse()
        # keep only sources that fall inside the DATASEC region
        t = numpy.all([datasec[0] < obj_file.data['X'], obj_file.data['X'] < datasec[1],
                       datasec[2] < obj_file.data['Y'], obj_file.data['Y'] < datasec[3]], axis=0)
        logging.info("Source remaining after datasec cut: {} of {}".format(
            len(obj_file.data[t]['X']), len(t)))
        table_hdu = fits.table_to_hdu(obj_file.data[t])
        table_hdu.header['CATALOG'] = name
        table_hdu.header['EXTNAME'] = ext
        hdu_list.append(table_hdu)
        del table_hdu
        del obj_file
        os.unlink(name)
    name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
    if os.access(name, os.F_OK):
        os.unlink(name)
    hdu_list.writeto(name)
    uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
    logging.info(name + " -> " + uri)
    count = 0
    while True:
        print("Copy attempt {}".format(count))
        try:
            storage.copy(name, uri)
            os.unlink(name)
            break
        except Exception as ex:
            # give up after ~10 failed transfer attempts
            if count > 10:
                raise ex
            count += 1
def plant(expnums, ccd, rmin, rmax, ang, width, version='s'):
    """run the plant script on this combination of exposures

    Writes the 'proc-these-files' listing, runs plant.csh, then (unless
    args.dryrun) pushes the planted products back to VOSpace.
    """
    # use a context manager so the listing is closed (and flushed) even if a
    # retrieval below raises -- plant.csh reads this file afterwards.
    with open('proc-these-files', 'w') as ptf:
        ptf.write("# Files to be planted and search\n")
        ptf.write("# image fwhm plant\n")
        for expnum in expnums:
            fwhm = storage.get_fwhm(expnum, ccd)
            filename = storage.get_image(expnum, ccd=ccd, version=version)
            ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
            # pre-fetch the ancillary files plant.csh expects to find locally
            for ext in ['apcor', 'obj.jmp', 'trans.jmp', 'psf.fits',
                        'mopheader', 'phot', 'zeropoint.used']:
                apcor = storage.get_image(expnum, ccd=ccd, version='s', ext=ext)
    cmd_args = ['plant.csh', os.curdir, str(rmin), str(rmax), str(ang), str(width)]
    util.exec_prog(cmd_args)
    if args.dryrun:
        # Don't push back to VOSpace
        return
    uri = storage.get_uri('Object', ext='planted', version='',
                          subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy('Object.planted', uri)
    uri = os.path.join(os.path.dirname(uri), 'shifts')
    storage.copy('shifts', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version, ext='fits', prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)
        for ext in ['mopheader', 'psf.fits', 'fwhm', 'apcor', 'zeropoint.used', 'trans.jmp']:
            # replace any planted-version product with a link to the 'p' version
            storage.delete(expnum, ccd, 's', ext, prefix='fk')
            storage.vlink(expnum, ccd, 'p', ext, expnum, ccd, 's', ext, l_prefix='fk')
    return
def step1(expnum, ccd, prefix='', version='p', fwhm=4, sex_thresh=1.3, wave_thresh=2.7, maxcount=30000):
    """Run the step1jmp and step1matt source-detection codes on one CCD.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image, in pixels.
          NOTE(review): this parameter is overwritten below by the measured
          value fetched from VOSpace, so the default is effectively unused.
    sex_thresh: the detection threshold to run SExtractor at
    wave_thresh: the detection threshold for the wavelet code
    maxcount: saturation level
    """
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    # retrieved for its side effect: step1jmp/step1matt expect the mopheader on disk
    mopheader = storage.get_image(expnum, ccd, version=version,
                                  ext='mopheader', prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]
    outfile = util.exec_prog(['step1jmp', '-f', basename,
                              '-t', str(wave_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])
    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename+".obj.jmp"
    storage.copy(obj_filename, obj_uri)
    ## for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT', '06Bm02.flat.r.36.01.fits')
    flat_name = flat_name[0:-5]  # strip the trailing '.fits'
    flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                                      subdir='calibrators', rescale=False)
    if not os.access('weight.fits', os.R_OK):
        # step1matt reads the flat via this fixed local name
        os.symlink(flat_filename, 'weight.fits')
    outfile = util.exec_prog(['step1matt', '-f', basename,
                              '-t', str(sex_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])
    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename+".obj.matt"
    storage.copy(obj_filename, obj_uri)
    return True
def detect_mbc(family_name, object_name, expnum, phot_table, test):
    """Compare psf of asteroid with mean of stars to detect possible activity.

    family_name / object_name: identify the asteroid
    expnum: exposure identifier (e.g. '1234567p')
    phot_table: photometry output table queried for the object/exposure row
    test: unused here (accepted for interface compatibility)
    """
    # read in asteroid identification values from the photometry output
    asteroid_id = phot_table.query(
        '{} == "{}" & {} == "{}"'.format(_OBJECT_HEADER, object_name, _EXPNUM_HEADER, expnum))
    print(asteroid_id)
    assert len(asteroid_id) == 1, 'No object or multiple objects identified'

    # read in postage stamp header and data, do photometry to measure background (needed for saturation check)
    header, exp_data, fits_file = fits_data(object_name, expnum, family_name)

    # get fwhm from OSSOS VOSpace file.
    # BUG FIX: this lookup previously happened *after* the two early-return
    # branches below, both of which pass `fwhm` to build_ast_profile(); taking
    # either branch raised UnboundLocalError.
    fwhm = storage.get_fwhm(expnum.strip('p'), header[_CCD].split('d')[1])

    # make sure that a mean star psf has been created form the OSSOS pipeline
    if not storage.get_status(expnum.strip('p'), header[_CCD].split('d')[1], 'mkpsf'):
        print('>> PSF does not exist')
        ast_sky_psf = build_ast_profile(asteroid_id, exp_data, fwhm, family_name)  # to build cutout of object
        write_no_mkpsf(family_name, '{} {}'.format(expnum.strip('p'), header[_CCD].split('d')[1]))
        return

    # reject any object too bright that will definetly be saturated
    mag = asteroid_id[_MAG_HEADER].values[0]
    if mag < 18.5:
        print('>> Object is too bright for accurate photometry')
        ast_sky_psf = build_ast_profile(asteroid_id, exp_data, fwhm, family_name)  # to build cutout of object
        write_too_bright(family_name, asteroid_id)
        return

    bkg, flux, fluxerr = sep_phot(exp_data, asteroid_id)
    ast_sky_psf = build_ast_profile(asteroid_id, exp_data, fwhm, family_name)
    ast_psf = np.subtract(ast_sky_psf, bkg)
    try:
        star_psf = build_star_profile(ast_psf, expnum, header, asteroid_id, fwhm, flux)
    except Exception as e:
        print('Error calculating star psf: {}'.format(e))
        return
def plant(expnums, ccd, rmin, rmax, ang, width, version="s"):
    """run the plant script on this combination of exposures

    Writes the 'proc-these-files' listing, runs plant.csh, then (unless
    args.dryrun) pushes the planted products back to VOSpace.
    """
    # context manager guarantees the listing is closed/flushed before
    # plant.csh reads it, even if a retrieval below raises.
    with open("proc-these-files", "w") as ptf:
        ptf.write("# Files to be planted and search\n")
        ptf.write("# image fwhm plant\n")
        for expnum in expnums:
            fwhm = storage.get_fwhm(expnum, ccd, version=version)
            filename = storage.get_image(expnum, ccd=ccd, version=version)
            ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
            # pre-fetch the ancillary files plant.csh expects to find locally
            for ext in ["apcor", "obj.jmp", "trans.jmp", "psf.fits",
                        "mopheader", "phot", "zeropoint.used"]:
                apcor = storage.get_image(expnum, ccd=ccd, version=version, ext=ext)
    cmd_args = ["plant.csh", os.curdir, str(rmin), str(rmax), str(ang), str(width)]
    util.exec_prog(cmd_args)
    if args.dryrun:
        # Don't push back to VOSpace
        return
    uri = storage.get_uri("Object", ext="planted", version="",
                          subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy("Object.planted", uri)
    uri = os.path.join(os.path.dirname(uri), "plant.shifts")
    storage.copy("shifts", uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version, ext="fits", prefix="fk")
        filename = os.path.basename(uri)
        storage.copy(filename, uri)
        for ext in ["mopheader", "psf.fits", "fwhm", "apcor", "zeropoint.used", "trans.jmp"]:
            # replace any fk 's' product with a link to the unplanted version
            storage.delete(expnum, ccd, "s", ext, prefix="fk")
            storage.vlink(expnum, ccd, "s", ext, expnum, ccd, "s", ext, l_prefix="fk")
    return
# Drive mkpsf over every requested exposure on the single requested CCD,
# recording the measured fwhm/zeropoint (and the task status) as VOSpace tags.
# NOTE(review): this fragment is truncated in the source as received -- the
# final storage.set_status(...) call is cut off mid-argument-list.
ccdlist = [args.ccd]
for expnum in args.expnum:
    for ccd in ccdlist:
        # skip work already marked done unless --force was given
        if storage.get_status(expnum, ccd, 'mkpsf', version=args.type) and not args.force:
            logging.info("Already did %s %s, skipping" % (str(expnum), str(ccd)))
            continue
        try:
            message = 'success'
            mkpsf(expnum, ccd, args.type)
            # tag the exposure with the values mkpsf just measured
            storage.set_status(expnum, ccd, 'fwhm',
                               version=args.type,
                               status=str(storage.get_fwhm(
                                   expnum, ccd, version=args.type)))
            storage.set_status(expnum, ccd, 'zeropoint',
                               version=args.type,
                               status=str(storage.get_zeropoint(
                                   expnum, ccd, version=args.type)))
        except Exception as e:
            # record the failure message as the task status instead of aborting
            message = str(e)
            logging.error(message)
        storage.set_status(
            expnum, ccd, 'mkpsf', version=args.type,
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT,
        dry_run=False, force=True):
    """Run the step1jmp/step1matt source-detection codes on one CCD.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    prefix: optional filename prefix (e.g. 'fk')
    version: processing stage of the image ('o', 'p' or 's')
    sex_thresh: the detection threshold to run SExtractor at
    wave_thresh: the detection threshold for the wavelet code
    maxcount: saturation level
    dry_run: when True, results and status are not copied back to VOSpace
    force: re-run even if the task is already marked successful
           (NOTE(review): defaults to True here, unlike sibling run()
           variants which default force=False -- confirm intended)
    """
    message = storage.SUCCESS
    # skip if already recorded as done, unless forced
    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            # the dependency task must have completed first
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
            basename = os.path.splitext(filename)[0]
            # materialize 'weight.fits' needed by step1matt
            _get_weight_map(filename, ccd)
            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp', '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            logging.info(util.exec_prog(['step1matt', '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')
            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext, prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    # holding the file open while retrying the VOSpace copy
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                # give up after ~10 failed transfer attempts
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        if not dry_run:
            # record success or the failure message as the task status
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):
    """Combine the step1 'obj.jmp'/'obj.matt' catalogs for one CCD into a
    single MEF 'obj.fits' file and push it to VOSpace.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    version: processing stage of the image ('o', 'p' or 's')
    prefix: optional filename prefix (e.g. 'fk')
    dry_run: when True, the resulting status is not copied back to VOSpace
    force: re-run even if the task is already marked successful
    """
    message = 'success'
    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task=task, prefix=prefix, expnum=expnum,
                                ccd=ccd, version=version, dry_run=dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))
            header = storage.get_astheader(expnum, ccd)
            datasec = storage.datasec_to_list(
                header.get('DATASEC', '[80:2080,30,4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except Exception:  # was a bare 'except:'; fwhm may not be measured yet
                fwhm = 'unknown'
            for keyword in del_keyword_list:
                try:
                    del header[keyword]
                except KeyError:  # was a bare 'except:'; keyword simply absent
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([primary_hdu, ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.' + ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                # clear out any stale local copies before retrieving
                try:
                    os.unlink(name)
                    os.unlink(name + ".fits")
                except OSError:  # was a bare 'except:'; files may not exist
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()
                # keep only sources inside the DATASEC region
                t = numpy.all([
                    datasec[0] < obj_file.data['X'],
                    obj_file.data['X'] < datasec[1],
                    datasec[2] < obj_file.data['Y'],
                    obj_file.data['Y'] < datasec[3]
                ], axis=0)
                logging.info(
                    "Source remaining after datasec cut: {} of {}".format(
                        len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)
            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name + " -> " + uri)
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        # give up after ~10 failed transfer attempts
                        if count > 10:
                            raise ex
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)
        if not dry_run:
            storage.set_status(task, prefix, expnum, version=version, ccd=ccd,
                               status=message)
def step1(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
          wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT, dry_run=False):
    """Run the step1jmp and step1matt source-detection codes on one CCD.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    prefix: optional filename prefix (e.g. 'fk')
    version: processing stage of the image ('o', 'p' or 's')
    sex_thresh: the detection threshold to run SExtractor at
    wave_thresh: the detection threshold for the wavelet code
    maxcount: saturation level
    dry_run: when True, do not copy the catalogs back to VOSpace
    """
    storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]
    logging.info(util.exec_prog(['step1jmp', '-f', basename,
                                 '-t', str(wave_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))
    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"
    if not dry_run:
        storage.copy(obj_filename, obj_uri)
    # for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT', 'weight.fits')
    # strip the '.fits' extension, handling an extra '.fz' compression suffix
    parts = os.path.splitext(flat_name)
    if parts[1] == '.fz':
        flat_name = os.path.splitext(parts[0])[0]
    else:
        flat_name = parts[0]
    try:
        flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                                          subdir='calibrators')
    except Exception:  # was a bare 'except:'; fall back to the old calibrator area
        flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                                          subdir='old_calibrators')
    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')
    if not os.access('weight.fits', os.R_OK):
        # step1matt reads the flat via this fixed local name
        os.symlink(flat_filename, 'weight.fits')
    logging.info(util.exec_prog(['step1matt', '-f', basename,
                                 '-t', str(sex_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))
    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')
    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"
    if not dry_run:
        storage.copy(obj_filename, obj_uri)
    return True
def main(expnum, ccd):
    """Bundle the step1 'obj.jmp'/'obj.matt' catalogs for one CCD into a single
    MEF 'obj.fits' file and copy it to VOSpace (with retries).
    """
    header = storage.get_astheader(expnum, ccd)
    datasec = storage.datasec_to_list(
        header.get('DATASEC', '[80:2080,30,4160]'))
    try:
        fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
    except Exception:  # was a bare 'except:'; fwhm may not be measured yet
        fwhm = 'unknown'
    for keyword in del_keyword_list:
        try:
            del header[keyword]
        except KeyError:  # was a bare 'except:'; keyword simply absent
            pass
    header['FWHM'] = (fwhm, 'FWHM in pixels')
    header['EXTNAME'] = 'header'
    primary_hdu = fits.PrimaryHDU(header=header)
    hdu_list = fits.HDUList([primary_hdu, ])
    for ext in ['jmp', 'matt']:
        extension = 'obj.' + ext
        name = "{}p{:02d}.{}".format(expnum, ccd, extension)
        # clear out any stale local copies before retrieving
        try:
            os.unlink(name)
            os.unlink(name + ".fits")
        except OSError:  # was a bare 'except:'; files may not exist
            pass
        logging.info("Retrieving {}".format(name))
        obj_file = mop_file.Parser(expnum, ccd, extension)
        obj_file.parse()
        # keep only sources inside the DATASEC region
        t = numpy.all([
            datasec[0] < obj_file.data['X'],
            obj_file.data['X'] < datasec[1],
            datasec[2] < obj_file.data['Y'],
            obj_file.data['Y'] < datasec[3]
        ], axis=0)
        logging.info("Source remaining after datasec cut: {} of {}".format(
            len(obj_file.data[t]['X']), len(t)))
        table_hdu = fits.table_to_hdu(obj_file.data[t])
        table_hdu.header['CATALOG'] = name
        table_hdu.header['EXTNAME'] = ext
        hdu_list.append(table_hdu)
        del table_hdu
        del obj_file
        os.unlink(name)
    name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
    if os.access(name, os.F_OK):
        os.unlink(name)
    hdu_list.writeto(name)
    uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
    logging.info(name + " -> " + uri)
    count = 0
    while True:
        print("Copy attempt {}".format(count))
        try:
            storage.copy(name, uri)
            os.unlink(name)
            break
        except Exception as ex:
            # give up after ~10 failed transfer attempts
            if count > 10:
                raise ex
            count += 1
def main(task='mkpsf'):
    """Command-line driver for the makepsf chunk of the OSSOS pipeline.

    Parses arguments, then runs mkpsf for every requested exposure/ccd and
    records the measured fwhm/zeropoint and the task status as VOSpace tags.
    Returns the process exit code (always 0 here).
    """
    parser = argparse.ArgumentParser(
        description='Run makepsf chunk of the OSSOS pipeline')
    parser.add_argument('--ccd', '-c',
                        action='store',
                        type=int,
                        dest='ccd',
                        default=None,
                        help='which ccd to process, default is all')
    parser.add_argument("--dbimages",
                        action="store",
                        default="vos:OSSOS/dbimages",
                        help='vospace dbimages containerNode')
    parser.add_argument("expnum",
                        type=int,
                        nargs='+',
                        help="expnum(s) to process")
    parser.add_argument("--dry-run",
                        action="store_true",
                        help="DRY RUN, don't copy results to VOSpace, implies --force")
    parser.add_argument("--fk", action="store_true", help="Run fk images")
    parser.add_argument("--type", "-t", choices=['o', 'p', 's'],
                        help="which type of image: o-RAW, p-ELIXIR, s-SCRAMBLE", default='p')
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("--force", default=False, action="store_true")
    parser.add_argument("--debug", "-d", action="store_true")
    args = parser.parse_args()
    util.set_logger(args)
    prefix = (args.fk and 'fk') or ''
    task = util.task()  # derive the task name from the executable name
    dependency = 'mk_mopheader'
    storage.DBIMAGES = args.dbimages
    exit_code = 0
    for expnum in args.expnum:
        if args.ccd is None:
            # CCD count depends on the Megaprime focal-plane era
            if int(expnum) < 1785619:
                # Last exposures with 36 CCD Megaprime
                ccdlist = range(0, 36)
            else:
                # First exposrues with 40 CCD Megaprime
                ccdlist = range(0, 40)
        else:
            ccdlist = [args.ccd]
        for ccd in ccdlist:
            # skip already-completed work unless forced
            if storage.get_status(task, prefix, expnum, version=args.type, ccd=ccd) and not args.force:
                logging.info("{} completed successfully for {} {} {} {}".format(
                    task, prefix, expnum, args.type, ccd))
                continue
            storage.set_logger(task, prefix, expnum, ccd, args.type, args.dry_run)
            message = 'success'
            try:
                # the mopheader must exist before a PSF can be built
                if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                    raise IOError("{} not yet run for {}".format(dependency, expnum))
                mkpsf(expnum, ccd, args.type, args.dry_run, prefix=prefix)
                if args.dry_run:
                    continue
                # tag the exposure with the values mkpsf just measured
                storage.set_status('fwhm', prefix, expnum, version=args.type, ccd=ccd,
                                   status=str(storage.get_fwhm(
                                       expnum, ccd, version=args.type)))
                storage.set_status('zeropoint', prefix, expnum, version=args.type, ccd=ccd,
                                   status=str(storage.get_zeropoint(
                                       expnum, ccd, version=args.type)))
            except Exception as e:
                # record the failure as the task status rather than aborting the batch
                message = str(e)
                logging.error(message)
            if not args.dry_run:
                storage.set_status(task, prefix, expnum, version=args.type, ccd=ccd,
                                   status=message)
    return exit_code
#!python from ossos import storage from astropy.io import ascii from cStringIO import StringIO import math import sys expnum = sys.argv[1] ccd = sys.argv[2] fwhm = storage.get_fwhm(expnum, ccd) class Apcor(object): def __init__(self, expnum, ccd): self.expnum = expnum self.ccd = ccd self.apcor_array = storage.open_vos_or_local(self.uri).read().split() @property def ap_in(self): return float(self.apcor_array[0]) @property def ap_cor(self): return float(self.apcor_array[2]) @property def uri(self): return storage.get_uri(self.expnum, self.ccd, ext='apcor')
def main(task='mkpsf'):
    """Command-line driver for the makepsf chunk of the OSSOS pipeline
    (older variant using expnum-first storage.get_status/set_status calls).

    Returns the process exit code; on a CalledProcessError the command output
    string is propagated as the exit code.
    """
    parser = argparse.ArgumentParser(
        description='Run makepsf chunk of the OSSOS pipeline')
    parser.add_argument('--ccd', '-c',
                        action='store',
                        type=int,
                        dest='ccd',
                        default=None,
                        help='which ccd to process, default is all')
    parser.add_argument('--ignore-update-headers',
                        action='store_true',
                        dest='ignore_update_headers')
    parser.add_argument("--dbimages",
                        action="store",
                        default="vos:OSSOS/dbimages",
                        help='vospace dbimages containerNode')
    parser.add_argument("expnum",
                        type=int,
                        nargs='+',
                        help="expnum(s) to process")
    parser.add_argument(
        "--dry_run",
        action="store_true",
        help="DRY RUN, don't copy results to VOSpace, implies --force")
    parser.add_argument("--fk", action="store_true", help="Run fk images")
    parser.add_argument(
        "--type", "-t",
        choices=['o', 'p', 's'],
        help="which type of image: o-RAW, p-ELIXIR, s-SCRAMBLE",
        default='p')
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("--force", default=False, action="store_true")
    parser.add_argument("--debug", "-d", action="store_true")
    args = parser.parse_args()
    if args.dry_run:
        args.force = True  # --dry_run implies --force (see help text)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    elif args.verbose:
        logging.basicConfig(level=logging.INFO)
    prefix = (args.fk and 'fk') or ''
    storage.DBIMAGES = args.dbimages
    if args.ccd is None:
        ccdlist = range(0, 36)  # all CCDs of the 36-chip mosaic
    else:
        ccdlist = [args.ccd]
    exit_code = 0
    for expnum in args.expnum:
        for ccd in ccdlist:
            # route logs under the name of the running executable
            storage.set_logger(
                os.path.splitext(os.path.basename(sys.argv[0]))[0],
                prefix, expnum, ccd, args.type, args.dry_run)
            # skip already-completed work unless forced
            if storage.get_status(
                    expnum, ccd, prefix + task, version=args.type) and not args.force:
                logging.info(
                    "{} completed successfully for {} {} {} {}".format(
                        task, prefix, expnum, args.type, ccd))
                continue
            message = 'success'
            try:
                # headers must be updated (checked on ccd 36) before mkpsf runs
                if not storage.get_status(
                        expnum, 36, 'update_header') and not args.ignore_update_headers:
                    raise IOError(
                        "update_header not yet run for {}".format(expnum))
                mkpsf(expnum, ccd, args.type, args.dry_run, prefix=prefix)
                if args.dry_run:
                    continue
                # tag the exposure with the values mkpsf just measured
                storage.set_status(expnum, ccd, prefix + 'fwhm',
                                   version=args.type,
                                   status=str(storage.get_fwhm(expnum, ccd,
                                                               version=args.type)))
                storage.set_status(expnum, ccd, prefix + 'zeropoint',
                                   version=args.type,
                                   status=str(storage.get_zeropoint(
                                       expnum, ccd, version=args.type)))
            except CalledProcessError as cpe:
                message = str(cpe.output)
                # NOTE(review): exit_code is set to the command output string,
                # not an integer -- looks intentional (propagated to caller),
                # but confirm callers expect a string here.
                exit_code = message
            except Exception as e:
                message = str(e)
                logging.error(message)
            if not args.dry_run:
                storage.set_status(expnum, ccd, prefix + 'mkpsf',
                                   version=args.type, status=message)
    return exit_code
def remeasure(mpc_in, recentroided=False):
    """
    Re-measure the astrometry and photometry of the given mpc line.

    mpc_in: the observation line to re-measure
    recentroided: when True, skip the large-offset X/Y recomputation step
    Returns an updated deep copy of mpc_in, or mpc_in unchanged when the
    line cannot be processed.
    """
    if mpc_in.null_observation:
        return mpc_in
    mpc_obs = deepcopy(mpc_in)
    logging.debug("rm start: {}".format(mpc_obs.to_string()))

    if not isinstance(mpc_obs.comment, mpc.OSSOSComment):
        logging.error("Failed to convert comment line")
        return mpc_in
    parts = re.search(r'(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)',
                      mpc_obs.comment.frame)
    if not parts:
        logging.error("Failed to parse expnum from frame info in comment line")
        return mpc_in
    start_coordinate = mpc_in.coordinate
    try:
        header = storage._get_sghead(parts.group('expnum'))[int(parts.group('ccd')) + 1]
    except IOError as ioerr:
        logging.error(str(ioerr))
        logging.error("Failed to get astrometric header for: {}".format(mpc_obs))
        return mpc_in

    this_wcs = wcs.WCS(header)
    try:
        x = mpc_obs.comment.x
        y = mpc_obs.comment.y
        (ra, dec) = this_wcs.xy2sky(x, y, usepv=True)
        mpc_obs.coordinate = (ra, dec)
        sep = mpc_in.coordinate.separation(mpc_obs.coordinate)
        if sep > TOLERANCE * 20 and mpc_in.discovery and (
                int(parts.group('ccd')) < 18 or int(parts.group('ccd')) in [36, 37]):
            logging.warn("Large ({}) offset using X/Y in comment line to compute RA/DEC".format(sep))
            logging.warn("This is a discovery line so flipping/flopping the x/y position recorded in comment as that "
                         "may be taken from a flip/flopped image.")
            x = header['NAXIS1'] - x + 1
            y = header['NAXIS2'] - y + 1
            # NOTE(review): sep is recomputed here *before* mpc_obs.coordinate
            # is updated with the flipped solution, so it still reflects the
            # unflipped position and the revert branch below always fires.
            # Preserved as-is; confirm intent before reordering.
            sep = mpc_in.coordinate.separation(mpc_obs.coordinate)
            (ra, dec) = this_wcs.xy2sky(x, y, usepv=True)
            mpc_obs.coordinate = (ra, dec)
            if sep > TOLERANCE * 20 and mpc_in.discovery:
                logging.warn("Large ({}) offset after flipping, so reverting.".format(sep))
                # Try un-flipping.
                x = float(mpc_in.comment.x)
                y = float(mpc_in.comment.y)
                (ra, dec) = this_wcs.xy2sky(x, y, usepv=True)
                mpc_obs.coordinate = (ra, dec)
    except Exception:  # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
        logging.warn("Failed to get the X Y coordinate from comment line.")
        return mpc_in

    if sep > TOLERANCE and not recentroided:
        # use the old header RA/DEC to predict the X/Y and then use that X/Y to get new RA/DEC
        # BUG FIX: the '{}' placeholder and the .format(sep) call were on
        # different log statements, so the separation value was never printed.
        logging.warn("sep: {} --> large offset when using comment line X/Y to compute RA/DEC".format(sep))
        logging.warn("Using RA/DEC and original WCS to compute X/Y and replacing X/Y in comment.")
        header2 = storage.get_astheader(parts.group('expnum'), int(parts.group('ccd')))
        image_wcs = wcs.WCS(header2)
        (x, y) = image_wcs.sky2xy(mpc_in.coordinate.ra.degree,
                                  mpc_in.coordinate.dec.degree, usepv=False)
        (ra, dec) = this_wcs.xy2sky(x, y, usepv=True)
        logging.warn("Coordinate changed: ({:5.2f},{:5.2f}) --> ({:5.2f},{:5.2f})".format(
            mpc_obs.comment.x, mpc_obs.comment.y, x, y))
        mpc_obs.coordinate = (ra, dec)
        mpc_obs.comment.x = x
        mpc_obs.comment.y = y

    try:
        # centroid uncertainty: magnitude error scaled by the seeing (arcsec)
        merr = float(mpc_obs.comment.mag_uncertainty)
        fwhm = float(storage.get_fwhm(parts.group('expnum'),
                                      int(parts.group('ccd')))) * header['PIXSCAL1']
        centroid_err = merr * fwhm
        logging.debug("Centroid uncertainty: {} {} => {}".format(merr, fwhm, centroid_err))
    except Exception as err:
        logging.error(str(err))
        logging.error("Failed to compute centroid_err for observation:\n"
                      "{}\nUsing default of 0.2".format(mpc_obs.to_string()))
        centroid_err = 0.2

    mpc_obs.comment.astrometric_level = header.get('ASTLEVEL', "0")
    try:
        # plate uncertainty: quadrature sum of astrometric and centroid errors
        asterr = float(header['ASTERR'])
        residuals = (asterr ** 2 + centroid_err ** 2) ** 0.5
        logging.debug("Residuals: {} {} => {}".format(asterr, centroid_err, residuals))
    except Exception as err:
        logging.error(str(err))
        # BUG FIX: this error path called mpc_obs.to_stirng() (typo), which
        # raised AttributeError instead of logging the failing observation.
        logging.error("Failed while trying to compute plate uncertainty for\n{}".format(mpc_obs.to_string()))
        logging.error('Using default of 0.25')
        residuals = 0.25
    mpc_obs.comment.plate_uncertainty = residuals
    logging.debug("sending back: {}".format(mpc_obs.to_string()))
    return mpc_obs
# NOTE(review): this fragment begins with a dangling 'else:' -- the matching
# 'if' (presumably selecting a full CCD list when --ccd is not given) is not
# part of the source as received.
else:
    ccdlist = [args.ccd]
# Drive mkpsf over every requested exposure/ccd, recording the measured
# fwhm/zeropoint and the task status as VOSpace tags.
for expnum in args.expnum:
    for ccd in ccdlist:
        # skip work already marked done unless --force was given
        if storage.get_status(expnum, ccd, 'mkpsf') and not args.force:
            logging.info("Already did %s %s, skipping" % (str(expnum), str(ccd)))
            continue
        try:
            message = 'success'
            mkpsf(expnum, ccd)
            # tag the exposure with the values mkpsf just measured
            storage.set_status(expnum, ccd, 'fwhm',
                               str(storage.get_fwhm(expnum, ccd)))
            storage.set_status(expnum, ccd, 'zeropoint',
                               str(storage.get_zeropoint(expnum, ccd)))
        except Exception as e:
            # record the failure message as the task status instead of aborting
            message = str(e)
            logging.error(message)
        storage.set_status(expnum, ccd, 'mkpsf', message)
def remeasure(mpc_in):
    """
    Re-measure the astrometry (and eventually photometry) of the given mpc line.

    Returns an updated deep copy of mpc_in, or mpc_in unchanged when the line
    cannot be processed.
    """
    # TODO Actually implement this.  (photometry re-measurement still pending)
    if mpc_in.null_observation:
        return mpc_in
    mpc_obs = deepcopy(mpc_in)
    logging.debug("rm start: {}".format(mpc_obs.to_string()))
    if not isinstance(mpc_obs.comment, mpc.OSSOSComment):
        return mpc_in
    parts = re.search(r'(?P<expnum>\d{7})(?P<type>\S)(?P<ccd>\d\d)',
                      mpc_obs.comment.frame)
    if not parts:
        return mpc_in
    start_coordinate = mpc_in.coordinate
    assert isinstance(start_coordinate, ICRSCoordinates)
    try:
        header = storage.get_astheader(parts.group('expnum'), int(parts.group('ccd')))
    except IOError as ioerr:
        logging.error(str(ioerr))
        logging.error("Failed to get astrometric header for: {}".format(mpc_obs))
        return mpc_in
    this_wcs = wcs.WCS(header)
    try:
        x = float(mpc_obs.comment.x)
        y = float(mpc_obs.comment.y)
        if mpc_in.discovery:
            # discovery measurements are made on a flipped display image
            logging.debug("Discovery astrometric lines are normally flipped relative to storage.")
            x = header['NAXIS1'] - x + 1
            y = header['NAXIS2'] - y + 1
    except Exception:  # was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt propagate
        logging.warn("Failed to X/Y from comment line.")
        return mpc_in
    (ra, dec) = this_wcs.xy2sky(x, y)
    mpc_obs.coordinate = (ra, dec)
    logging.debug("rm updat: {}".format(mpc_obs.to_string()))
    sep = start_coordinate.separation(mpc_obs.coordinate).degrees * 3600.0
    if sep > TOLERANCE and mpc_in.discovery:
        # flipping made things worse; retry with the raw (unflipped) X/Y
        logging.warn("{} --> Using the unflipped X/Y for a discovery observation line.".format(sep))
        logging.debug("{} {} {} {} {}".format(sep, x, y, mpc_obs.comment.x, mpc_obs.comment.y))
        # Try un-flipping.
        x = float(mpc_obs.comment.x)
        y = float(mpc_obs.comment.y)
        (ra, dec) = this_wcs.xy2sky(x, y)
        mpc_obs.coordinate = (ra, dec)
        sep = start_coordinate.separation(mpc_obs.coordinate).degrees * 3600.0
        logging.debug("remod: {}".format(mpc_obs.coordinate))
        logging.debug("SEP: {}".format(sep))
        logging.debug("rm flip: {}".format(mpc_obs.to_string()))
    if sep > TOLERANCE:
        # use the old header RA/DEC to predict the X/Y and then use that X/Y to get new RA/DEC
        logging.debug("Ignoring recorded X/Y and using previous to RA/DEC and WCS to compute X/Y")
        header2 = storage.get_image(parts.group('expnum'),
                                    int(parts.group('ccd')),
                                    return_file=False,
                                    flip_image=False)[0].header
        image_wcs = wcs.WCS(header2)
        (x, y) = image_wcs.sky2xy(mpc_in.coordinate.ra.degrees, mpc_in.coordinate.dec.degrees)
        (ra, dec) = this_wcs.xy2sky(x, y)
        logging.debug("({},{}) --> ({},{})".format(mpc_obs.comment.x, mpc_obs.comment.y, x, y))
        mpc_obs.coordinate = (ra, dec)
        mpc_obs.comment.x = x
        mpc_obs.comment.y = y
    try:
        # centroid uncertainty: magnitude error scaled by the seeing (arcsec)
        merr = float(mpc_obs.comment.mag_uncertainty)
        fwhm = float(storage.get_fwhm(parts.group('expnum'),
                                      int(parts.group('ccd')))) * header['PIXSCAL1']
        centroid_err = merr * fwhm
        logging.debug("Centroid uncertainty: {} {} => {}".format(merr, fwhm, centroid_err))
    except Exception as err:
        logging.error(str(err))
        logging.error("Failed to compute centroid for observation:\n"
                      "{}\nUsing default of 0.2".format(mpc_obs.to_string()))
        centroid_err = 0.2
    mpc_obs.comment.astrometric_level = header.get('ASTLEVEL', "0")
    try:
        # plate uncertainty: quadrature sum of astrometric and centroid errors
        asterr = float(header['ASTERR'])
        residuals = (asterr ** 2 + centroid_err ** 2) ** 0.5
        logging.debug("Residuals: {} {} => {}".format(asterr, centroid_err, residuals))
    except Exception as err:
        logging.error(str(err))
        # BUG FIX: this error path called mpc_obs.to_stirng() (typo), which
        # raised AttributeError instead of logging the failing observation.
        logging.error("Failed while trying to compute plate uncertainty for\n{}".format(mpc_obs.to_string()))
        logging.error('Using default of 0.25')
        residuals = 0.25
    mpc_obs.comment.plate_uncertainty = residuals
    logging.debug("sending back: {}".format(mpc_obs.to_string()))
    return mpc_obs
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):
    """Build the obj.fits multi-extension catalog for one CCD and copy it to VOSpace.

    Combines the astrometric header plus the step1 'jmp' and 'matt' object
    catalogs (trimmed to the DATASEC region) into a single FITS file.

    expnum: the CFHT exposure number to process
    ccd: which ccd in the mosaic to process
    version: processing version of the image ('p' == processed)
    prefix: optional filename prefix (e.g. 'fk' for planted images)
    dry_run: when True, do not record task status at the end
    force: re-run even if this task already completed successfully
    """
    message = 'success'
    # Skip the work entirely if this task already succeeded, unless forced.
    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task=task, prefix=prefix, expnum=expnum, ccd=ccd, version=version, dry_run=dry_run):
        try:
            # The upstream task must have completed before we can build the catalog.
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))
            header = storage.get_astheader(expnum, ccd)
            datasec = storage.datasec_to_list(header.get('DATASEC', '[80:2080,30,4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except Exception:
                # FWHM may not have been measured yet; record that explicitly.
                fwhm = 'unknown'
            # Strip keywords that should not be propagated into the catalog header.
            for keyword in del_keyword_list:
                try:
                    del header[keyword]
                except KeyError:
                    # Keyword absent from this header: nothing to remove.
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([primary_hdu, ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.'+ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                # Remove stale local copies before retrieving fresh ones.
                try:
                    os.unlink(name)
                    os.unlink(name+".fits")
                except OSError:
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()
                # Keep only sources strictly inside the DATASEC boundary.
                t = numpy.all([datasec[0] < obj_file.data['X'], obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'], obj_file.data['Y'] < datasec[3]], axis=0)
                logging.info("Source remaining after datasec cut: {} of {}".format(len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)
            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name+" -> "+uri)
            # VOSpace copies are flaky; retry up to 10 times before giving up.
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        if count > 10:
                            raise ex
            logging.info(message)
        except Exception as e:
            # Record the failure reason as the task status instead of crashing.
            message = str(e)
            logging.error(message)
        if not dry_run:
            storage.set_status(task, prefix, expnum, version=version, ccd=ccd, status=message)
def step1(expnum, ccd, prefix='', version='p', fwhm=4, sex_thresh=1.3, wave_thresh=2.7, maxcount=30000): """run the actual step1jmp/matt codes. expnum: the CFHT expousre to process ccd: which ccd in the mosaic to process fwhm: the image quality, FWHM, of the image. In pixels. sex_thresh: the detection threhold to run sExtractor at wave_thresh: the detection threshold for wavelet maxcount: saturation level """ filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) mopheader = storage.get_image(expnum, ccd, version=version, ext='mopheader', prefix=prefix) fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version) basename = os.path.splitext(filename)[0] outfile = util.exec_prog([ 'step1jmp', '-f', basename, '-t', str(wave_thresh), '-w', str(fwhm), '-m', str(maxcount) ]) obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp', prefix=prefix) obj_filename = basename + ".obj.jmp" storage.copy(obj_filename, obj_uri) ## for step1matt we need the weight image flat_name = fits.open(filename)[0].header['FLAT'] flat_name = flat_name[0:-5] flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='calibrators', rescale=False) if not os.access('weight.fits', os.R_OK): os.symlink(flat_filename, 'weight.fits') outfile = util.exec_prog([ 'step1matt', '-f', basename, '-t', str(sex_thresh), '-w', str(fwhm), '-m', str(maxcount) ]) obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt', prefix=prefix) obj_filename = basename + ".obj.matt" storage.copy(obj_filename, obj_uri) return True
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script on one CCD and push the products to VOSpace.

    expnum: the CFHT exposure number to process
    ccd: which ccd in the mosaic to process
    version: processing version of the image (e.g. 'p')
    dry_run: when True, stop after running mkpsf locally; nothing is copied back
    prefix: optional filename prefix (e.g. 'fk' for planted images)
    force: re-run even if this task already completed successfully
    """
    message = storage.SUCCESS
    # Skip if this task already succeeded for this exposure/ccd, unless forced.
    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            # The upstream dependency task must have completed first.
            if not storage.get_status(
                    dependency, prefix, expnum, version, ccd=ccd):
                raise IOError("{} not yet run for {}".format(
                    dependency, expnum))
            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)
            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            # get mopheader from the vospace storage area
            logging.info("Getting mopheader from VOSpace")
            # NOTE(review): mopheader_filename is not used afterwards; the call is
            # presumably made for its download side effect — confirm.
            mopheader_filename = storage.get_file(expnum, ccd, version=version, prefix=prefix,
                                                  ext='mopheader')
            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(
                util.exec_prog(
                    ['jmpmakepsf.csh', './', filename, 'yes', 'yes']))
            if dry_run:
                return
            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]
            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor', 'fwhm', 'phot'):
                dest = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext=ext)
                source = basename + "." + str(ext)
                # VOSpace copies are flaky; retry up to 10 times before giving up.
                # The open() also verifies the product file exists locally.
                count = 0
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info("Attempt {} to copy {} -> {}".format(
                                count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex
            # set some data parameters associated with the image, determined in this step.
            storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd, status=str(
                storage.get_fwhm(expnum, ccd=ccd, prefix=prefix, version=version)))
            storage.set_status('zeropoint', prefix, expnum, version=version, ccd=ccd, status=str(
                storage.get_zeropoint(expnum, ccd=ccd, prefix=prefix, version=version)))
            logging.info(message)
        except Exception as e:
            # Record the failure reason as the task status instead of crashing.
            message = str(e)
            logging.error(message)
        # NOTE(review): status is written even when dry_run is True (reachable here
        # only if an exception occurred before the dry-run early return) — confirm
        # whether a dry run should really record task status.
        storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message)
    return
if expnum < 1785619: # Last exposures with 36 CCD Megaprime ccdlist = range(0, 36) else: # First exposrues with 40 CCD Megaprime ccdlist = range(0, 40) for ccd in ccdlist: try: try: header = storage.get_astheader(expnum, ccd) datasec = storage.datasec_to_list(header.get('DATASEC', '[80:2080,30,4160]')) except Exception as ex: print(ex) continue try: fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd)) except: fwhm = 'unknown' for keyword in del_keyword_list: try: del(header[keyword]) except: pass header['FWHM'] = (fwhm, 'FWHM in pixels') header['EXTNAME'] = 'header' primary_hdu = fits.PrimaryHDU(header=header) hdu_list = fits.HDUList([primary_hdu, ]) for ext in ['jmp', 'matt']: extension = 'obj.'+ext name = "{}p{:02d}.{}".format(expnum, ccd, extension) try:
def plant(expnums, ccd, rmin, rmax, ang, width, version='s'): '''run the plant script on this combination of exposures''' ptf = open('proc-these-files', 'w') ptf.write("# Files to be planted and search\n") ptf.write("# image fwhm plant\n") for expnum in expnums: fwhm = storage.get_fwhm(expnum, ccd) filename = storage.get_image(expnum, ccd=ccd, version=version) ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm)) for ext in [ 'apcor', 'obj.jmp', 'trans.jmp', 'psf.fits', 'mopheader', 'phot', 'zeropoint.used' ]: apcor = storage.get_image(expnum, ccd=ccd, version='s', ext=ext) ptf.close() cmd_args = [ 'plant.csh', os.curdir, str(rmin), str(rmax), str(ang), str(width) ] util.exec_prog(cmd_args) if args.dryrun: # Don't push back to VOSpace return uri = storage.get_uri('Object', ext='planted', version='', subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2))) storage.copy('Object.planted', uri) uri = os.path.join(os.path.dirname(uri), 'shifts') storage.copy('shifts', uri) for expnum in expnums: uri = storage.get_uri(expnum, ccd=ccd, version=version, ext='fits', prefix='fk') filename = os.path.basename(uri) storage.copy(filename, uri) for ext in [ 'mopheader', 'psf.fits', 'fwhm', 'apcor', 'zeropoint.used', 'trans.jmp' ]: storage.delete(expnum, ccd, 's', ext, prefix='fk') storage.vlink(expnum, ccd, 'p', ext, expnum, ccd, 's', ext, l_prefix='fk') return
## You should have received a copy of the GNU General Public License ## ## along with OSSOS-MOP. If not, see <http://www.gnu.org/licenses/>. ## ## ## ################################################################################ from ossos import storage from astropy.io import ascii from cStringIO import StringIO import math import sys expnum = sys.argv[1] ccd = sys.argv[2] fwhm = storage.get_fwhm(expnum, ccd) class Apcor(object): def __init__(self, expnum, ccd): self.expnum = expnum self.ccd = ccd self.apcor_array = storage.open_vos_or_local(self.uri).read().split() @property def ap_in(self): return float(self.apcor_array[0]) @property def ap_cor(self): return float(self.apcor_array[2])
# Last exposures with 36 CCD Megaprime ccdlist = list(range(0, 36)) else: # First exposrues with 40 CCD Megaprime ccdlist = list(range(0, 40)) for ccd in ccdlist: try: try: header = storage.get_astheader(expnum, ccd) datasec = storage.datasec_to_list( header.get('DATASEC', '[80:2080,30,4160]')) except Exception as ex: print(ex) continue try: fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd)) except: fwhm = 'unknown' for keyword in del_keyword_list: try: del (header[keyword]) except: pass header['FWHM'] = (fwhm, 'FWHM in pixels') header['EXTNAME'] = 'header' primary_hdu = fits.PrimaryHDU(header=header) hdu_list = fits.HDUList([ primary_hdu, ]) for ext in ['jmp', 'matt']: extension = 'obj.' + ext
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT,
        dry_run=False, force=True, ignore=False):
    """Run the actual step1jmp/step1matt source-detection codes as a tracked task.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    dry_run: when True, run locally but do not copy results or record status
    force: re-run even if this task already completed successfully
    ignore: proceed even if the dependency task has not completed
    """
    message = storage.SUCCESS
    # Skip if this task already succeeded for this exposure/ccd, unless forced.
    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            # The upstream dependency task must have completed (unless ignored).
            if not storage.get_status(dependency, prefix, expnum, version, ccd) and not ignore:
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            # The mopheader must be present locally for the step1 codes to run.
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version, default=3.5)
            basename = os.path.splitext(filename)[0]
            # step1matt needs a weight image; helper links/builds weight.fits.
            _get_weight_map(filename, ccd)
            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            # Clean up the weight image link once both detectors have run.
            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')
            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext, prefix=prefix)
                    obj_filename = basename + "." + ext
                    # VOSpace copies are flaky; retry up to 10 times before
                    # giving up.  The open() also verifies the product exists.
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            # Record the failure reason as the task status instead of crashing.
            message = str(ex)
            logging.error(message)
        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script on one CCD and push the products to VOSpace.

    expnum: the CFHT exposure number to process
    ccd: which ccd in the mosaic to process
    version: processing version of the image (e.g. 'p')
    dry_run: when True, stop after running mkpsf locally; nothing is copied
        back and no task status is recorded
    prefix: optional filename prefix (e.g. 'fk' for planted images)
    force: re-run even if this task already completed successfully
    """
    message = storage.SUCCESS
    # Skip if this task already succeeded for this exposure/ccd, unless forced.
    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            # The upstream dependency task must have completed first.
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))
            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)
            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            # get mopheader from the vospace storage area (download side effect;
            # the returned filename is not needed afterwards).
            logging.info("Getting mopheader from VOSpace")
            mopheader_filename = storage.get_file(expnum, ccd, version=version, prefix=prefix, ext='mopheader')
            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename, 'yes', 'yes']))
            if dry_run:
                return
            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]
            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor', 'fwhm', 'phot'):
                dest = storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version, ext=ext)
                source = basename + "." + str(ext)
                # VOSpace copies are flaky; retry up to 10 times before giving
                # up.  The open() also verifies the product file exists locally.
                count = 0
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info("Attempt {} to copy {} -> {}".format(count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex
            # set some data parameters associated with the image, determined in this step.
            storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd,
                               status=str(storage.get_fwhm(
                                   expnum, ccd=ccd, prefix=prefix, version=version)))
            storage.set_status('zeropoint', prefix, expnum, version=version, ccd=ccd,
                               status=str(storage.get_zeropoint(
                                   expnum, ccd=ccd, prefix=prefix, version=version)))
            logging.info(message)
        except Exception as e:
            # Record the failure reason as the task status instead of crashing.
            message = str(e)
            logging.error(message)
        # FIX: previously status was recorded unconditionally, so a dry run that
        # raised before its early return still wrote task status to VOSpace.
        # Guard it, matching the other task runners in this file.
        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message)
    return