def step3(expnums, ccd, version, rate_min, rate_max, angle, width,
          field=None, prefix=None, dry_run=False,
          maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0):
    """Run the moving-object search (step3) on the given exp/ccd combo.

    Retrieves the unid.jmp/unid.matt catalogs for each exposure, runs the
    'step3jmp' and 'step3jjk' search programs with the supplied rate/angle
    cuts and, unless dry_run, copies the resulting moving.* products back
    to the dbimages storage area.

    @param expnums: exposure numbers making up the search set
    @param ccd: CCD of the mosaic to search
    @param version: which image version ('o', 'p' or 's') to search
    @param rate_min: minimum rate of motion to search for (''/hour)
    @param rate_max: maximum rate of motion to search for (''/hour)
    @param angle: mean angle of motion to search for
    @param width: +/- range of angles about angle
    @param field: name used for the output products (defaults to expnums[0])
    @param prefix: filename prefix of the image set (e.g. 'fk'); may be None
    @param dry_run: if True, don't push results back to storage
    @param maximum_flux_ratio: extra cut passed to the ISO search ('-fr')
    @param minimum_area: extra cut passed to the ISO search ('-ma')
    @param minimum_median_flux: extra cut passed to the ISO search ('-mf')
    """
    jmp_args = ['step3jmp']
    matt_args = ['step3jjk']
    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        # NOTE(review): unlike the sibling step3, 'trans.jmp' is not
        # fetched here -- confirm the search programs don't need it.
        for ext in ['unid.jmp', 'unid.matt']:
            storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                             prefix=prefix)
        image = os.path.splitext(
            os.path.basename(
                storage.get_uri(expnum, ccd, version=version,
                                prefix=prefix)))[0]
        cmd_args.append('-f%d' % idx)
        cmd_args.append(image)
    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)
    # Add some extra arguments for the ISO search.
    cmd_args.extend(['-fr', str(maximum_flux_ratio),
                     '-ma', str(minimum_area),
                     '-mf', str(minimum_median_flux)])
    matt_args.extend(cmd_args)
    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))
    if dry_run:
        return
    if field is None:
        field = str(expnums[0])
    storage.mkdir(os.path.dirname(
        storage.get_uri(field, ccd=ccd, version=version, prefix=prefix)))
    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field, ccd=ccd, version=version, ext=ext,
                              prefix=prefix)
        # BUG FIX: prefix defaults to None, which '%s' would render as the
        # literal string 'None' in the local filename; treat None as ''.
        filename = '%s%d%s%s.%s' % (prefix or '', expnums[0], version,
                                    str(ccd).zfill(2), ext)
        storage.copy(filename, uri)
    return
def compute_trans(expnums, ccd, version, prefix=None, default="WCS"):
    """
    Pull the astrometric header for each image, compute an x/y transform
    and compare to trans.jmp; this one overrides trans.jmp if they are
    very different.

    @param expnums: exposure numbers whose transforms are compared
    @param ccd: the CCD of the mosaic being processed
    @param version: image version ('o', 'p' or 's')
    @param prefix: filename prefix of the image set (e.g. 'fk')
    @param default: which transform to trust on a mismatch ("WCS" or "JMP")
    @return: summary string of the comparisons, or None if a WCS failed
    """
    wcs_dict = {}
    for expnum in expnums:
        try:
            # TODO This assumes that the image is already N/E flipped.
            # If compute_trans is called after the image is retrieved from
            # archive then we get the disk version.
            filename = storage.get_image(expnum, ccd, version, prefix=prefix)
            this_wcs = wcs.WCS(fits.open(filename)[0].header)
        except Exception as err:
            logging.warning("WCS Trans compute failed. {}".format(str(err)))
            return
        wcs_dict[expnum] = this_wcs
    # Reference sky position: the centre pixel of the first frame.
    x0 = wcs_dict[expnums[0]].header['NAXIS1'] / 2.0
    y0 = wcs_dict[expnums[0]].header['NAXIS2'] / 2.0
    (ra0, dec0) = wcs_dict[expnums[0]].xy2sky(x0, y0)
    result = ""
    for expnum in expnums:
        filename = storage.get_file(expnum, ccd, version, ext='.trans.jmp',
                                    prefix=prefix)
        # BUG FIX: the Python 2 'file' builtin no longer exists in Python 3;
        # use open() in a context manager so the handle is also closed.
        with open(filename, 'r') as trans_fobj:
            jmp_trans = trans_fobj.readline().split()
        (x, y) = wcs_dict[expnum].sky2xy(ra0, dec0)
        # Apply the 6-term linear trans.jmp mapping to the WCS-predicted x/y.
        x1 = float(jmp_trans[0]) + float(jmp_trans[1]) * x + float(jmp_trans[2]) * y
        y1 = float(jmp_trans[3]) + float(jmp_trans[4]) * x + float(jmp_trans[5]) * y
        dr = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
        if dr > 0.5:
            result += "WARNING: WCS-JMP transforms mis-matched {} reverting to using {}.\n".format(
                expnum, default)
            if default == "WCS":
                # Overwrite the local trans.jmp with a pure-shift transform
                # derived from the WCS offsets.
                uri = storage.dbimages_uri(expnum, ccd, version,
                                           ext='.trans.jmp', prefix=prefix)
                filename = os.path.basename(uri)
                # BUG FIX: 'file' builtin replaced by open() (see above).
                with open(filename, 'w') as trans:
                    trans.write("{:5.2f} 1. 0. {:5.2f} 0. 1.\n".format(
                        x0 - x, y0 - y))
        else:
            result += "WCS-JMP transforms match {}\n".format(expnum)
    return result
def test_KBOGenerator(self):
    """Generate self.number artificial KBOs and plant them into the image.

    Fetches the image/shifts/psf files from storage, builds a KBO list
    within the DATASEC pixel bounds, plants them with plant.plant_kbos,
    and asserts the expected number of objects was generated.
    """
    filename = storage.get_file(self.expnum, self.ccd, self.version, ext='fits')
    shifts = storage.get_file(self.expnum, self.ccd, self.version, ext='shifts')
    psf = storage.get_file(self.expnum, self.ccd, self.version, ext='psf.fits')
    header = fits.open(filename)[0].header
    bounds = util.get_pixel_bounds_from_datasec_keyword(
        header.get('DATASEC', '[33:2080,1:4612]'))
    outfile = NamedTemporaryFile()
    shifts = json.loads(open(shifts, 'rb').read())
    # NOTE(review): other callers build the angle range as
    # (ang - width, ang + width); mixing rmax/rmin into the angle bounds
    # here looks like a typo -- confirm against the production plant() call.
    kbos = KBOGenerator.get_kbos(n=self.number,
                                 rate=(self.rmin, self.rmax),
                                 angle=(self.ang - self.rmax,
                                        self.rmax + self.rmin),
                                 mag=(self.mmin, self.mmax),
                                 x=(bounds[0][0], bounds[0][1]),
                                 y=(bounds[1][0], bounds[1][1]),
                                 filename=outfile.name)
    plant.plant_kbos(filename, psf, kbos, shifts, "fk")
    self.assertEqual(len(kbos), self.number)
def plant(expnums, ccd, rmin, rmax, ang, width, number=10, mmin=21.0, mmax=25.5, version='s', dry_run=False): """Plant artificial sources into the list of images provided. :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to :param ccd: which ccd to work on. :param rmin: The minimum rate of motion to add sources at (''/hour) :param rmax: The maximum rate of motion to add sources at (''/hour) :param ang: The mean angle of motion to add sources :param width: The +/- range of angles of motion :param version: Add sources to the 'o', 'p' or 's' images :param dry_run: don't push results to VOSpace. """ # Construct a list of artificial KBOs with positions in the image # and rates of motion within the bounds given by the caller. filename = storage.get_image(expnums[0], ccd=ccd, version=version) header = fits.open(filename)[0].header bounds = util.get_pixel_bounds_from_datasec_keyword(header.get('DATASEC', '[33:2080,1:4612]')) # generate a set of artificial KBOs to add to the image. kbos = KBOGenerator.get_kbos(n=number, rate=(rmin, rmax), angle=(ang - width, ang + width), mag=(mmin, mmax), x=(bounds[0][0], bounds[0][1]), y=(bounds[1][0], bounds[1][1]), filename='Object.planted') for expnum in expnums: filename = storage.get_image(expnum, ccd, version) psf = storage.get_file(expnum, ccd, version, ext='psf.fits') plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk") if dry_run: return uri = storage.get_uri('Object', ext='planted', version='', subdir=str( expnums[0]) + "/ccd%s" % (str(ccd).zfill(2))) storage.copy('Object.planted', uri) for expnum in expnums: uri = storage.get_uri(expnum, ccd=ccd, version=version, ext='fits', prefix='fk') filename = os.path.basename(uri) storage.copy(filename, uri) return
def filename(self):
    """
    Name of the MOP formatted file to parse, fetched lazily from storage.

    @rtype: basestring
    @return: filename
    """
    # Serve the cached name if we already fetched the file once.
    if self._filename is not None:
        return self._filename
    self._filename = storage.get_file(self.basename,
                                      self.ccd,
                                      ext=self.extension,
                                      version=self.type,
                                      prefix=self.prefix)
    return self._filename
def make_iraf_psf(fname):
    """Build an IRAF 'seepsf' PSF image for the source centred in cutout fname.

    Reads RA/DEC (placed in the header previously) and EXPNUM from the
    stamp, finds the extension that actually contains the source, retrieves
    the matching psf.fits from storage and runs iraf.seepsf at the source
    location.

    @param fname: filename of the (possibly multi-extension) FITS cutout
    @return: name of the seepsf image, or None when the first usable
             extension does not contain the source.
    """
    with fits.open(fname) as stp:
        # Read in image info placed in header previously
        header = stp[0].header
        ra = header["RA"]
        dec = header["DEC"]
        for ext in range(len(stp)):
            # Look in each header section for the exposure number.
            try:
                header = stp[ext].header
                expnum = header["EXPNUM"]
            except KeyError:
                # BUG FIX: was a bare 'except:'; only a missing header
                # keyword is expected here.
                continue
    stamp = fits.open(fname)
    for ext in range(len(stamp)):
        # Establish size of image and location of source in image
        # (source should be centered in cutout)
        try:
            world_coords = wcs.WCS(stamp[ext].header)
            image_coords = world_coords.sky2xy(float(ra), float(dec))
            image_size = stamp[ext].data.shape
        except Exception:
            # BUG FIX: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit; narrow to Exception.
            continue
        # Some cutouts are split over multiple CCDs, meaning the source may
        # not actually be in some of these small image 'pieces'
        if 0 < image_coords[0] < image_size[0] and 0 < image_coords[1] < image_size[1]:
            ccdnum = stamp[ext].header.get("EXTNO", None)
            # Get psf file for image from storage
            psf_file = storage.get_file(expnum, ccd=ccdnum, ext="psf.fits")
            iraf.noao()
            iraf.digiphot()
            iraf.daophot(_doprint=0)
            # Seepsf creates psf_image
            psf_image = psf_file.replace("psf", "seepsf")
            iraf.seepsf(psf_file, psf_image,
                        xpsf=image_coords[0], ypsf=image_coords[1])
            os.unlink(psf_file)
            return psf_image
        else:
            return None
def compute_trans(expnums, ccd, version, prefix=None, default="WCS"):
    """
    Pull the astrometric header for each image, compute an x/y transform
    and compare to trans.jmp; this one overrides trans.jmp if they are
    very different.

    @param expnums: exposure numbers whose transforms are compared
    @param ccd: the CCD of the mosaic being processed
    @param version: image version ('o', 'p' or 's')
    @param prefix: filename prefix of the image set (e.g. 'fk')
    @param default: which transform to trust on a mismatch ("WCS" or "JMP")
    @return: summary string of the comparisons, or None if a WCS failed
    """
    wcs_dict = {}
    for expnum in expnums:
        try:
            # TODO This assumes that the image is already N/E flipped.
            # If compute_trans is called after the image is retrieved from
            # archive then we get the disk version.
            filename = storage.get_image(expnum, ccd, version, prefix=prefix)
            this_wcs = wcs.WCS(fits.open(filename)[0].header)
        except Exception as err:
            logging.warning("WCS Trans compute failed. {}".format(str(err)))
            return
        wcs_dict[expnum] = this_wcs
    # Reference sky position: the centre pixel of the first frame.
    x0 = wcs_dict[expnums[0]].header['NAXIS1'] / 2.0
    y0 = wcs_dict[expnums[0]].header['NAXIS2'] / 2.0
    (ra0, dec0) = wcs_dict[expnums[0]].xy2sky(x0, y0)
    result = ""
    for expnum in expnums:
        filename = storage.get_file(expnum, ccd, version, ext='.trans.jmp',
                                    prefix=prefix)
        # BUG FIX: the Python 2 'file' builtin no longer exists in Python 3;
        # use open() in a context manager so the handle is also closed.
        with open(filename, 'r') as trans_fobj:
            jmp_trans = trans_fobj.readline().split()
        (x, y) = wcs_dict[expnum].sky2xy(ra0, dec0)
        # Apply the 6-term linear trans.jmp mapping to the WCS-predicted x/y.
        x1 = float(jmp_trans[0]) + float(jmp_trans[1]) * x + float(jmp_trans[2]) * y
        y1 = float(jmp_trans[3]) + float(jmp_trans[4]) * x + float(jmp_trans[5]) * y
        dr = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
        if dr > 0.5:
            result += "WARNING: WCS-JMP transforms mis-matched {} reverting to using {}.\n".format(expnum, default)
            if default == "WCS":
                # Overwrite the local trans.jmp with a pure-shift transform
                # derived from the WCS offsets.
                uri = storage.dbimages_uri(expnum, ccd, version,
                                           ext='.trans.jmp', prefix=prefix)
                filename = os.path.basename(uri)
                # BUG FIX: 'file' builtin replaced by open() (see above).
                with open(filename, 'w') as trans:
                    trans.write("{:5.2f} 1. 0. {:5.2f} 0. 1.\n".format(x0 - x, y0 - y))
        else:
            result += "WCS-JMP transforms match {}\n".format(expnum)
    return result
def get_shifts(expnum, ccd, version):
    """Load the plant 'shifts' JSON (x/y/flux/time scaling) for an exposure.

    @param expnum: exposure number the shifts belong to
    @param ccd: CCD of the mosaic
    @param version: image version ('o', 'p' or 's')
    @return: dict of scaling values produced by align()
    """
    # BUG FIX: close the file handle instead of leaking it.
    with open(storage.get_file(expnum, ccd, version, ext='shifts')) as fobj:
        return json.loads(fobj.read())
def plant(expnums, ccd, rmin, rmax, ang, width, number=10, mmin=21.0,
          mmax=25.5, version='s', dry_run=False, force=True):
    """Plant artificial sources into the list of images provided.

    @param dry_run: don't push results to VOSpace.
    @param width: The +/- range of angles of motion
    @param ang: The mean angle of motion to add sources
    @param rmax: The maximum rate of motion to add sources at (''/hour)
    @param rmin: The minimum rate of motion to add sources at (''/hour)
    @param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    @param ccd: which ccd to work on.
    @param mmax: Maximum magnitude to plant sources at
    @param version: Add sources to the 'o', 'p' or 's' images
    @param mmin: Minimum magnitude to plant sources at
    @param number: number of sources to plant.
    @param force: Run, even if we already succeeded at making a fk image.
    """
    message = storage.SUCCESS
    # Skip when this task already succeeded for the set, unless forced.
    if storage.get_status(task, "", expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, "", expnums[0], version, ccd))
        return
    with storage.LoggingManager(task, "", expnums[0], ccd, version, dry_run):
        try:
            # Construct a list of artificial KBOs with positions in the image
            # and rates of motion within the bounds given by the caller.
            filename = storage.get_image(expnums[0], ccd=ccd, version=version)
            header = fits.open(filename)[0].header
            bounds = util.get_pixel_bounds_from_datasec_keyword(
                header.get('DATASEC', '[33:2080,1:4612]'))

            # generate a set of artificial KBOs to add to the image.
            kbos = KBOGenerator.get_kbos(n=number,
                                         rate=(rmin, rmax),
                                         angle=(ang - width, ang + width),
                                         mag=(mmin, mmax),
                                         x=(bounds[0][0], bounds[0][1]),
                                         y=(bounds[1][0], bounds[1][1]),
                                         filename='Object.planted')

            # Plant the same KBO list into every frame, using the per-frame
            # 'shifts' scaling.
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd, version)
                psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
                plant_kbos(filename, psf, kbos,
                           get_shifts(expnum, ccd, version), "fk")

            if dry_run:
                return

            # Push the planted-object list and the 'fk' images to VOSpace.
            uri = storage.get_uri('Object', ext='planted', version='',
                                  subdir=f"{expnums[0]}/ccd{int(ccd):02d}")
            storage.copy('Object.planted', uri)
            for expnum in expnums:
                uri = storage.get_uri(expnum, ccd=ccd, version=version,
                                      ext='fits', prefix='fk')
                filename = os.path.basename(uri)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record SUCCESS or the failure message for this task.
        storage.set_status(task, "", expnums[0], version, ccd, status=message)
    return
def run(expnums, ccd, version, prefix=None, dry_run=False, default="WCS",
        force=False):
    """run the actual step2 on the given exp/ccd combo"""
    jmp_trans = ['step2ajmp']
    jmp_args = ['step2bjmp']
    matt_args = ['step2matt_jmp']

    # Skip when this task already succeeded for the set, unless forced.
    if storage.get_status(task, prefix, expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            # Every exposure must have completed the dependency task first.
            for expnum in expnums:
                if not storage.get_status(dependency, prefix, expnum,
                                          version=version, ccd=ccd):
                    raise IOError(35,
                                  "Cannot start {} as {} not yet completed for {}{}{}{:02d}"
                                  .format(task, dependency, prefix, expnum,
                                          version, ccd))
            message = storage.SUCCESS

            idx = 0
            logging.info("Retrieving catalog files to do matching.")
            for expnum in expnums:
                # [0:-8]/[0:-9] strip the '.obj.jmp'/'.obj.matt' extension:
                # the step2 programs expect the catalog basename.
                jmp_args.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.jmp', prefix=prefix)[0:-8])
                jmp_trans.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.jmp', prefix=prefix)[0:-8])
                idx += 1
                matt_args.append('-f%d' % idx)
                matt_args.append(
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext='obj.matt', prefix=prefix)[0:-9])

            logging.info(
                "Computing the catalog alignment using sources in catalogs.")
            try:
                logging.info(util.exec_prog(jmp_trans))
                if default == "WCS":
                    logging.info("Comparing computed transform to WCS values")
                    logging.info(compute_trans(expnums, ccd, version, prefix,
                                               default=default))
            except Exception as ex:
                # Fall back to the WCS-derived transform when jmp fails.
                logging.info("JMP Trans failed: {}".format(ex))
                logging.info(compute_trans(expnums, ccd, version, prefix,
                                           default="WCS"))

            logging.info("Using transform to match catalogs for three images.")
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            # check that the shifts from step2 are rational by matching the
            # bright star lists.
            logging.info(
                "Uisng checktrans to ensure that transforms were reasonable.")
            check_args = ['checktrans']
            if os.access('proc-these-files', os.R_OK):
                os.unlink('proc-these-files')
            ptf = open('proc-these-files', 'w')
            ptf.write(
                "# A dummy file that is created so checktrans could run.\n")
            ptf.write("# Frame FWHM PSF?\n")
            for expnum in expnums:
                filename = os.path.splitext(
                    storage.get_image(expnum, ccd, version=version,
                                      prefix=prefix))[0]
                # checktrans expects .bright.psf/.obj.psf files; hard-link
                # the jmp products under those names.
                if not os.access(filename + ".bright.psf", os.R_OK):
                    os.link(filename + ".bright.jmp",
                            filename + ".bright.psf")
                if not os.access(filename + ".obj.psf", os.R_OK):
                    os.link(filename + ".obj.jmp", filename + ".obj.psf")
                ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(
                    filename, _FWHM, "NO"))
            ptf.close()
            if os.access('BAD_TRANS', os.F_OK):
                os.unlink('BAD_TRANS')

            logging.info(util.exec_prog(check_args))

            # checktrans signals a bad transform by creating BAD_TRANS.
            if os.access('BAD_TRANS', os.F_OK):
                raise OSError(errno.EBADMSG, 'BAD_TRANS')
            if os.access('proc-these-files', os.F_OK):
                os.unlink('proc-these-files')

            if dry_run:
                return

            # Push the matched catalogs and transforms back to dbimages.
            for expnum in expnums:
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    uri = storage.dbimages_uri(expnum, ccd=ccd,
                                               version=version, ext=ext,
                                               prefix=prefix)
                    filename = os.path.basename(uri)
                    storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record SUCCESS or the failure message for this task.
        storage.set_status(task, prefix, expnums[0], version, ccd,
                           status=message)
    return
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT, dry_run=False,
        force=True):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level
    dry_run: if True, don't push results or status back to storage
    force: run even if this task already succeeded for this exp/ccd
    """
    message = storage.SUCCESS

    # Skip when this task already succeeded, unless forced.
    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version,
                                      ccd):
                raise IOError(35,
                              "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                                  task, dependency, prefix, expnum, version,
                                  ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version,
                             ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version,
                                         prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix,
                                    version=version)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            # Remove the scratch weight map if one was left behind.
            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version,
                                              ext=ext, prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    # 'with open' verifies the product exists and is
                    # readable before we attempt to push it; the copy is
                    # retried up to 10 times before giving up.
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info(
                                    "Attempt {} to copy {} -> {}".format(
                                        count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record SUCCESS or the failure message for this task.
        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd,
                               status=message)
def align(expnums, ccd, version='s', dry_run=False): """Create a 'shifts' file that transforms the space/flux/time scale of all images to the first image. This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used files for inputs. The scaling we are computing here is for use in planting sources into the image at the same sky/flux locations while accounting for motions of sources with time. :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to, the first frame in the list is the reference. :param ccd: which ccd to work on. :param version: Add sources to the 'o', 'p' or 's' images :param dry_run: don't push results to VOSpace. """ # Get the images and supporting files that we need from the VOSpace area # get_image and get_file check if the image/file is already on disk. # re-computed fluxes from the PSF stars and then recompute x/y/flux scaling. # some dictionaries to hold the various scale pos = {} apcor = {} mags = {} zmag = {} mjdates = {} for expnum in expnums: filename = storage.get_image(expnum, ccd=ccd, version=version) zmag[expnum] = storage.get_zeropoint(expnum, ccd, prefix=None, version=version) mjdates[expnum] = float(fits.open(filename)[0].header.get('MJD-OBS')) apcor[expnum] = [ float(x) for x in open( storage.get_file( expnum, ccd=ccd, version=version, ext=storage.APCOR_EXT)).read().split() ] keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2'] # load the .trans.jmp values into a 'wcs' like dictionary. # .trans.jmp maps current frame to reference frame in pixel coordinates. # the reference frame of all the frames supplied must be the same. shifts = dict( zip(keys, [ float(x) for x in open( storage.get_file( expnum, ccd=ccd, version=version, ext='trans.jmp')).read().split() ])) shifts['crpix1'] = 0.0 shifts['crpix2'] = 0.0 # now create a wcs object based on those transforms, this wcs links the current frame's # pixel coordinates to the reference frame's pixel coordinates. 
w = get_wcs(shifts) # get the PHOT file that was produced by the mkpsf routine phot = ascii.read(storage.get_file(expnum, ccd=ccd, version=version, ext='phot'), format='daophot') # compute the small-aperture magnitudes of the stars used in the PSF mags[expnum] = daophot.phot(filename, phot['XCENTER'], phot['YCENTER'], aperture=apcor[expnum][0], sky=apcor[expnum][1] + 1, swidth=apcor[expnum][0], zmag=zmag[expnum]) # covert the x/y positions to positions in Frame 1 based on the trans.jmp values. (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"], mags[expnum]["YCENTER"], 1) pos[expnum] = numpy.transpose([x, y]) # match this exposures PSF stars position against those in the first image of the set. idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum]) # compute the magnitdue offset between the current frame and the reference. dmags = numpy.ma.array(mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] - (mags[expnum]["MAG"][idx1] - apcor[expnum][2]), mask=idx1.mask) dmags.sort() # compute the median and determine if that shift is small compared to the scatter. dmag = dmags[int(len(dmags) / 2.0)] if math.fabs(dmag) > 3 * (dmags.std() + 0.01): logging.warn( "Magnitude shift {} between {} and {} is large: {}".format( dmag, expnums[0], expnum, shifts[expnum])) shifts['dmag'] = dmag shifts['emag'] = dmags.std() shifts['nmag'] = len(dmags.mask) - dmags.mask.sum() shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum] shift_file = os.path.basename( storage.get_uri(expnum, ccd, version, '.shifts')) fh = open(shift_file, 'w') fh.write( json.dumps(shifts, sort_keys=True, indent=4, separators=(',', ': '))) fh.write('\n') fh.close() if not dry_run: storage.copy( shift_file, os.path.basename( storage.get_uri(expnum, ccd, version, '.shifts')))
def combine(expnum, ccd, prefix=None, file_type='p', field=None,
            measure3=MEASURE3, dry_run=False):
    """Combine the matt/jmp moving-object lists and run measure3.

    Runs 'comb-list' to merge the moving.matt/moving.jmp detections for
    the given exposure/ccd, pushes the products to the measure3 VOSpace
    area, then runs 'measure3' on the combined candidate list.

    @param expnum: reference exposure number of the set
    @param ccd: CCD being processed
    @param prefix: filename prefix ('fk' for planted images); ''/None for
        real data
    @param file_type: image version ('o', 'p' or 's')
    @param field: label for the output products; derived from expnum and
        prefix when None
    @param measure3: VOSpace directory the products are copied to
    @param dry_run: if True, don't push results to VOSpace
    @return: storage.SUCCESS
    """
    if field is None:
        field = str(expnum)
    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % (prefix, field)
    field += "_%s%s" % (str(file_type), str(ccd))
    logging.info("Doing combine on field {}".format(field))

    for ext in ['moving.matt', 'moving.jmp']:
        storage.get_file(expnum, ccd=ccd, version=file_type, ext=ext,
                         prefix=prefix)

    # Planted (fk) runs also need the Object.planted list for scoring.
    if prefix is not None and len(prefix) > 0:
        storage.get_file('Object', version='', ext='planted',
                         subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)))
    else:
        prefix = ''

    cmd_args = ['comb-list',
                prefix + str(expnum) + file_type + str(ccd).zfill(2)]
    logging.info(str(cmd_args))
    logging.info(util.exec_prog(cmd_args))

    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0:
        # Planted runs also produce found/missed diagnostic lists.
        ext_list.extend(['jmp.missed', 'matt.missed',
                         'jmp.found', 'matt.found',
                         'comb.missed', 'comb.found'])

    for ext in ext_list:
        uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                              version=file_type, ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % filename)
            continue
        vospace_name = "%s.%s" % (field, ext)
        if not dry_run:
            logging.info("%s -> %s" % (filename,
                                       os.path.join(measure3, vospace_name)))
            storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix + str(expnum) + file_type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    # No candidates found: push an empty marker file and stop here.
    if not os.access(cands_file, os.R_OK):
        no_cands_file = (prefix + str(expnum) + file_type +
                         str(ccd).zfill(2) + '.no_candidates')
        open(no_cands_file, 'w').close()
        if not dry_run:
            vospace_name = "%s.no_candidates" % field
            storage.copy(no_cands_file,
                         os.path.join(measure3, vospace_name))
        return storage.SUCCESS

    # get the images we need to compute x/y ra/dec transforms
    # cands_file = mop_file.Parser().parse(cands_file)
    # print cands_file
    # for file_id in cands_file.header.file_ids:
    #     rec_no = cands_file.header.file_ids.index(file_id)
    #     storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
    #                       ccd=ccd, version=file_type, ext='fits',
    #                       prefix=prefix)

    cmd_args = ['measure3',
                prefix + str(expnum) + file_type + str(ccd).zfill(2)]
    logging.info("Running measure3")
    logging.info(util.exec_prog(cmd_args))
    if not dry_run:
        filename = base_name + ".measure3.cands.astrom"
        vospace_filename = "%s.measure3.cands.astrom" % field
        storage.copy(filename, os.path.join(measure3, vospace_filename))
    return storage.SUCCESS
def step2(expnums, ccd, version, prefix=None, dry_run=False, default="WCS"):
    """Run step2 (catalog matching) on the given exposure/ccd combo.

    Retrieves the step1 object catalogs, computes the inter-frame
    transforms (step2ajmp / compute_trans), matches the catalogs
    (step2bjmp / step2matt_jmp), sanity-checks the transforms with
    checktrans and, unless dry_run, copies the products back to dbimages.

    @param expnums: exposure numbers of the set
    @param ccd: CCD of the mosaic being processed
    @param version: image version ('o', 'p' or 's')
    @param prefix: filename prefix of the image set (e.g. 'fk')
    @param dry_run: if True, don't push results to storage
    @param default: which transform to trust ("WCS" or "JMP")
    @raises OSError: when checktrans leaves a BAD_TRANS marker file
    """
    jmp_trans = ['step2ajmp']
    jmp_args = ['step2bjmp']
    matt_args = ['step2matt_jmp']

    idx = 0
    for expnum in expnums:
        # [0:-8]/[0:-9] strip the '.obj.jmp'/'.obj.matt' extension: the
        # step2 programs expect the catalog basename.
        jmp_args.append(
            storage.get_file(expnum, ccd=ccd, version=version,
                             ext='obj.jmp', prefix=prefix)[0:-8])
        jmp_trans.append(
            storage.get_file(expnum, ccd=ccd, version=version,
                             ext='obj.jmp', prefix=prefix)[0:-8])
        idx += 1
        matt_args.append('-f%d' % idx)
        matt_args.append(
            storage.get_file(expnum, ccd=ccd, version=version,
                             ext='obj.matt', prefix=prefix)[0:-9])

    logging.info(util.exec_prog(jmp_trans))
    if default == "WCS":
        logging.info(compute_trans(expnums, ccd, version, prefix,
                                   default=default))

    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))

    ## check that the shifts from step2 are rational
    check_args = ['checktrans']
    if os.access('proc-these-files', os.R_OK):
        os.unlink('proc-these-files')
    ptf = open('proc-these-files', 'w')
    ptf.write("# A dummy file that is created so checktrans could run.\n")
    ptf.write("# Frame FWHM PSF?\n")
    for expnum in expnums:
        filename = os.path.splitext(
            storage.get_image(expnum, ccd, version=version,
                              prefix=prefix))[0]
        # checktrans expects .bright.psf/.obj.psf files; hard-link the jmp
        # products under those names.
        if not os.access(filename + ".bright.psf", os.R_OK):
            os.link(filename + ".bright.jmp", filename + ".bright.psf")
        if not os.access(filename + ".obj.psf", os.R_OK):
            os.link(filename + ".obj.jmp", filename + ".obj.psf")
        ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(filename, _FWHM, "NO"))
    ptf.close()
    if os.access('BAD_TRANS', os.F_OK):
        os.unlink('BAD_TRANS')

    logging.info(util.exec_prog(check_args))

    if os.access('BAD_TRANS', os.F_OK):
        # BUG FIX: was ValueError(errno.EBADEXEC, ...). errno.EBADEXEC is
        # not defined on Linux (AttributeError at raise time) and the
        # sibling implementation raises OSError(errno.EBADMSG, ...); use
        # the same here for consistency.
        raise OSError(errno.EBADMSG, 'BAD_TRANS')

    if os.access('proc-these-files', os.F_OK):
        os.unlink('proc-these-files')

    if dry_run:
        return

    # Push the matched catalogs and transforms back to dbimages.
    for expnum in expnums:
        for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
            uri = storage.dbimages_uri(expnum, ccd=ccd, version=version,
                                       ext=ext, prefix=prefix)
            filename = os.path.basename(uri)
            storage.copy(filename, uri)
    return
def combine(expnum, ccd, prefix=None, file_type='p', field=None,
            measure3=MEASURE3, dry_run=False):
    """Combine the matt/jmp moving-object lists and run measure3.

    Runs 'comb-list' to merge the moving.matt/moving.jmp detections for
    the given exposure/ccd, pushes the products to the measure3 VOSpace
    area, fetches the images named in the candidate list, then runs
    'measure3' on the combined candidates.

    @param expnum: reference exposure number of the set
    @param ccd: CCD being processed
    @param prefix: filename prefix ('fk' for planted images); ''/None for
        real data
    @param file_type: image version ('o', 'p' or 's')
    @param field: label for the output products; derived from expnum and
        prefix when None
    @param measure3: VOSpace directory the products are copied to
    @param dry_run: if True, don't push results to VOSpace
    @return: storage.SUCCESS
    """
    if field is None:
        field = str(expnum)
    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % (prefix, field)
    field += "_%s%s" % (str(file_type), str(ccd))
    logging.info("Doing combine on field {}".format(field))

    for ext in ['moving.matt', 'moving.jmp']:
        storage.get_file(expnum, ccd=ccd, version=file_type, ext=ext,
                         prefix=prefix)

    # Planted (fk) runs also need the Object.planted list for scoring.
    if prefix is not None and len(prefix) > 0:
        storage.get_file('Object', version='', ext='planted',
                         subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)))
    else:
        prefix = ''

    cmd_args = [
        'comb-list', prefix + str(expnum) + file_type + str(ccd).zfill(2)
    ]
    logging.info(str(cmd_args))
    logging.info(util.exec_prog(cmd_args))

    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0:
        # Planted runs also produce found/missed diagnostic lists.
        ext_list.extend([
            'jmp.missed', 'matt.missed', 'jmp.found', 'matt.found',
            'comb.missed', 'comb.found'
        ])

    for ext in ext_list:
        uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                              version=file_type, ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % filename)
            continue
        vospace_name = "%s.%s" % (field, ext)
        if not dry_run:
            logging.info("%s -> %s" % (filename,
                                       os.path.join(measure3, vospace_name)))
            storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix + str(expnum) + file_type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    # No candidates found: push an empty marker file and stop here.
    if not os.access(cands_file, os.R_OK):
        no_cands_file = (prefix + str(expnum) + file_type +
                         str(ccd).zfill(2) + '.no_candidates')
        open(no_cands_file, 'w').close()
        if not dry_run:
            vospace_name = "%s.no_candidates" % field
            storage.copy(no_cands_file,
                         os.path.join(measure3, vospace_name))
        return storage.SUCCESS

    # get the images we need to compute x/y ra/dec transforms
    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(
            expnum=cands_file.header.keywords['EXPNUM'][rec_no],
            ccd=ccd, version=file_type, ext='fits', prefix=prefix)

    cmd_args = [
        'measure3', prefix + str(expnum) + file_type + str(ccd).zfill(2)
    ]
    logging.info("Running measure3")
    logging.info(util.exec_prog(cmd_args))
    if not dry_run:
        filename = base_name + ".measure3.cands.astrom"
        vospace_filename = "%s.measure3.cands.astrom" % field
        storage.copy(filename, os.path.join(measure3, vospace_filename))
    return storage.SUCCESS
def get_shifts(expnum, ccd, version):
    """Load the plant 'shifts' JSON (x/y/flux/time scaling) for an exposure.

    @param expnum: exposure number the shifts belong to
    @param ccd: CCD of the mosaic
    @param version: image version ('o', 'p' or 's')
    @return: dict of scaling values produced by align()
    """
    # BUG FIX: close the file handle instead of leaking it.
    with open(storage.get_file(expnum, ccd, version, ext='shifts')) as fobj:
        return json.loads(fobj.read())
def run(expnums, ccd, version, rate_min, rate_max, angle, width, field=None,
        prefix=None, dry_run=False, force=False):
    """Run the moving-object search (step3jmp/step3matt) on the exp/ccd combo."""
    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    # Skip when this task already succeeded for the set, unless forced.
    if storage.get_status(task, prefix, expnums[0], version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnums[0],
                                      version=version, ccd=ccd):
                raise IOError(35,
                              "Cannot start {} as {} not yet completed {}{}{}{:02d}".
                              format(task, dependency, prefix, expnums[0],
                                     version, ccd))
            # Default message is success, message gets overwritten with
            # failure messages.
            message = storage.SUCCESS

            idx = 0
            cmd_args = []
            for expnum in expnums:
                idx += 1
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    storage.get_file(expnum, ccd=ccd, version=version,
                                     ext=ext, prefix=prefix)
                image = os.path.splitext(
                    os.path.basename(
                        storage.get_uri(expnum, ccd, version=version,
                                        prefix=prefix)))[0]
                cmd_args.append('-f%d' % idx)
                cmd_args.append(image)
            cmd_args.extend(['-rn', str(rate_min),
                             '-rx', str(rate_max),
                             '-a', str(angle),
                             '-w', str(width)])
            jmp_args.extend(cmd_args)
            matt_args.extend(cmd_args)
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            if dry_run:
                return

            if field is None:
                field = str(expnums[0])

            # Make sure a dbimages destination exists for this file.
            storage.mkdir(
                os.path.dirname(
                    storage.get_uri(field, ccd=ccd, version=version,
                                    prefix=prefix)))
            for ext in ['moving.jmp', 'moving.matt']:
                uri = storage.get_uri(field, ccd=ccd, version=version,
                                      ext=ext, prefix=prefix)
                # NOTE(review): prefix defaults to None, which '%s' would
                # render as the literal 'None' in this local filename --
                # confirm callers always pass a string prefix.
                filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                            str(ccd).zfill(2), ext)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record SUCCESS or the failure message for this task.
        storage.set_status(task, prefix, expnums[0], version=version,
                           ccd=ccd, status=message)
    return
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script.

    Builds the PSF products (psf.fits, fwhm, zeropoint, apcor, phot, ...)
    for the given exposure/ccd and pushes them to the dbimages area,
    recording the task status on completion.
    """
    message = storage.SUCCESS

    # Skip when this task already succeeded, unless forced.
    if storage.get_status(task, prefix, expnum, version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p",
                                      ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency,
                                                             expnum))

            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                     version=version, ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)

            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum, ccd, version=version,
                                         prefix=prefix)

            # get mopheader from the vospace storage area; retrieved so it
            # is on local disk for the jmpmakepsf run.
            logging.info("Getting mopheader from VOSpace")
            mopheader_filename = storage.get_file(expnum, ccd,
                                                  version=version,
                                                  prefix=prefix,
                                                  ext='mopheader')

            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(util.exec_prog(['jmpmakepsf.csh', './', filename,
                                         'yes', 'yes']))

            if dry_run:
                return

            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]
            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor',
                        'fwhm', 'phot'):
                dest = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                            version=version, ext=ext)
                source = basename + "." + str(ext)
                count = 0
                # 'with open' confirms the product exists before pushing;
                # the copy is retried up to 10 times before giving up.
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info(
                                "Attempt {} to copy {} -> {}".format(
                                    count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex

            # set some data parameters associated with the image,
            # determined in this step.
            storage.set_status('fwhm', prefix, expnum, version=version,
                               ccd=ccd,
                               status=str(storage.get_fwhm(
                                   expnum, ccd=ccd, prefix=prefix,
                                   version=version)))
            storage.set_status('zeropoint', prefix, expnum,
                               version=version, ccd=ccd,
                               status=str(storage.get_zeropoint(
                                   expnum, ccd=ccd, prefix=prefix,
                                   version=version)))
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)
        # Record SUCCESS or the failure message for this task.
        storage.set_status(task, prefix, expnum, version, ccd=ccd,
                           status=message)
    return
def step3(expnums, ccd, version, rate_min, rate_max, angle, width, field=None, prefix=None, dry_run=False): """run the actual step2 on the given exp/ccd combo""" jmp_args = ['step3jmp'] matt_args = ['step3matt'] idx = 0 cmd_args = [] for expnum in expnums: idx += 1 for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']: storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix) image = os.path.splitext( os.path.basename( storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0] cmd_args.append('-f%d' % idx) cmd_args.append(image) cmd_args.extend([ '-rn', str(rate_min), '-rx', str(rate_max), '-a', str(angle), '-w', str(width) ]) jmp_args.extend(cmd_args) matt_args.extend(cmd_args) logging.info(util.exec_prog(jmp_args)) logging.info(util.exec_prog(matt_args)) if dry_run: return if field is None: field = str(expnums[0]) storage.mkdir( os.path.dirname( storage.get_uri(field, ccd=ccd, version=version, prefix=prefix))) for ext in ['moving.jmp', 'moving.matt']: uri = storage.get_uri(field, ccd=ccd, version=version, ext=ext, prefix=prefix) filename = '%s%d%s%s.%s' % (prefix, expnums[0], version, str(ccd).zfill(2), ext) storage.copy(filename, uri) return
def step2(expnums, ccd, version, prefix=None, dry_run=False, default="WCS"):
    """Run the actual step2 (frame-to-frame transform) on the given exp/ccd combo.

    Runs the external ``step2ajmp``/``step2bjmp``/``step2matt_jmp`` programs,
    optionally overrides the JMP transforms with WCS-based ones (compute_trans),
    sanity-checks the computed shifts via ``checktrans`` and, unless dry_run,
    pushes the resulting unid/trans files to VOSpace.

    :param expnums: exposure numbers of the triplet
    :param ccd: CCD of the mosaic to process
    :param version: image version
    :param prefix: filename prefix ('' or 'fk')
    :param dry_run: if True, do not push results to VOSpace
    :param default: when "WCS", replace badly mis-matched JMP transforms with WCS ones
    """
    jmp_trans = ['step2ajmp']
    jmp_args = ['step2bjmp']
    matt_args = ['step2matt_jmp']

    idx = 0
    for expnum in expnums:
        # The slice [0:-8] strips the trailing '.obj.jmp' to get the basename
        # each program expects; [0:-9] strips '.obj.matt'.
        jmp_args.append(
            storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp',
                             prefix=prefix)[0:-8]
        )
        jmp_trans.append(
            storage.get_file(expnum, ccd=ccd, version=version, ext='obj.jmp',
                             prefix=prefix)[0:-8]
        )
        idx += 1
        matt_args.append('-f%d' % idx)
        matt_args.append(
            storage.get_file(expnum, ccd=ccd, version=version, ext='obj.matt',
                             prefix=prefix)[0:-9]
        )

    logging.info(util.exec_prog(jmp_trans))

    if default == "WCS":
        # Compare the JMP transforms against WCS-derived ones; may overwrite trans.jmp.
        logging.info(compute_trans(expnums, ccd, version, prefix, default=default))

    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))

    ## check that the shifts from step2 are rational
    check_args = ['checktrans']
    if os.access('proc-these-files', os.R_OK):
        os.unlink('proc-these-files')
    ptf = open('proc-these-files', 'w')
    ptf.write("# A dummy file that is created so checktrans could run.\n")
    ptf.write("# Frame FWHM PSF?\n")
    for expnum in expnums:
        filename = os.path.splitext(storage.get_image(expnum, ccd,
                                                      version=version,
                                                      prefix=prefix))[0]
        # checktrans wants '.psf' named inputs; hard-link the jmp products in place.
        if not os.access(filename + ".bright.psf", os.R_OK):
            os.link(filename + ".bright.jmp", filename + ".bright.psf")
        if not os.access(filename + ".obj.psf", os.R_OK):
            os.link(filename + ".obj.jmp", filename + ".obj.psf")
        ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(filename, _FWHM, "NO"))
    ptf.close()

    # checktrans signals failure by creating a BAD_TRANS file; clear any stale one first.
    if os.access('BAD_TRANS', os.F_OK):
        os.unlink('BAD_TRANS')
    logging.info(util.exec_prog(check_args))
    if os.access('BAD_TRANS', os.F_OK):
        raise OSError(errno.EBADMSG, 'BAD_TRANS')
    if os.access('proc-these-files', os.F_OK):
        os.unlink('proc-these-files')

    if dry_run:
        return

    # Push the per-exposure outputs to VOSpace.
    for expnum in expnums:
        for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
            uri = storage.dbimages_uri(expnum, ccd=ccd, version=version, ext=ext,
                                       prefix=prefix)
            filename = os.path.basename(uri)
            storage.copy(filename, uri)
    return
def run(expnum, ccd, prefix=None, version='p', field=None,
        measure3_dir=storage.MEASURE3, dry_run=False, force=False):
    """Run the 'combine' task: merge the jmp/matt moving-object lists for one exp/ccd.

    Retrieves the ``moving.*`` inputs, runs the external ``comb-list`` program,
    copies the combined candidate (and, for planted 'fk' fields, missed/found)
    files to the measure3 VOSpace directory, and triggers measure3 when
    candidates were found.

    :param expnum: exposure number of the reference frame for this field
    :param ccd: CCD of the mosaic to process
    :param prefix: 'fk' for planted fields, else None/''
    :param version: image version (default 'p')
    :param field: field name; derived from expnum/prefix/version/ccd when None
    :param measure3_dir: VOSpace directory that receives the measure3 inputs
    :param dry_run: if True, do not push results to VOSpace
    :param force: re-run even if the task is already recorded as successful
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info(
            "{} completed successfully for {} {} {} {}".format(task, prefix,
                                                               expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed "
                                  "for {}{}{}{:02d}".format(task, dependency,
                                                            prefix, expnum,
                                                            version, ccd))
            if field is None:
                field = str(expnum)
            if prefix is not None and len(prefix) > 0:
                field = "%s_%s" % (prefix, field)
            field += "_%s%s" % (str(version), str(ccd))
            logging.info("Doing combine on field {}".format(field))

            for ext in ['moving.matt', 'moving.jmp']:
                storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                                 prefix=prefix)

            # Get the list of objects planted into the field if prefix='fk'
            if prefix == 'fk':
                storage.get_file('Object', version='', ext='planted',
                                 subdir=str(expnum) + "/ccd%s" % (
                                     str(ccd).zfill(2)))
            else:
                # Normalize None -> '' so the string concatenations below work.
                prefix = ''
            cmd_args = ['comb-list', prefix + str(expnum) + version + str(ccd).zfill(2)]
            logging.info(str(cmd_args))
            logging.info(util.exec_prog(cmd_args))

            # things to copy back to VOSpace, if this is an 'fk' image
            # then we have missed and found files too.
            ext_list = ['cands.comb']
            if prefix == 'fk':
                ext_list.extend(['jmp.missed', 'matt.missed',
                                 'jmp.found', 'matt.found',
                                 'comb.missed', 'comb.found'])

            for ext in ext_list:
                uri = storage.get_uri(expnum, ccd=ccd, prefix=prefix,
                                      version=version, ext=ext)
                filename = os.path.basename(uri)
                if not os.access(filename, os.R_OK):
                    # Missing outputs are logged (critical) but do not abort the task.
                    logging.critical("No %s file" % filename)
                    continue
                vospace_name = "%s.%s" % (field, ext)
                if not dry_run:
                    logging.info("%s -> %s" % (
                        filename, os.path.join(measure3_dir, vospace_name)))
                    storage.copy(filename, os.path.join(measure3_dir, vospace_name))

            base_name = prefix + str(expnum) + version + str(ccd).zfill(2)
            cands_file = base_name + '.cands.comb'

            if not os.access(cands_file, os.R_OK):
                # No candidates: create and upload an empty marker file instead.
                no_cands_file = (prefix + str(expnum) + version +
                                 str(ccd).zfill(2) + '.no_candidates')
                open(no_cands_file, 'w').close()
                if not dry_run:
                    vospace_name = "%s.no_candidates" % field
                    storage.copy(no_cands_file,
                                 os.path.join(measure3_dir, vospace_name))
            else:
                # Candidates found: run measure3 and upload its astrom product.
                measure3.run(base_name, storage.DBIMAGES)
                if not dry_run:
                    filename = base_name + ".measure3.cands.astrom"
                    vospace_filename = "%s.measure3.cands.astrom" % field
                    storage.copy(filename,
                                 os.path.join(measure3_dir, vospace_filename))
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record final status (SUCCESS or the error string) for this task.
        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
def step1(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD, wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT, dry_run=False): """run the actual step1jmp/matt codes. expnum: the CFHT expousre to process ccd: which ccd in the mosaic to process fwhm: the image quality, FWHM, of the image. In pixels. sex_thresh: the detection threhold to run sExtractor at wave_thresh: the detection threshold for wavelet maxcount: saturation level """ storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader') filename = storage.get_image(expnum, ccd, version=version, prefix=prefix) fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version) basename = os.path.splitext(filename)[0] logging.info(util.exec_prog(['step1jmp', '-f', basename, '-t', str(wave_thresh), '-w', str(fwhm), '-m', str(maxcount)])) obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp', prefix=prefix) obj_filename = basename + ".obj.jmp" if not dry_run: storage.copy(obj_filename, obj_uri) ## for step1matt we need the weight image hdulist = fits.open(filename) flat_name = hdulist[0].header.get('FLAT', 'weight.fits') parts = os.path.splitext(flat_name) if parts[1] == '.fz': flat_name = os.path.splitext(parts[0])[0] else: flat_name = parts[0] try: flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='calibrators') except: flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='old_calibrators') if os.access('weight.fits', os.R_OK): os.unlink('weight.fits') if not os.access('weight.fits', os.R_OK): os.symlink(flat_filename, 'weight.fits') logging.info(util.exec_prog(['step1matt', '-f', basename, '-t', str(sex_thresh), '-w', str(fwhm), '-m', str(maxcount)])) if os.access('weight.fits', os.R_OK): os.unlink('weight.fits') obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt', prefix=prefix) obj_filename = basename + ".obj.matt" if not dry_run: storage.copy(obj_filename, obj_uri) return True
def run(expnum, ccd, prefix='', version='p', sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD, maxcount=_MAX_COUNT,
        dry_run=False, force=True, ignore=False):
    """Task wrapper around the step1jmp/step1matt source-detection codes.

    Handles status bookkeeping, input retrieval, weight-map setup
    (via _get_weight_map), execution of both detection programs, and the
    retried upload of the resulting object catalogues.

    :param expnum: the CFHT exposure to process
    :param ccd: which ccd in the mosaic to process
    :param prefix: filename prefix ('' or 'fk')
    :param version: image version (default 'p')
    :param sex_thresh: the detection threshold to run sExtractor at
    :param wave_thresh: the detection threshold for wavelet
    :param maxcount: saturation level
    :param dry_run: if True, do not push results to VOSpace
    :param force: re-run even if the task is already recorded as successful
    :param ignore: skip the dependency-completed check
    """
    message = storage.SUCCESS
    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix,
                                                                        expnum, version, ccd))
        return
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd) and not ignore:
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version, default=3.5)
            basename = os.path.splitext(filename)[0]

            # Build the 'weight.fits' link that step1matt needs.
            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))
            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            # Clean up the weight map created by _get_weight_map.
            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext,
                                              prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    # The `with open(...)` confirms the catalogue exists locally
                    # before attempting the (flaky) network copy; up to 10 retries.
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count,
                                                                                  obj_filename,
                                                                                  obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        # Record final status (SUCCESS or the error string) for this task.
        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
def plant(expnums, ccd, rmin, rmax, ang, width, number=10, version='s', dry_run=False): """Plant artificial sources into the list of images provided. :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to :param ccd: which ccd to work on. :param rmin: The minimum rate of motion to add sources at (''/hour) :param rmax: The maximum rate of motion to add sources at (''/hour) :param ang: The mean angle of motion to add sources :param width: The +/- range of angles of motion :param version: Add sources to the 'o', 'p' or 's' images :param dry_run: don't push results to VOSpace. """ # Construct a list of artificial KBOs with positions in the image # and rates of motion within the bounds given by the caller. filename = storage.get_image(expnums[0], ccd=ccd, version=version) header = fits.open(filename)[0].header bounds = util.get_pixel_bounds_from_datasec_keyword( header.get('DATASEC', '[33:2080,1:4612]')) # generate a set of artifical KBOs to add to the image. kbos = Table(names=('x', 'y', 'mag', 'sky_rate', 'angle', 'id')) for kbo in KBOGenerator(n=number, x=Range(bounds[0][0], bounds[0][1]), y=Range(bounds[1][0], bounds[1][1]), rate=Range(rmin, rmax), angle=Range(ang - width, ang + width), mag=Range(21.0, 25.0)): kbos.add_row(kbo) fd = open('Object.planted', 'w') fd.write("# ") kbos.write(fd, format='ascii.fixed_width', delimiter=None) fd.close() for expnum in expnums: filename = storage.get_image(expnum, ccd, version) psf = storage.get_file(expnum, ccd, version, ext='psf.fits') plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk") if dry_run: return uri = storage.get_uri('Object', ext='planted', version='', subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2))) storage.copy('Object.planted', uri) for expnum in expnums: uri = storage.get_uri(expnum, ccd=ccd, version=version, ext='fits', prefix='fk') filename = os.path.basename(uri) storage.copy(filename, uri) return
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script on a single exposure/ccd and push results to VOSpace.

    NOTE(review): near-duplicate of an earlier run() definition in this view —
    presumably these come from separate task modules that were concatenated;
    if both live in one module the later definition shadows the earlier.

    :param expnum: exposure number to process
    :param ccd: CCD of the mosaic to process
    :param version: which image version to process
    :param dry_run: if True, run the processing but do not push results to VOSpace
    :param prefix: filename prefix ('' or 'fk' for planted images)
    :param force: re-run even if the task is already recorded as successful
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(
                    dependency, prefix, expnum, version, ccd=ccd):
                raise IOError("{} not yet run for {}".format(
                    dependency, expnum))

            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum, ccd, prefix=prefix, version=version,
                                     ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)

            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)

            # get mopheader from the vospace storage area
            logging.info("Getting mopheader from VOSpace")
            mopheader_filename = storage.get_file(expnum, ccd, version=version,
                                                  prefix=prefix, ext='mopheader')

            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(
                util.exec_prog(
                    ['jmpmakepsf.csh', './', filename, 'yes', 'yes']))

            if dry_run:
                return

            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]
            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor', 'fwhm',
                        'phot'):
                dest = storage.dbimages_uri(expnum, ccd, prefix=prefix,
                                            version=version, ext=ext)
                source = basename + "." + str(ext)
                count = 0
                # The `with open(...)` confirms the product exists locally before
                # attempting the (flaky) network copy; up to 10 retries.
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info("Attempt {} to copy {} -> {}".format(
                                count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex

            # set some data parameters associated with the image, determined in this step.
            storage.set_status('fwhm', prefix, expnum, version=version, ccd=ccd,
                               status=str(
                                   storage.get_fwhm(expnum, ccd=ccd, prefix=prefix,
                                                    version=version)))
            storage.set_status('zeropoint', prefix, expnum, version=version, ccd=ccd,
                               status=str(
                                   storage.get_zeropoint(expnum, ccd=ccd,
                                                         prefix=prefix,
                                                         version=version)))
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)
        # Record final status (SUCCESS or the error string) for this task.
        storage.set_status(task, prefix, expnum, version, ccd=ccd, status=message)
    return
def align(expnums, ccd, version='s', prefix='', dry_run=False, force=True):
    """Create a 'shifts' file that transforms the space/flux/time scale of all
    images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used files
    for inputs. The scaling we are computing here is for use in planting sources
    into the image at the same sky/flux locations while accounting for motions of
    sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to,
                    the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param version: Add sources to the 'o', 'p' or 's' images
    :param prefix: put this string in front of expnum when looking for exposure,
                   normally '' or 'fk'
    :param dry_run: don't push results to VOSpace.
    :param force: When true run task even if this task is recorded as having succeeded
    """
    message = storage.SUCCESS
    if storage.get_status(task, prefix, expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnums[0], version, ccd))
        return

    # Get the images and supporting files that we need from the VOSpace area
    # get_image and get_file check if the image/file is already on disk.
    # re-computed fluxes from the PSF stars and then recompute x/y/flux scaling.

    # some dictionaries to hold the various scale
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version, dry_run):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                zmag[expnum] = storage.get_zeropoint(expnum, ccd, prefix=None,
                                                     version=version)
                mjdates[expnum] = float(
                    fits.open(filename)[0].header.get('MJD-OBS'))
                # .apcor file holds: small aperture, large aperture, apcor, apcor err.
                apcor[expnum] = [
                    float(x) for x in open(
                        storage.get_file(
                            expnum, ccd=ccd, version=version,
                            ext=storage.APCOR_EXT)).read().split()
                ]
                keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
                # load the .trans.jmp values into a 'wcs' like dictionary.
                # .trans.jmp maps current frame to reference frame in pixel coordinates.
                # the reference frame of all the frames supplied must be the same.
                shifts = dict(
                    list(
                        zip(keys, [
                            float(x) for x in open(
                                storage.get_file(
                                    expnum, ccd=ccd, version=version,
                                    ext='trans.jmp')).read().split()
                        ])))
                shifts['crpix1'] = 0.0
                shifts['crpix2'] = 0.0
                # now create a wcs object based on those transforms, this wcs links the current frame's
                # pixel coordinates to the reference frame's pixel coordinates.
                w = get_wcs(shifts)

                # get the PHOT file that was produced by the mkpsf routine
                logging.debug("Reading .phot file {}".format(expnum))
                phot = ascii.read(storage.get_file(expnum, ccd=ccd, version=version,
                                                   ext='phot'), format='daophot')

                # compute the small-aperture magnitudes of the stars used in the PSF
                logging.debug("Running phot on {}".format(filename))
                mags[expnum] = daophot.phot(filename, phot['XCENTER'],
                                            phot['YCENTER'],
                                            aperture=apcor[expnum][0],
                                            sky=apcor[expnum][1] + 1,
                                            swidth=apcor[expnum][0],
                                            zmag=zmag[expnum])

                # covert the x/y positions to positions in Frame 1 based on the trans.jmp values.
                logging.debug(
                    "Doing the XY translation to refrence frame: {}".format(w))
                (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                         mags[expnum]["YCENTER"], 1)
                pos[expnum] = numpy.transpose([x, y])

                # match this exposures PSF stars position against those in the first image of the set.
                logging.debug("Matching lists")
                idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

                # compute the magnitdue offset between the current frame and the reference.
                dmags = numpy.ma.array(
                    mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                    (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                    mask=idx1.mask)
                dmags.sort()
                # logging.debug("Computed dmags between input and reference: {}".format(dmags))

                # error_count is a debug breadcrumb: incremented and logged between
                # steps so the failing stage can be located from the logs.
                error_count = 0
                error_count += 1
                logging.debug("{}".format(error_count))

                # compute the median and determine if that shift is small compared to the scatter.
                try:
                    # midx indexes the median of the unmasked, sorted offsets.
                    midx = int(
                        numpy.sum(numpy.any([~dmags.mask], axis=0)) / 2.0)
                    dmag = float(dmags[midx])
                    logging.debug("Computed a mag delta of: {}".format(dmag))
                except Exception as e:
                    logging.error(str(e))
                    logging.error(
                        "Failed to compute mag offset between plant and found using: {}"
                        .format(dmags))
                    # sentinel magnitude offset used when the median can't be computed
                    dmag = 99.99

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
                        logging.warning(
                            "Magnitude shift {} between {} and {} is large: {}"
                            .format(dmag, expnums[0], expnum, shifts))
                except Exception as e:
                    logging.error(str(e))

                error_count += 1
                logging.debug("{}".format(error_count))

                # Fold the photometric/temporal offsets into the shifts record.
                shifts['dmag'] = dmag
                shifts['emag'] = dmags.std()
                shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
                shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
                shift_file = os.path.basename(
                    storage.get_uri(expnum, ccd, version, '.shifts'))

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    fh = open(shift_file, 'w')
                    fh.write(
                        json.dumps(shifts,
                                   sort_keys=True,
                                   indent=4,
                                   separators=(',', ': '),
                                   cls=NpEncoder))
                    fh.write('\n')
                    fh.close()
                except Exception as e:
                    logging.error(
                        "Creation of SHIFTS file failed while trying to write: {}"
                        .format(shifts))
                    raise e

                error_count += 1
                logging.debug("{}".format(error_count))

                if not dry_run:
                    storage.copy(
                        shift_file,
                        storage.get_uri(expnum, ccd, version, '.shifts'))
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)
        if not dry_run:
            # NOTE(review): records status against the loop variable `expnum`
            # (the last exposure processed) rather than expnums[0] — confirm
            # this matches how get_status is queried at the top of this task.
            storage.set_status(task, prefix, expnum, version, ccd, status=message)