Example #1
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('field')
    parser.add_argument('ccd')
    parser.add_argument(
        '--expnum',
        default=None,
        help="Which exposure is the lead for this astrom file?")
    parser.add_argument(
        '--astrom-filename',
        default=None,
        help="Give the astrom file directly instead of looking-up "
        "using the field/ccd naming scheme.")
    parser.add_argument('--reals', action='store_true', default=False)
    parser.add_argument('--type',
                        choices=['o', 'p', 's'],
                        help="Which type of image.",
                        default='s')
    parser.add_argument('--measure3',
                        default='vos:OSSOS/measure3/2013B-L_redo/')
    parser.add_argument('--dbimages', default=None)
    parser.add_argument('--dry-run', action='store_true', default=False)
    parser.add_argument('--force', action='store_true', default=False)
    parser.add_argument("--fk",
                        action="store_true",
                        default=False,
                        help="Do fakes?")
    parser.add_argument('--object-planted',
                        default=OBJECT_PLANTED,
                        help="Name of file contains list of planted objects.")
    parser.add_argument(
        '--bright-limit',
        default=BRIGHT_LIMIT,
        type=float,
        help="Sources brighter than this limit {} are used to diagnose "
             "planting issues.".format(BRIGHT_LIMIT))
    parser.add_argument(
        '--minimum-bright-detections',
        default=MINIMUM_BRIGHT_DETECTIONS,
        type=int,
        help="Required number of detections with mag brighter than bright-limit.")
    parser.add_argument(
        '--minimum-bright-fraction',
        default=MINIMUM_BRIGHT_FRACTION,
        type=float,
        help="Minimum fraction of objects above the bright limit that should be found.")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--verbose", "-v", action="store_true")

    cmd_line = " ".join(sys.argv)
    args = parser.parse_args()

    util.set_logger(args)
    logging.info("Starting {}".format(cmd_line))
    prefix = (args.fk and "fk") or ""
    version = args.type
    ext = 'cands'
    if args.reals:
        ext = 'reals'

    if args.dbimages is not None:
        storage.DBIMAGES = args.dbimages
        astrom.DATASET_ROOT = args.dbimages
    storage.MEASURE3 = args.measure3
    astrom_uri = storage.get_cands_uri(args.field,
                                       ccd=args.ccd,
                                       version=args.type,
                                       prefix=prefix,
                                       ext="measure3.{}.astrom".format(ext))

    if args.astrom_filename is None:
        astrom_filename = os.path.basename(astrom_uri)
    else:
        astrom_filename = args.astrom_filename

    if not os.access(astrom_filename, os.F_OK):
        astrom_filename = os.path.dirname(astrom_uri) + "/" + astrom_filename

    # Load the list of astrometric observations that will be looked at.
    fk_candidate_observations = astrom.parse(astrom_filename)
    if args.expnum is None:
        expnum = fk_candidate_observations.observations[0].expnum
    else:
        expnum = args.expnum

    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version='',
                          ccd=args.ccd) and not args.force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, '', args.ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, args.ccd, version,
                                args.dry_run):
        try:

            match_filename = os.path.splitext(
                os.path.basename(astrom_filename))[0] + '.match'
            logging.info(f'Got back: {match_filename}')
            match_uri = storage.get_cands_uri(
                args.field,
                ccd=args.ccd,
                version=args.type,
                prefix=prefix,
                ext="measure3.{}.match".format(ext),
                block=args.field)

            try:
                storage.copy(match_uri, match_filename)
            except NotFoundException as ex:
                logging.warning(
                    f'No match file found at {match_uri}, creating one.')
                logging.debug(f'{ex}')

            logging.info(
                ("Comparing planted and measured magnitudes "
                 "for sources in {} and {}\n".format(args.object_planted,
                                                     astrom_filename)))
            result = match_planted(
                fk_candidate_observations,
                match_filename=match_filename,
                object_planted=args.object_planted,
                bright_limit=args.bright_limit,
                minimum_bright_detections=args.minimum_bright_detections,
                bright_fraction=args.minimum_bright_fraction)
            if not args.dry_run:
                storage.copy(match_filename, match_uri)
                uri = os.path.dirname(astrom_uri)
                keys = [storage.tag_uri(os.path.basename(astrom_uri))]
                values = [result]
                storage.set_tags_on_uri(uri, keys, values)
            logging.info(message)
        except Exception as err:
            import traceback
            traceback.print_exc()
            message = str(err)
            logging.error(message)

        if not args.dry_run:
            storage.set_status(task,
                               prefix,
                               expnum,
                               version='',
                               ccd=args.ccd,
                               status=message)

    return
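A quick way to exercise this entry point without a shell is to set sys.argv before calling main(); a minimal sketch, assuming the module-level imports used by main() (storage, astrom, util) are in place, with a hypothetical script name, field and ccd:

import sys

sys.argv = ['measure3_check.py', 'L3UU', '22', '--reals', '--dry-run']  # hypothetical values
main()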
Example #2
def run(expnums,
        ccd,
        version,
        rate_min,
        rate_max,
        angle,
        width,
        field=None,
        prefix=None,
        dry_run=False,
        force=False):
    """run the actual step2  on the given exp/ccd combo"""

    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    if storage.get_status(task, prefix, expnums[0], version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            if not storage.get_status(
                    dependency, prefix, expnums[0], version=version, ccd=ccd):
                raise IOError(
                    35, "Cannot start {} as {} not yet completed {}{}{}{:02d}".
                    format(task, dependency, prefix, expnums[0], version, ccd))
            # Default message is success, message gets overwritten with failure messages.
            message = storage.SUCCESS

            idx = 0
            cmd_args = []
            for expnum in expnums:
                idx += 1
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    storage.get_file(expnum,
                                     ccd=ccd,
                                     version=version,
                                     ext=ext,
                                     prefix=prefix)
                image = os.path.splitext(
                    os.path.basename(
                        storage.get_uri(expnum,
                                        ccd,
                                        version=version,
                                        prefix=prefix)))[0]
                cmd_args.append('-f%d' % idx)
                cmd_args.append(image)

            cmd_args.extend([
                '-rn',
                str(rate_min), '-rx',
                str(rate_max), '-a',
                str(angle), '-w',
                str(width)
            ])
            jmp_args.extend(cmd_args)
            matt_args.extend(cmd_args)
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            if dry_run:
                return

            if field is None:
                field = str(expnums[0])

            # Make sure a dbimages destination exists for this file.
            storage.mkdir(
                os.path.dirname(
                    storage.get_uri(field,
                                    ccd=ccd,
                                    version=version,
                                    prefix=prefix)))

            for ext in ['moving.jmp', 'moving.matt']:
                uri = storage.get_uri(field,
                                      ccd=ccd,
                                      version=version,
                                      ext=ext,
                                      prefix=prefix)
                filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                            str(ccd).zfill(2), ext)
                storage.copy(filename, uri)

        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task,
                           prefix,
                           expnums[0],
                           version=version,
                           ccd=ccd,
                           status=message)

    return
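A hedged usage sketch for the run() signature above; the exposure triplet, rates and angle below are illustrative, with rate units taken to be ''/hour as in the plant() docstring of Example #4:

run([1616681, 1616692, 1616703],   # hypothetical exposure triplet
    ccd=0,
    version='p',
    rate_min=0.5, rate_max=15.0,   # ''/hour, illustrative
    angle=20.0, width=30.0,        # degrees, illustrative
    dry_run=True)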
Example #3
File: mkpsf.py Project: ijiraq/MOP
def run(expnum, ccd, version, dry_run=False, prefix="", force=False):
    """Run the OSSOS jmpmakepsf script.

    """

    message = storage.SUCCESS
    if storage.get_status(task, prefix, expnum, version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(
                    dependency, prefix, expnum, version, ccd=ccd):
                raise IOError("{} not yet run for {}".format(
                    dependency, expnum))

            # confirm destination directory exists.
            destdir = os.path.dirname(
                storage.dbimages_uri(expnum,
                                     ccd,
                                     prefix=prefix,
                                     version=version,
                                     ext='fits'))
            if not dry_run:
                storage.mkdir(destdir)

            # get image from the vospace storage area
            logging.info("Getting fits image from VOSpace")
            filename = storage.get_image(expnum,
                                         ccd,
                                         version=version,
                                         prefix=prefix)

            # get mopheader from the vospace storage area
            logging.info("Getting mopheader from VOSpace")
            mopheader_filename = storage.get_file(expnum,
                                                  ccd,
                                                  version=version,
                                                  prefix=prefix,
                                                  ext='mopheader')

            # run mkpsf process
            logging.info("Running mkpsf on %s %d" % (expnum, ccd))
            logging.info(
                util.exec_prog(
                    ['jmpmakepsf.csh', './', filename, 'yes', 'yes']))

            if dry_run:
                return

            # place the results into VOSpace
            basename = os.path.splitext(filename)[0]

            for ext in ('mopheader', 'psf.fits', 'zeropoint.used', 'apcor',
                        'fwhm', 'phot'):
                dest = storage.dbimages_uri(expnum,
                                            ccd,
                                            prefix=prefix,
                                            version=version,
                                            ext=ext)
                source = basename + "." + str(ext)
                count = 0
                with open(source, 'r'):
                    while True:
                        count += 1
                        try:
                            logging.info("Attempt {} to copy {} -> {}".format(
                                count, source, dest))
                            storage.copy(source, dest)
                            break
                        except Exception as ex:
                            if count > 10:
                                raise ex

            # set some data parameters associated with the image, determined in this step.
            storage.set_status('fwhm',
                               prefix,
                               expnum,
                               version=version,
                               ccd=ccd,
                               status=str(
                                   storage.get_fwhm(expnum,
                                                    ccd=ccd,
                                                    prefix=prefix,
                                                    version=version)))
            storage.set_status('zeropoint',
                               prefix,
                               expnum,
                               version=version,
                               ccd=ccd,
                               status=str(
                                   storage.get_zeropoint(expnum,
                                                         ccd=ccd,
                                                         prefix=prefix,
                                                         version=version)))
            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)

        storage.set_status(task,
                           prefix,
                           expnum,
                           version,
                           ccd=ccd,
                           status=message)

    return
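The copy loop above retries up to ten times before re-raising the last error; the same idea as a standalone helper (the name and signature are mine, not part of the MOP code):

def copy_with_retries(source, dest, attempts=10):
    """Call storage.copy repeatedly, re-raising only after the final attempt."""
    for count in range(1, attempts + 1):
        logging.info("Attempt {} to copy {} -> {}".format(count, source, dest))
        try:
            storage.copy(source, dest)
            return
        except Exception:
            if count == attempts:
                raise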
Example #4
File: plant.py Project: ijiraq/MOP
def plant(expnums,
          ccd,
          rmin,
          rmax,
          ang,
          width,
          number=10,
          mmin=21.0,
          mmax=25.5,
          version='s',
          dry_run=False,
          force=True):
    """Plant artificial sources into the list of images provided.

    @param dry_run: don't push results to VOSpace.
    @param width: The +/- range of angles of motion
    @param ang: The mean angle of motion to add sources
    @param rmax: The maximum rate of motion to add sources at (''/hour)
    @param rmin: The minimum rate of motion to add sources at (''/hour)
    @param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    @param ccd: which ccd to work on.
    @param mmax: Maximum magnitude to plant sources at
    @param version: Add sources to the 'o', 'p' or 's' images
    @param mmin: Minimum magnitude to plant sources at
    @param number: number of sources to plant.
    @param force: Run, even if we already succeeded at making a fk image.
    """
    message = storage.SUCCESS

    if storage.get_status(task, "", expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, "", expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, "", expnums[0], ccd, version, dry_run):
        try:
            # Construct a list of artificial KBOs with positions in the image
            # and rates of motion within the bounds given by the caller.
            filename = storage.get_image(expnums[0], ccd=ccd, version=version)
            header = fits.open(filename)[0].header
            bounds = util.get_pixel_bounds_from_datasec_keyword(
                header.get('DATASEC', '[33:2080,1:4612]'))

            # generate a set of artificial KBOs to add to the image.
            kbos = KBOGenerator.get_kbos(n=number,
                                         rate=(rmin, rmax),
                                         angle=(ang - width, ang + width),
                                         mag=(mmin, mmax),
                                         x=(bounds[0][0], bounds[0][1]),
                                         y=(bounds[1][0], bounds[1][1]),
                                         filename='Object.planted')

            for expnum in expnums:
                filename = storage.get_image(expnum, ccd, version)
                psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
                plant_kbos(filename, psf, kbos,
                           get_shifts(expnum, ccd, version), "fk")

            if dry_run:
                return
            uri = storage.get_uri('Object',
                                  ext='planted',
                                  version='',
                                  subdir=f"{expnums[0]}/ccd{int(ccd):02d}")

            storage.copy('Object.planted', uri)
            for expnum in expnums:
                uri = storage.get_uri(expnum,
                                      ccd=ccd,
                                      version=version,
                                      ext='fits',
                                      prefix='fk')
                filename = os.path.basename(uri)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task, "", expnums[0], version, ccd, status=message)

    return
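A minimal call sketch using the defaults documented in the docstring; the exposure numbers, CCD, rates and angle are illustrative:

plant([1616681, 1616692, 1616703], ccd=12,
      rmin=0.5, rmax=15.0,            # ''/hour
      ang=20.0, width=30.0,           # mean angle of motion and +/- range
      number=10, mmin=21.0, mmax=25.5,
      version='s', dry_run=True)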
Example #5
File: combine.py Project: OSSOS/MOP
def run(expnum, ccd, prefix=None, version='p', field=None,
        measure3_dir=storage.MEASURE3, dry_run=False, force=False):

    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info(
            "{} completed successfully for {} {} {} {}".format(task, prefix,
                                                               expnum, version,
                                                               ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed "
                                  "for {}{}{}{:02d}".format(task, dependency,
                                                            prefix, expnum,
                                                            version, ccd))
            if field is None:
                field = str(expnum)

            if prefix is not None and len(prefix) > 0:
                field = "%s_%s" % (prefix, field)
            field += "_%s%s" % (str(version), str(ccd))

            logging.info("Doing combine on field {}".format(field))

            for ext in ['moving.matt', 'moving.jmp']:
                storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                                 prefix=prefix)

            # Get the list of objects planted into the field if prefix='fk'
            if prefix == 'fk':
                storage.get_file('Object',
                                 version='',
                                 ext='planted',
                                 subdir=str(expnum) + "/ccd%s" % (
                                     str(ccd).zfill(2)))
            else:
                prefix = ''

            cmd_args = ['comb-list',
                        prefix + str(expnum) + version + str(ccd).zfill(2)]
            logging.info(str(cmd_args))
            logging.info(util.exec_prog(cmd_args))

            # things to copy back to VOSpace, if this is an 'fk' image
            # then we have missed and found files too.
            ext_list = ['cands.comb']
            if prefix == 'fk':
                ext_list.extend(['jmp.missed', 'matt.missed',
                                 'jmp.found', 'matt.found',
                                 'comb.missed', 'comb.found'])

            for ext in ext_list:
                uri = storage.get_uri(expnum,
                                      ccd=ccd,
                                      prefix=prefix,
                                      version=version,
                                      ext=ext)
                filename = os.path.basename(uri)
                if not os.access(filename, os.R_OK):
                    logging.critical("No %s file" % filename)
                    continue
                vospace_name = "%s.%s" % (field, ext)
                if not dry_run:
                    logging.info("%s -> %s" % (
                        filename, os.path.join(measure3_dir, vospace_name)))
                    storage.copy(filename, os.path.join(measure3_dir, vospace_name))

            base_name = prefix + str(expnum) + version + str(ccd).zfill(2)
            cands_file = base_name + '.cands.comb'

            if not os.access(cands_file, os.R_OK):
                no_cands_file = (prefix +
                                 str(expnum) +
                                 version +
                                 str(ccd).zfill(2) +
                                 '.no_candidates')
                open(no_cands_file, 'w').close()
                if not dry_run:
                    vospace_name = "%s.no_candidates" % field
                    storage.copy(no_cands_file,
                                 os.path.join(measure3_dir, vospace_name))

            else:
                measure3.run(base_name, storage.DBIMAGES)

                if not dry_run:
                    filename = base_name + ".measure3.cands.astrom"
                    vospace_filename = "%s.measure3.cands.astrom" % field
                    storage.copy(filename, os.path.join(measure3_dir, vospace_filename))

        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd,
                               status=message)
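As a usage sketch (the exposure number is illustrative), passing prefix='fk' triggers the planted-object bookkeeping and the missed/found products handled above:

run(1616681, ccd=5, prefix='fk', version='p', dry_run=True)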
Example #6
def run(expnums,
        ccd,
        version,
        prefix=None,
        dry_run=False,
        default="WCS",
        force=False):
    """run the actual step2  on the given exp/ccd combo"""

    jmp_trans = ['step2ajmp']
    jmp_args = ['step2bjmp']
    matt_args = ['step2matt_jmp']

    if storage.get_status(task, prefix, expnums[0], version,
                          ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            for expnum in expnums:
                if not storage.get_status(
                        dependency, prefix, expnum, version=version, ccd=ccd):
                    raise IOError(
                        35,
                        "Cannot start {} as {} not yet completed for {}{}{}{:02d}"
                        .format(task, dependency, prefix, expnum, version,
                                ccd))
            message = storage.SUCCESS

            idx = 0
            logging.info("Retrieving catalog files to do matching.")
            for expnum in expnums:
                jmp_args.append(
                    storage.get_file(expnum,
                                     ccd=ccd,
                                     version=version,
                                     ext='obj.jmp',
                                     prefix=prefix)[0:-8])
                jmp_trans.append(
                    storage.get_file(expnum,
                                     ccd=ccd,
                                     version=version,
                                     ext='obj.jmp',
                                     prefix=prefix)[0:-8])
                idx += 1
                matt_args.append('-f%d' % idx)
                matt_args.append(
                    storage.get_file(expnum,
                                     ccd=ccd,
                                     version=version,
                                     ext='obj.matt',
                                     prefix=prefix)[0:-9])

            logging.info(
                "Computing the catalog alignment using sources in catalogs.")
            try:
                logging.info(util.exec_prog(jmp_trans))
                if default == "WCS":
                    logging.info("Comparing computed transform to WCS values")
                    logging.info(
                        compute_trans(expnums,
                                      ccd,
                                      version,
                                      prefix,
                                      default=default))
            except Exception as ex:
                logging.info("JMP Trans failed: {}".format(ex))
                logging.info(
                    compute_trans(expnums, ccd, version, prefix,
                                  default="WCS"))

            logging.info("Using transform to match catalogs for three images.")
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            # check that the shifts from step2 are rational by matching the bright star lists.
            logging.info(
                "Uisng checktrans to ensure that transforms were reasonable.")
            check_args = ['checktrans']
            if os.access('proc-these-files', os.R_OK):
                os.unlink('proc-these-files')
            ptf = open('proc-these-files', 'w')
            ptf.write(
                "# A dummy file that is created so checktrans could run.\n")
            ptf.write("# Frame FWHM PSF?\n")
            for expnum in expnums:
                filename = os.path.splitext(
                    storage.get_image(expnum,
                                      ccd,
                                      version=version,
                                      prefix=prefix))[0]
                if not os.access(filename + ".bright.psf", os.R_OK):
                    os.link(filename + ".bright.jmp", filename + ".bright.psf")
                if not os.access(filename + ".obj.psf", os.R_OK):
                    os.link(filename + ".obj.jmp", filename + ".obj.psf")
                ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(
                    filename, _FWHM, "NO"))
            ptf.close()
            if os.access('BAD_TRANS', os.F_OK):
                os.unlink('BAD_TRANS')

            logging.info(util.exec_prog(check_args))

            if os.access('BAD_TRANS', os.F_OK):
                raise OSError(errno.EBADMSG, 'BAD_TRANS')

            if os.access('proc-these-files', os.F_OK):
                os.unlink('proc-these-files')

            if dry_run:
                return

            for expnum in expnums:
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    uri = storage.dbimages_uri(expnum,
                                               ccd=ccd,
                                               version=version,
                                               ext=ext,
                                               prefix=prefix)
                    filename = os.path.basename(uri)
                    storage.copy(filename, uri)

        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task,
                           prefix,
                           expnums[0],
                           version,
                           ccd,
                           status=message)

    return
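The 'proc-these-files' block above can also be written with a context manager so the file is always closed; a sketch assuming frame_basenames is a hypothetical list of the image basenames collected in the loop and _FWHM is the module-level constant:

with open('proc-these-files', 'w') as ptf:
    ptf.write("# A dummy file that is created so checktrans could run.\n")
    ptf.write("# Frame FWHM PSF?\n")
    for basename in frame_basenames:   # hypothetical list of image basenames
        ptf.write("{:>19s}{:>10.1f}{:>5s}\n".format(basename, _FWHM, "NO"))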
Example #7
File: slow.py Project: ijiraq/MOP
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):

    message = 'success'

    if storage.get_status(task, prefix, expnum, version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task=task,
                                prefix=prefix,
                                expnum=expnum,
                                ccd=ccd,
                                version=version,
                                dry_run=dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p",
                                      ccd=ccd):
                raise IOError("{} not yet run for {}".format(
                    dependency, expnum))

            header = storage.get_astheader(expnum, ccd)
            datasec = storage.datasec_to_list(
                header.get('DATASEC', '[80:2080,30:4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except Exception:
                fwhm = 'unknown'
            for keyword in del_keyword_list:
                try:
                    del header[keyword]
                except KeyError:
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([
                primary_hdu,
            ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.' + ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                try:
                    os.unlink(name)
                    os.unlink(name + ".fits")
                except OSError:
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()

                t = numpy.all([datasec[0] < obj_file.data['X'],
                               obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'],
                               obj_file.data['Y'] < datasec[3]], axis=0)
                logging.info(
                    "Sources remaining after datasec cut: {} of {}".format(
                        len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)

            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name + " -> " + uri)
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        if count > 10:
                            raise ex

            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)

        if not dry_run:
            storage.set_status(task,
                               prefix,
                               expnum,
                               version=version,
                               ccd=ccd,
                               status=message)
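The datasec cut above is a plain boolean mask over the catalogue columns; a toy illustration with made-up numbers:

import numpy
from astropy.table import Table

data = Table({'X': [10.0, 500.0, 2100.0], 'Y': [5.0, 2000.0, 100.0]})  # toy catalogue
datasec = [80, 2080, 30, 4160]                                          # x1, x2, y1, y2
keep = numpy.all([datasec[0] < data['X'], data['X'] < datasec[1],
                  datasec[2] < data['Y'], data['Y'] < datasec[3]], axis=0)
print(data[keep])   # only the X=500, Y=2000 row survives the cut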
Example #8
def align(expnums, ccd, version='s', prefix='', dry_run=False, force=True):
    """Create a 'shifts' file that transforms the space/flux/time scale of all
    images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used files for inputs.
    The scaling we are computing here is for use in planting sources into the image at the same sky/flux locations
    while accounting for motions of sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to,
                    the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param prefix: put this string in front of expnum when looking for exposure, normally '' or 'fk'
    :param force: When true run task even if this task is recorded as having succeeded
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnums[0], version,
                          ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnums[0], version, ccd))
        return

    # Get the images and supporting files that we need from the VOSpace area;
    # get_image and get_file check if the image/file is already on disk.
    # Re-compute the fluxes of the PSF stars, then recompute the x/y/flux scaling.

    # some dictionaries to hold the various scaling values
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                zmag[expnum] = storage.get_zeropoint(expnum,
                                                     ccd,
                                                     prefix=None,
                                                     version=version)
                mjdates[expnum] = float(
                    fits.open(filename)[0].header.get('MJD-OBS'))
                apcor[expnum] = [
                    float(x) for x in open(
                        storage.get_file(
                            expnum,
                            ccd=ccd,
                            version=version,
                            ext=storage.APCOR_EXT)).read().split()
                ]
                keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
                # load the .trans.jmp values into a 'wcs' like dictionary.
                # .trans.jmp maps current frame to reference frame in pixel coordinates.
                # the reference frame of all the frames supplied must be the same.
                shifts = dict(
                    list(
                        zip(keys, [
                            float(x) for x in open(
                                storage.get_file(
                                    expnum,
                                    ccd=ccd,
                                    version=version,
                                    ext='trans.jmp')).read().split()
                        ])))
                shifts['crpix1'] = 0.0
                shifts['crpix2'] = 0.0
                # now create a wcs object based on those transforms, this wcs links the current frame's
                # pixel coordinates to the reference frame's pixel coordinates.
                w = get_wcs(shifts)

                # get the PHOT file that was produced by the mkpsf routine
                logging.debug("Reading .phot file {}".format(expnum))
                phot = ascii.read(storage.get_file(expnum,
                                                   ccd=ccd,
                                                   version=version,
                                                   ext='phot'),
                                  format='daophot')

                # compute the small-aperture magnitudes of the stars used in the PSF
                logging.debug("Running phot on {}".format(filename))
                mags[expnum] = daophot.phot(filename,
                                            phot['XCENTER'],
                                            phot['YCENTER'],
                                            aperture=apcor[expnum][0],
                                            sky=apcor[expnum][1] + 1,
                                            swidth=apcor[expnum][0],
                                            zmag=zmag[expnum])

                # convert the x/y positions to positions in Frame 1 based on the trans.jmp values.
                logging.debug(
                    "Doing the XY translation to refrence frame: {}".format(w))
                (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                         mags[expnum]["YCENTER"], 1)
                pos[expnum] = numpy.transpose([x, y])
                # match this exposure's PSF star positions against those in the first image of the set.
                logging.debug("Matching lists")
                idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

                # compute the magnitude offset between the current frame and the reference.
                dmags = numpy.ma.array(
                    mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                    (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                    mask=idx1.mask)
                dmags.sort()
                # logging.debug("Computed dmags between input and reference: {}".format(dmags))
                error_count = 0

                error_count += 1
                logging.debug("{}".format(error_count))

                # compute the median and determine if that shift is small compared to the scatter.
                try:
                    midx = int(
                        numpy.sum(numpy.any([~dmags.mask], axis=0)) / 2.0)
                    dmag = float(dmags[midx])
                    logging.debug("Computed a mag delta of: {}".format(dmag))
                except Exception as e:
                    logging.error(str(e))
                    logging.error(
                        "Failed to compute mag offset between plant and found using: {}"
                        .format(dmags))
                    dmag = 99.99

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
                        logging.warning(
                            "Magnitude shift {} between {} and {} is large: {}"
                            .format(dmag, expnums[0], expnum, shifts))
                except Exception as e:
                    logging.error(str(e))

                error_count += 1
                logging.debug("{}".format(error_count))

                shifts['dmag'] = dmag
                shifts['emag'] = dmags.std()
                shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
                shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
                shift_file = os.path.basename(
                    storage.get_uri(expnum, ccd, version, '.shifts'))

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    fh = open(shift_file, 'w')
                    fh.write(
                        json.dumps(shifts,
                                   sort_keys=True,
                                   indent=4,
                                   separators=(',', ': '),
                                   cls=NpEncoder))
                    fh.write('\n')
                    fh.close()
                except Exception as e:
                    logging.error(
                        "Creation of SHIFTS file failed while trying to write: {}"
                        .format(shifts))
                    raise e

                error_count += 1
                logging.debug("{}".format(error_count))

                if not dry_run:
                    storage.copy(
                        shift_file,
                        storage.get_uri(expnum, ccd, version, '.shifts'))
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task,
                               prefix,
                               expnums[0],
                               version,
                               ccd,
                               status=message)
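The median magnitude offset above relies on numpy.ma sorting masked entries to the end; a toy check of that idiom, where .count() returns the same number of unmasked values as the numpy.any/sum expression in the code (values are made up):

import numpy

dmags = numpy.ma.array([0.3, -0.1, 0.0, 9.9], mask=[False, False, False, True])
dmags.sort()                       # the masked value moves to the end
midx = int(dmags.count() / 2.0)    # index of the median unmasked value
print(float(dmags[midx]))          # -> 0.0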
Example #9
File: step1.py Project: OSSOS/MOP
def run(expnum,
        ccd,
        prefix='',
        version='p',
        sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD,
        maxcount=_MAX_COUNT,
        dry_run=False,
        force=True, ignore=False):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT expousre to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threhold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd) and not ignore:
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version, default=3.5)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext,
                                              prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
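A hedged call sketch (the exposure and CCD numbers are illustrative; the thresholds fall back to the module-level defaults):

run(1616681, ccd=3, prefix='', version='p', dry_run=True, force=True)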
Example #10
def scramble(expnums, ccd, version='p', dry_run=False, force=False, prefix=''):
    """
    Scramble (re-order) the MJD-OBS times of this combination of exposures.

    @param expnums: list of exposure numbers to scramble the time on
    @param ccd:  which CCD to work on (assumes this is a CFHT MegaCam MEF)
    @param version: should we scramble the 'p' or 'o' images?
    @param dry_run: if dry run then don't save back to VOSpace.
    @param force: if true then create scramble set, even if already exists.
    @param prefix: a string that will be pre-pended to the EXPNUM to get the filename, sometimes 'fk'.
    @return: None
    """

    # Get a list of the MJD values and then write a re-ordering of those into files with 's'
    # as their type instead of 'p' or 'o'
    mjds = []
    fobjs = []
    message = storage.SUCCESS
    if not (force or dry_run) and storage.get_status(
            task, prefix, expnums[0], version='s', ccd=ccd):
        logging.info("{} recorded as complete for {} ccd {}".format(
            task, expnums, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                fobjs.append(fits.open(filename))
                # Pull out the values to replace in the headers; we must copy them
                # out first, otherwise we would only be holding references.
                mjds.append(fobjs[-1][0].header['MJD-OBS'])

            order = [0, 2, 1]
            for idx in range(len(fobjs)):
                logging.info(
                    "Flipping %d to %d" %
                    (fobjs[idx][0].header['EXPNUM'], expnums[order[idx]]))
                fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
                fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
                uri = storage.get_uri(expnums[order[idx]],
                                      ccd=ccd,
                                      version='s',
                                      ext='fits')
                scramble_file_name = os.path.basename(uri)
                if os.access(scramble_file_name, os.F_OK):
                    os.unlink(scramble_file_name)
                fobjs[idx].writeto(scramble_file_name)
                if not dry_run:
                    storage.copy(scramble_file_name, uri)
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task,
                               prefix,
                               expnums[0],
                               version,
                               ccd,
                               status=message)

    return
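The order = [0, 2, 1] permutation keeps the first exposure in place and swaps the identities and times of the second and third; a toy illustration with made-up exposure numbers and MJDs:

expnums = [1616681, 1616692, 1616703]
mjds = [57000.10, 57000.20, 57000.30]
order = [0, 2, 1]
for idx in range(len(expnums)):
    # frame idx is relabelled with expnums[order[idx]] and given that exposure's MJD-OBS
    print(idx, '->', expnums[order[idx]], mjds[order[idx]])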