import logging
import os

import driveami

logger = logging.getLogger(__name__)


def process_data_groups(data_groups, output_dir, ami_dir, array="LA", script=None):
    """Calibrate each group of rawfiles with the AMI ``reduce`` pipeline.

    Args:
        data_groups: Dictionary mapping group name -> group-info dict; the raw
            filenames for each group are stored under ``driveami.keys.files``.
        output_dir: Folder where dataset group subfolders will be created.
        ami_dir: Top dir of the AMI ``reduce`` installation.
        array: 'LA' or 'SA' (default: 'LA').
        script: Reduction script to run; defaults to
            ``driveami.scripts.standard_reduction``.

    Returns:
        Dict mapping rawfile name -> serializable info-dict, for every rawfile
        that was reduced successfully.
    """
    if not script:
        script = driveami.scripts.standard_reduction

    processed_files_info = {}
    for grp_name in sorted(data_groups.keys()):

        try:
            r = driveami.Reduce(ami_dir, array=array)
            files = data_groups[grp_name][driveami.keys.files]
            grp_dir = os.path.join(output_dir, grp_name, "ami")
            driveami.ensure_dir(grp_dir)
            logger.info("Calibrating rawfiles and writing to {}".format(grp_dir))
            for rawfile in files:
                try:
                    logger.info("Reducing rawfile %s ...", rawfile)
                    file_info = driveami.process_rawfile(rawfile, output_dir=grp_dir, reduce=r, script=script)
                except (ValueError, IOError) as e:
                    logger.exception("Hit exception reducing file: %s\n" "Exception reads:\n%s\n", rawfile, e)
                    continue
                # Also save the group assignment in the listings:
                file_info[driveami.keys.group_name] = grp_name
                processed_files_info[rawfile] = driveami.make_serializable(file_info)
        except Exception:
            logger.exception("Hit exception (probable timeout) reducing group: {}".format(grp_name))
            continue
    return processed_files_info
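
For context, here is a minimal usage sketch of process_data_groups, assuming the imports above. The group name, rawfile paths and AMI install dir are placeholders invented for illustration, not values from the original example:

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    # Hypothetical grouped listing: one group, with its rawfile list stored
    # under driveami.keys.files, which is the layout process_data_groups reads.
    example_groups = {
        "SWIFT_J1234+56": {
            driveami.keys.files: ["raw/SWIFT_J1234+56-121001.raw",
                                  "raw/SWIFT_J1234+56-121002.raw"],
        },
    }
    results = process_data_groups(example_groups,
                                  output_dir="./calibrated",
                                  ami_dir="/opt/ami",  # placeholder install dir
                                  array="LA")
    print("Successfully reduced {} rawfiles".format(len(results)))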
Example #2
def main():
    options = handle_args()
    grouped_by_id_filename = options.outfile + '_by_id.json'
    grouped_by_pointing_filename = options.outfile + '_by_pointing.json'
    metadata_filename = options.outfile + '_metadata.json'

    r = driveami.Reduce(options.amidir, options.amiversion, options.array)
    logger.info("Loading observation metadata.")
    r.load_obs_info()
    logger.info("Grouping observations by target ID")
    id_groups = r.group_obs_by_target_id()
    with open(grouped_by_id_filename, 'w') as f:
        driveami.save_rawfile_listing(id_groups, f)
    logger.info("Grouping targets by pointing")
    pointing_groups = r.group_target_ids_by_pointing(id_groups,
                                                     pointing_tolerance_in_degrees=0.5)
    with open(grouped_by_pointing_filename, 'w') as f:
        driveami.save_rawfile_listing(pointing_groups, f)

    with open(metadata_filename, 'w') as f:
        rawfile_dict = {fname: driveami.make_serializable(info)
                        for fname, info in r.files.items()}
        driveami.save_rawfile_listing(rawfile_dict, f)

    return 0
Example #3
    def test_list_files(self):
        self.assertEqual(len(self.reduce.files), 3)
        self.assertEqual(list(self.reduce.files.values())[0], {})

        self.reduce.load_obs_info()
        self.assertNotEqual(list(self.reduce.files.values())[0], {})

        s = io.StringIO()
        rawfile_dict = {fname: driveami.make_serializable(info)
                        for fname, info in self.reduce.files.items()}
        driveami.save_rawfile_listing(rawfile_dict, s)
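
The test method above depends on a fixture that is not shown. Below is a hedged sketch of what a matching setUp might look like, assuming a unittest.TestCase and a small test AMI directory containing exactly three rawfiles; the class name, path and file-discovery behaviour are assumptions inferred from the assertions, not taken from the original:

import io
import unittest

import driveami


class TestReduce(unittest.TestCase):
    def setUp(self):
        # Assumed fixture: point Reduce at a test AMI installation whose data
        # directory holds exactly three rawfiles, so self.reduce.files starts
        # out with three entries mapped to empty dicts.
        self.reduce = driveami.Reduce("/path/to/test/ami", array="LA")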
Example #4
def main():
    options = handle_args()
    grouped_by_id_filename = options.outfile + '_by_id.json'
    grouped_by_pointing_filename = options.outfile + '_by_pointing.json'
    metadata_filename = options.outfile + '_metadata.json'

    r = driveami.Reduce(options.amidir, options.amiversion, options.array)
    logger.info("Loading observation metadata.")
    r.load_obs_info()

    # Write file metadata
    with open(metadata_filename, 'w') as f:
        rawfile_dict = {fname: driveami.make_serializable(info)
                        for fname, info in r.files.items()}
        if not options.rawtext:
            for fdict in rawfile_dict.values():
                fdict.pop('raw_obs_listing_text')
        driveami.save_rawfile_listing(rawfile_dict, f)
    logger.info("Wrote file metadata listings to {}".format(metadata_filename))

    # Write listings grouped by ID
    logger.info("Grouping observations by target ID")
    id_groups = r.group_obs_by_target_id()
    with open(grouped_by_id_filename, 'w') as f:
        driveami.save_rawfile_listing(id_groups, f)
    logger.info("Wrote id-grouped file-listings to {}".format(grouped_by_id_filename))

    # Write listings grouped by pointing:
    logger.info("Grouping targets by pointing")
    pointing_groups = r.group_target_ids_by_pointing(id_groups,
                                                     pointing_tolerance_in_degrees=0.5)
    with open(grouped_by_pointing_filename, 'w') as f:
        driveami.save_rawfile_listing(pointing_groups, f)
    logger.info(
        "Wrote pointing-grouped file-listings to {}".format(
            grouped_by_pointing_filename))

    return 0
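
The listing scripts above call handle_args() without defining it. Here is a plausible argparse-based reconstruction, inferred only from the option attributes the scripts access (outfile, amidir, amiversion, array, rawtext); the actual flag names, defaults and help text in the original tool may differ:

import argparse


def handle_args():
    # Hypothetical sketch: only the attribute names come from the scripts
    # above; everything else (defaults, help strings) is guessed.
    parser = argparse.ArgumentParser(
        description="Group AMI rawfiles and write JSON listings.")
    parser.add_argument("outfile",
                        help="Basename for the output JSON listings.")
    parser.add_argument("--amidir", default="/opt/ami",
                        help="Top dir of the AMI reduce installation.")
    parser.add_argument("--amiversion", default=None,
                        help="AMI software version, passed to driveami.Reduce.")
    parser.add_argument("--array", default="LA", choices=("LA", "SA"),
                        help="AMI array the rawfiles were taken with.")
    parser.add_argument("--rawtext", action="store_true",
                        help="Keep the 'raw_obs_listing_text' entry in the "
                             "metadata output.")
    return parser.parse_args()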