def setUp(self):
        """Build a fully processed capture inventory, apply all actions
        from the test action list, and re-derive the capture grouping."""
        # paths to the test fixtures
        inventory_csv = './test/files/test_inventory.csv'
        action_list_csv = './test/files/test_action_list.csv'

        # read the inventory and enrich it with capture information
        self.inventory = read_image_inventory(inventory_csv)
        update_inventory_with_capture_data(
            self.inventory,
            calculate_time_deltas(self.inventory, flags))
        update_inventory_with_capture_data(
            self.inventory,
            group_images_into_captures(self.inventory, flags))
        update_inventory_with_capture_id(self.inventory)
        update_inventory_with_image_names(self.inventory)

        # read the action list and key every inventory record by image name
        self.action_list = read_image_inventory(action_list_csv, unique_id=None)
        self.captures = OrderedDict(
            (record['image_name'], record)
            for record in self.inventory.values())
        self.actions = generate_actions(self.action_list, self.captures)

        # apply each action while stubbing out the real file deletion
        @patch('pre_processing.actions.os.remove')
        def mock_apply_action(image_data, action_dict, flags, mock_remove):
            apply_action(image_data, action_dict, flags)

        for action in self.actions:
            mock_apply_action(self.captures[action.image], action, flags)

        self.captures_updated = select_valid_images(self.captures)

        # re-calculate time deltas on the surviving images
        update_inventory_with_capture_data(
            self.captures_updated,
            calculate_time_deltas(self.captures_updated, flags))

        # update the image-to-capture association
        update_inventory_with_capture_data(
            self.captures_updated,
            group_images_into_captures(self.captures_updated, flags))

        update_inventory_with_capture_id(self.captures_updated)

        update_time_checks_inventory(self.captures_updated, flags)
# Example n. 2
    parser.add_argument("--captures", type=str, required=True)
    parser.add_argument("--log_dir", type=str, default=None)
    parser.add_argument("--log_filename", type=str, default='generate_actions')
    args = vars(parser.parse_args())

    # check existence of the input files
    # NOTE(review): the '--action_list' and '--actions_to_perform_csv'
    # arguments used below are presumably added to the parser above this
    # fragment -- confirm upstream.
    if not os.path.isfile(args['action_list']):
        raise FileNotFoundError(
            "action_list {} does not exist -- must be a file".format(
                args['action_list']))

    if not os.path.isfile(args['captures']):
        raise FileNotFoundError(
            "captures {} does not exist -- must be a file".format(
                args['captures']))

    # configure logging
    set_logging(args['log_dir'], args['log_filename'])
    logger = logging.getLogger(__name__)

    # read the captures (keyed by image name) and the raw action list
    captures = read_image_inventory(args['captures'], unique_id='image_name')
    action_list = read_image_inventory(args['action_list'], unique_id=None)

    # derive the concrete actions to perform
    actions_inventory = generate_actions(action_list, captures)

    # Export actions list as CSV (one row per Action namedtuple)
    df = pd.DataFrame.from_records(actions_inventory, columns=Action._fields)
    df.to_csv(args['actions_to_perform_csv'], index=False)
    set_file_permission(args['actions_to_perform_csv'])
    # NOTE(review): from here on this looks like a separate script fragment
    # (apply-actions) fused into the file by the scrape -- it re-checks
    # 'captures' and re-initializes logging.
    if not os.path.isfile(args['actions_to_perform']):
        raise FileNotFoundError(
            "actions_to_perform {} does not exist -- must be a file".format(
                args['actions_to_perform']))

    if not os.path.isfile(args['captures']):
        raise FileNotFoundError(
            "captures {} does not exist -- must be a file".format(
                args['captures']))

    # configure logging
    set_logging(args['log_dir'], args['log_filename'])
    logger = logging.getLogger(__name__)

    logger.info("Reading actions from {}".format(args['actions_to_perform']))
    actions = read_image_inventory(args['actions_to_perform'], unique_id=None)

    logger.info("Reading captures from {}".format(args['captures']))
    captures = read_image_inventory(args['captures'], unique_id='image_name')

    # apply every action to its capture record
    # NOTE(review): the broad 'except Exception' logs and continues, so a
    # partial application is silently exported below -- confirm intended.
    try:
        for _id, action in actions.items():
            apply_action(captures[action['image']], action, flags)
        logger.info("Successfully applied actions")
    except Exception as e:
        logger.error("Failed to apply actions", exc_info=True)

    # write the (possibly partially) updated captures back to the same file
    export_inventory_to_csv(captures, args['captures'])

    logger.info("Updated captures file at: {}".format(args['captures']))
# Example n. 4
    parser.add_argument("--plot_timelines", action='store_true')
    args = vars(parser.parse_args())

    # check existence of the captures file
    if not os.path.isfile(args['captures']):
        raise FileNotFoundError(
            "captures {} does not exist -- must be a file".format(
                args['captures']))

    # configure logging
    set_logging(args['log_dir'], args['log_filename'])
    logger = logging.getLogger(__name__)

    # read grouped capture data, keyed by the original image path
    inventory = read_image_inventory(
        args['captures'],
        unique_id='image_path_original')

    # column names taken from the first inventory record
    header = list(inventory[list(inventory.keys())[0]].keys())

    # partition the 'image_check__*' columns by the action proposed in
    # flags: delete / invalidate / time-related checks
    check_columns = [x for x in header if x.startswith('image_check__')]
    to_delete_checks = \
        ['image_check__{}'.format(x)
         for x in flags['image_checks_propose_delete']]
    to_invalidate_checks = \
        ['image_check__{}'.format(x)
         for x in flags['image_checks_propose_invalidate']]
    time_checks = \
        ['image_check__{}'.format(x)
         for x in flags['image_checks_propose_time']]
# Example n. 5
 def setUp(self):
     # NOTE(review): truncated fragment with broken (1-space) indentation --
     # likely a scrape artifact; it runs straight into an unrelated script
     # fragment below. Build the test inventory and add time deltas.
     file = './test/files/test_inventory.csv'
     self.inventory = read_image_inventory(file)
     time_deltas = calculate_time_deltas(self.inventory, flags)
     update_inventory_with_capture_data(self.inventory, time_deltas)
    # NOTE(review): start of another fused script fragment (image matching);
    # 'args' and 'logger' are presumably defined above this view -- confirm.
    if not os.path.isdir(args['images_to_match_path']):
        raise FileNotFoundError(
            "images_to_match_path: {} must be a directory".format(
                args['images_to_match_path']))

    # configure logging
    set_logging()

    # find all jpg/jpeg images below the match path
    images_to_find = list_pictures(args['images_to_match_path'],
                                   ext=('jpg', 'jpeg'))

    logger.info("Found {} images in {}".format(len(images_to_find),
                                               args['images_to_match_path']))

    # captures keyed by image path
    captures = read_image_inventory(args['captures_csv'],
                                    unique_id='image_path')

    logger.info("Read {} with {} images".format(args['captures_csv'],
                                                len(captures.keys())))

    images_to_search_in = list(captures.keys())

    # one (initially empty) list of candidate matches per image to find
    matches = {path: [] for path in images_to_find}

    # file sizes are used as a cheap pre-filter before matching
    logger.info("Get file sizes for images to find...")
    to_find_size = path_to_size(images_to_find)
    logger.info("Get file sizes for images to search in...")
    to_search_size = path_to_size(images_to_search_in)

    # invert dictionary: size -> file names list
    # NOTE(review): fragment ends here; the inversion loop is cut off.
    size_to_find = dict()

def rename_images_in_inventory(inventory):
    """Rename every image in the inventory from its original path to its
    assigned target path."""
    records = inventory.values()
    # collect source and destination paths in matching order
    sources = [record['image_path_original'] for record in records]
    targets = [record['image_path'] for record in records]
    rename_files(sources, targets)


if __name__ == '__main__':

    # command-line interface
    cli = argparse.ArgumentParser()
    cli.add_argument("--inventory", type=str, required=True)
    cli.add_argument("--log_dir", type=str, default=None)
    cli.add_argument("--log_filename", type=str, default='rename_images')
    args = vars(cli.parse_args())

    # configure logging
    set_logging(args['log_dir'], args['log_filename'])
    logger = logging.getLogger(__name__)

    # load the inventory, then rename all images it lists
    inventory = read_image_inventory(args['inventory'])

    logger.info("Starting to rename images")
    rename_images_in_inventory(inventory)
    logger.info("Finished renaming images")