def main(): """main""" args = parse_arguments() output_image_path = Path(args.output_dir).joinpath("camera_images") output_gaze_path = Path(args.output_dir).joinpath("gazemap_images") output_image_path.mkdir(parents=True, exist_ok=True) output_gaze_path.mkdir(parents=True, exist_ok=True) input_dirs = [Path(input_dir) for input_dir in args.input_dirs] naming_data = init_naming_data(args.naming) print("Reading scenarios...") scenario_index = get_scenario_start_index(naming_data) num_previous_scenarios = scenario_index - 1 scenario_groups = [] for input_dir in input_dirs: image_files = get_files_with_suffix(input_dir, args.suffix) json_files = get_files_with_suffix(input_dir, config.LABELME_SUFFIX) scenario_grouper = ScenarioGrouper( scenario_index, input_dir.name, args.image_topics, image_files, json_files, ) if not image_files: print( "Could not find any image files in scenario %s with %s extension. " "Make sure you set the corrent suffix with --suffix." % (scenario_grouper.scenario_name, args.suffix), file=sys.stderr, ) sys.exit(1) if not scenario_grouper.is_valid: print( "Images of scenario %s are not aligned or of same length for topics %s.\n" "Run merge.py with --reindex to align your files" % (scenario_grouper.scenario_name, args.image_topics), file=sys.stderr, ) sys.exit(1) if FileGrouper.is_empty(scenario_grouper.image_groups): print( "None of the image files for scenario %s is matching a topic in %s" % (scenario_grouper.scenario_name, args.image_topics), file=sys.stderr, ) sys.exit(1) scenario_groups.append(scenario_grouper) scenario_index += 1 print("Write %s" % config.MVROI_NAMING_FILE) naming_data = append_naming_data(scenario_groups, naming_data) write_json( Path(args.output_dir).joinpath(config.MVROI_NAMING_FILE), naming_data) for scenario_group in tqdm(scenario_groups, desc="Preparing scenarios..."): size = prepare_scenario_group_images(scenario_group, output_image_path) prepare_scenario_group_gazemaps(scenario_group, size, output_gaze_path)
def main(): """Main""" args = parse_arguments() Path(args.output_dir).mkdir(parents=True, exist_ok=True) image_files = get_files_with_suffix(args.input_dir, args.suffix) json_files = get_files_with_suffix(args.input_dir, config.LABELME_SUFFIX) layout_json = [ json_files.pop(json_files.index(file)) for file in json_files if config.MVROI_LAYOUT_FILE == file.name ] if not layout_json: print( "Input folder does not contain a %s file." % config.MVROI_LAYOUT_FILE, file=sys.stderr, ) sys.exit(1) print( "Found %d %s images and %d label files in %s\n" % (len(image_files), args.suffix, len(json_files), args.input_dir) ) layout_data = read_json(layout_json[0]) if args.split_images: split_images(image_files, layout_data, args.output_dir) individual_json_files = split_json_data(json_files, layout_data, args.suffix) for path, data in tqdm(individual_json_files, desc="Writing json files..."): write_json(Path(args.output_dir).joinpath(path), data)
def __check_image_content(self, path_expected, path_actual):
    expected = get_files_with_suffix(path_expected, ".png")
    actual = get_files_with_suffix(path_actual, ".png")
    for exp, act in zip(expected, actual):
        img_exp = PIL.Image.open(exp)
        img_act = PIL.Image.open(act)
        # Identical images have no bounding box of differing pixels.
        self.assertIsNone(PIL.ImageChops.difference(img_exp, img_act).getbbox())
def test_reindex__res_reindex__equal_to_res_individual(self):
    shutil.copytree(self.PATH_REINDEX, TEST_OUTPUT_PATH)
    merge.main()
    expected = get_files_with_suffix(PATH_INDIVIDUAL, ".png")
    actual = get_files_with_suffix(TEST_OUTPUT_PATH, ".png")
    expected.sort()
    actual.sort()
    self.assertEqual([exp.name for exp in expected], [act.name for act in actual])
def __check_hdf5_content(self, path_expected, path_actual):
    expected = get_files_with_suffix(path_expected, ".h5")
    actual = get_files_with_suffix(path_actual, ".h5")
    for exp, act in zip(expected, actual):
        self.assertEqual(exp.stat().st_size, act.stat().st_size)
        h5_exp = h5py.File(exp, "r")
        h5_act = h5py.File(act, "r")
        self.assertEqual(h5_exp["/"].name, h5_act["/"].name)
        h5_exp.close()
        h5_act.close()
def __check_json_content(self, path_expected, path_actual):
    expected = get_files_with_suffix(path_expected, ".json")
    actual = get_files_with_suffix(path_actual, ".json")
    for exp, act in zip(expected, actual):
        json_exp = read_json(exp)
        json_act = read_json(act)
        if exp.name == act.name == config.MVROI_LAYOUT_FILE:
            self.assertEqual(json_exp, json_act)
        elif exp.name == act.name == config.MVROI_NAMING_FILE:
            self.assertEqual(json_exp, json_act)
        else:
            # Compare expected shapes against actual shapes
            # (not expected against itself).
            for shape_exp, shape_act in zip(json_exp["shapes"], json_act["shapes"]):
                self.assertAlmostEqual(shape_exp, shape_act)
def test_get_files_with_suffix__input_path__ascending_order(self):
    result = get_files_with_suffix("", ".json")
    expected = [
        Path(element) for element in sorted(self.MULTI_NAME_TEST_CONTENT)
    ]
    self.assertEqual(len(expected), len(result))
    self.assertEqual(expected, result)
def main(): """main""" args = parse_arguments() width, height = parse_resolution(args.res) Path(args.output_dir).mkdir(parents=True, exist_ok=True) layout_data = create_layout_data(args.image_topics, args.images_per_row, width, height) if not (args.hdf5 or args.reindex): print("Write layout.json") write_json( Path(args.output_dir).joinpath(config.MVROI_LAYOUT_FILE), layout_data) image_files = get_files_with_suffix(args.input_dir, args.suffix) json_files = get_files_with_suffix(args.input_dir, ".json", ignore=config.MVROI_LAYOUT_FILE) print("Found %d %s images and %d label files in %s\n" % (len(image_files), args.suffix, len(json_files), args.input_dir)) if args.reindex: print("Reindexing files...") reindex_files(image_files, args.image_topics) return print("Grouping files for merging...") image_grouper = FileGrouper(layout_data, image_files, args.image_topics) json_grouper = FileGrouper(layout_data, json_files, args.image_topics) if not (image_grouper.is_valid and json_grouper.is_valid): print( "Image or json files not aligned or of same length for topics %s.\n" "Run with --reindex to align your files" % args.image_topics, file=sys.stderr, ) sys.exit(1) print("Found %d image and %d json groups to merge\n" % (len(image_grouper.merge_groups), len(json_grouper.merge_groups))) if args.hdf5: hdf5_merge(args, image_grouper, json_grouper) else: file_merge(args, image_grouper, json_grouper, args.suffix)
def main(): """main""" args = parse_arguments() Path(args.output_dir).mkdir(parents=True, exist_ok=True) image_files = get_files_with_suffix(args.input_dir, config.BDDA_IMAGE_SUFFIX) for image_file in tqdm(image_files, desc="Generating fake gazemaps..."): generate_fake_gazemaps(image_file, args.output_dir)
def main(): """main""" args = parse_arguments() Path(args.output_dir).mkdir(parents=True, exist_ok=True) print("Reading %s..." % config.MVROI_NAMING_FILE) naming_data = read_json(args.naming.name) gazemaps = get_files_with_suffix(args.input_dir, args.suffix) gazemap_groups = FileGrouper.group_files_by_keys(gazemaps, naming_data.keys()) grouped_pairs = get_path_pair_gazemap_groups(gazemap_groups, naming_data, args.output_dir) print( "Found %d %s gazemaps in %d groups" % (len(gazemaps), config.BDDA_IMAGE_SUFFIX, len(gazemap_groups.keys()))) bar = tqdm(grouped_pairs.items()) for key, pair_group in bar: bar.set_description("Reformatting sequence for index %s..." % key) reformat_gaze_map_sequence(pair_group)
def test_get_files_with_suffix__only_layout_json_but_ignored__no_files(self):
    result = get_files_with_suffix("", ".json", ignore="layout.json")
    self.assertFalse(result)
def test_get_files_with_suffix__only_layout_json__one_file(self):
    result = get_files_with_suffix("", ".json")
    self.assertEqual(1, len(result))
def test_get_files_with_suffix__input_path__start_with_input_path(self):
    result = get_files_with_suffix("test", ".json")
    self.assertEqual(2, len(result))
    self.assertEqual(Path("test"), Path(result[0]).parent)
def test_get_files_with_suffix__json_suffix__two_files(self):
    result = get_files_with_suffix("", ".json")
    self.assertEqual(2, len(result))
    self.assertEqual(Path("."), Path(result[0]).parent)
def test_get_files_with_suffix__not_existing_suffix__no_files(self):
    result = get_files_with_suffix("", ".jpg")
    self.assertFalse(result)
def test_get_files_with_suffix__empty_dir__no_files(self):
    result = get_files_with_suffix("", ".png")
    self.assertFalse(result)
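# The helper get_files_with_suffix is exercised by the tests above but its
# implementation is not shown in this section. A minimal sketch consistent
# with those tests (ascending order, suffix filter, optional ignored file
# name, paths kept relative to the input directory) could look as follows;
# the exact signature and the non-recursive lookup are assumptions, not the
# project's actual implementation.
from pathlib import Path


def get_files_with_suffix(input_dir, suffix, ignore=None):
    """Return files in input_dir with the given suffix, sorted ascending.

    Files whose name equals `ignore` are skipped.
    """
    files = [
        path
        for path in Path(input_dir).iterdir()
        if path.is_file() and path.suffix == suffix and path.name != ignore
    ]
    return sorted(files)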