def parmap_lists(f, xs_list, j=cpu_count() // 2, chunksize=1, pool=ThreadPool):
    """Apply f in parallel over a list of lists.

    The sublists are flattened into one flat list, mapped with
    parmap_list, and the flat results are re-split so the output has the
    same nested grouping as the input.
    """
    group_sizes = map_list(len, xs_list)
    flattened = concat_lists(xs_list)
    mapped = parmap_list(f, flattened, j=j, chunksize=chunksize, pool=pool)
    return split_list(mapped, group_sizes)
def calibrate_intrinsic(args):
    """Run per-camera intrinsic calibration over a directory of images.

    Finds camera image directories, detects calibration-board points
    (with caching), calibrates each camera's intrinsics, and writes the
    result to a single-calibration file.

    Args:
        args: parsed configuration struct with `paths`, `runtime` and
            `camera` sections (see the option definitions elsewhere in
            the project for the individual fields).
    """
    # Resolve output/cache locations and set up logging before anything else,
    # so the full configuration is captured in the log file.
    paths = setup_paths(args.paths)
    setup_logging(args.runtime.log_level, [], log_file=paths.log_file)
    info(pformat_struct(args))

    image_path = os.path.expanduser(args.paths.image_path)
    info(f"Finding images in {image_path}")

    # matching=False: images are not required to match across cameras for
    # intrinsic-only calibration.
    camera_images = find_camera_images(image_path,
        args.paths.cameras, args.paths.camera_pattern, matching=False)

    image_counts = {k: len(files) for k, files in
        zip(camera_images.cameras, camera_images.filenames)}
    info("Found camera directories with images {}".format(image_counts))

    board_names, boards = split_dict(
        find_board_config(image_path, args.paths.boards))

    info("Loading images..")
    images = image.detect.load_images(camera_images.filenames,
        prefix=camera_images.image_path, j=args.runtime.num_threads)
    image_sizes = map_list(common_image_size, images)

    info({k: image_size for k, image_size in
        zip(camera_images.cameras, image_sizes)})

    # Cache key covers everything the detection result depends on, so a
    # change in boards, sizes or file set invalidates cached detections.
    cache_key = struct(boards=boards, image_sizes=image_sizes,
        filenames=camera_images.filenames)

    detected_points = detect_boards_cached(boards, images,
        paths.detections, cache_key, j=args.runtime.num_threads)

    cameras, errs = calibrate_cameras(boards, detected_points, image_sizes,
        model=args.camera.distortion_model, fix_aspect=args.camera.fix_aspect,
        has_skew=args.camera.allow_skew, max_images=args.camera.limit_intrinsic)

    for name, camera, err in zip(camera_images.cameras, cameras, errs):
        info(f"Calibrated {name}, with RMS={err:.2f}")
        info(camera)
        info("")

    info(f"Writing single calibrations to {paths.calibration_file}")
    export_single(paths.calibration_file, cameras, camera_images.cameras,
        camera_images.filenames)
def map_lists(f, xs_list, j=cpu_count() // 2, chunksize=1, pool=ThreadPool):
    """Map f over a list of lists in parallel by flattening, mapping, then
    splitting the flat results back into the original grouping.

    Args:
        f: function applied to each element.
        xs_list: list of lists of inputs.
        j: number of worker processes/threads (default: half the CPUs,
            matching parmap_lists; the previous os.sched_getaffinity
            default was Linux-only and failed at import time elsewhere).
        chunksize: chunk size passed to Pool.imap.
        pool: pool class to instantiate (default ThreadPool).

    Returns:
        List of lists of results, with the same shape as xs_list.
    """
    cam_lengths = map_list(len, xs_list)
    flat_files = concat_lists(xs_list)
    # Bind the pool instance to a fresh name: the original shadowed the
    # `pool` parameter here, and shadowed the builtin `iter` below.
    with pool(processes=j) as worker_pool:
        mapped = worker_pool.imap(f, flat_files, chunksize=chunksize)
        # tqdm shows progress; total must be given since imap is lazy.
        results = list(tqdm(mapped, total=len(flat_files)))
    return split_list(results, cam_lengths)
def _load_images(self, j=cpu_count()):
    """Load this object's images from `self.filenames` and record sizes.

    Populates `self.images` and `self.image_size`, logging a per-camera
    size summary. Requires `self.filenames` to have been set first.
    """
    assert self.filenames is not None, "_load_images: no filenames set"
    info("Loading images..")
    self.images = image.detect.load_images(
        self.filenames, j=j, prefix=self.image_path)
    self.image_size = map_list(common_image_size, self.images)
    info(f"Loaded {self.sizes.image * self.sizes.camera} images")
    # One entry per camera name -> detected common image size.
    info(dict(zip(self.names.camera, self.image_size)))
def add_camera_images(self, camera_images, j=cpu_count()):
    """Register a camera-image set on this object.

    Extends the camera/image name tables and stores filenames and the
    image path. Uses images already attached to `camera_images` when
    present; otherwise loads them from disk with `j` workers.
    """
    check_camera_images(camera_images)

    self.names = self.names._extend(
        camera=camera_images.cameras, image=camera_images.image_names)
    self.filenames = camera_images.filenames
    self.image_path = camera_images.image_path

    # Guard clause: no preloaded images -> read them from disk.
    if 'images' not in camera_images:
        self._load_images(j=j)
        return

    self.images = camera_images.images
    self.image_size = map_list(common_image_size, self.images)