Example no. 1
def write_results(images, annotations, save_path):
    """Collect per-image annotations into a COCO writer and dump them to save_path."""
    writer = get_coco_writer()
    for i, anns in annotations.items():
        img_h, img_w, img_path = images[i]
        if len(anns):
            image_id, _ = writer.add_frame(img_h, img_w, filename=img_path)
            for (bbox, category_id) in anns.values():
                writer.add_annotation(image_id, bbox, category_id)
    writer.write_result(save_path, verbose=True)
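
For reference, here is a minimal self-contained sketch of the same accumulate-then-write flow, using plain dicts and json instead of the project's COCO writer; the input layout (images[i] = (height, width, path), annotations[i] = {ann_id: (bbox, category_id)}) is inferred from the call above, and every name in the sketch is illustrative.

import json

def write_results_plain(images, annotations, save_path):
    # Assumed layout: images[i] = (height, width, path),
    # annotations[i] = {ann_id: (bbox, category_id)}, bbox = [x, y, w, h]
    result = {'images': [], 'annotations': []}
    for i, anns in annotations.items():
        if not anns:
            continue
        img_h, img_w, img_path = images[i]
        image_id = len(result['images'])
        result['images'].append({
            'id': image_id, 'height': img_h, 'width': img_w, 'file_name': img_path
        })
        for bbox, category_id in anns.values():
            result['annotations'].append({
                'id': len(result['annotations']),
                'image_id': image_id,
                'category_id': category_id,
                'bbox': list(bbox),
            })
    with open(save_path, 'w') as f:
        json.dump(result, f)
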
Example no. 2
            # Split the crop index off the file name:
            # hor_crop_name holds the crop file name parts split on '_'
            hor_crop_id = int(hor_crop_name[-1].split('.')[0])
            hor_crop_name = '_'.join(hor_crop_name[:-1]) + '.jpg'
            img_id = file_name2imgid.get(hor_crop_name)
            if img_id is None:
                print(f'No panorama found for {image_path}')
                continue
            hor_crop_id2annotcats[img_id][hor_crop_id] = cls_id
            hor_crop_id2crop_path[img_id][hor_crop_id] = image_path

    # Create a mapping from panorama names to panorama paths
    pan_name2path = get_pannames2paths(os.path.join(args.data_path, 'pans'))

    mappings = defaultdict(dict)

    writer = get_coco_writer()

    for img_id in tqdm(coco.imgs, desc="Mapping annotations to panoramas"):
        # Skip if there are no annotations
        if not hor_crop_id2annotcats[img_id]:
            continue
        ann_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None)
        im_anns = coco.loadAnns(ann_ids)

        # Extract the crop path and cut index, then resolve the source panorama path
        hor_crop_path = coco.imgs[img_id]['file_name']
        n_hor_crop = int(hor_crop_path.split('_')[-1].split('.')[0])
        pan_name = pan_name_from_crop_name(os.path.split(hor_crop_path)[1])
        pan_path = pan_name2path[pan_name]

        if args.debug:
Example no. 3
def main(args):
    pos_boxes = read_pos_boxes_file(args.pos_file, args.grid_radius)

    executor = PoolExecutor(max_workers=args.max_workers)
    m = mp.Manager()

    # Launch panorama indexing in a background thread
    pan_info_queue = QueueIterator(m.Queue(), batch_size=args.max_workers)
    get_panos_thread = Thread(target=get_panos,
                              daemon=True,
                              args=(pos_boxes, executor, pan_info_queue, args))
    get_panos_thread.start()
    # Give the indexing thread time to start producing results
    time.sleep(3)

    thetas = get_thetas(fov=args.fov, n_cuts=args.n_cuts_per_image)
    hor_maps = defaultdict(dict)

    pans_out_path = os.path.join(args.output_path, 'pans')
    hor_crop_out_path = os.path.join(args.output_path, 'hor_crops')
    classifier_out_path = os.path.join(args.output_path, 'hor_crops_infer')
    annotations_out_path = os.path.join(args.output_path,
                                        'hor_crops_annotations.json')

    # Load model configs
    det_config = load_model_config(args.detector_config)
    cls_config = load_model_config(args.classifier_config)

    detector = VinoModel(config=det_config, num_proc=args.detector_n_threads)

    classifier = VinoModel(config=cls_config, num_proc=args.detector_n_threads)

    writer = get_coco_writer()
    n_same, n_empty = 0, 0
    pbar = tqdm(total=0, desc='Downloading & Processing')
    for pan_infos in pan_info_queue:
        pbar.total = pan_info_queue.total_amount
        for pan_info in pan_infos:
            pbar.set_postfix(n_same=n_same, n_empty=n_empty)
            pan_id, lat, lon, year, month = (pan_info['panoid'],
                                             pan_info['lat'], pan_info['lon'],
                                             pan_info['year'],
                                             pan_info['month'])
            pano_name = f'{lat}_{lon}_{pan_id}_{month}_{year}'
            same_files = glob(
                os.path.join(pans_out_path, f'*{pan_id}_{month}_{year}*'))
            hor_crop_paths = glob(
                os.path.join(hor_crop_out_path, f'*{pan_id}_{month}_{year}*'))
            # Panorama already on disk: load it instead of downloading
            if same_files:
                n_same += 1
                pano_img = load_img(same_files[0])
            # Horizontal crops already on disk: skip downloading the panorama
            elif hor_crop_paths and args.skip_processed:
                n_same += 1
                # Map crop index -> crop path, parsing the index from the file name
                hor_crop_paths = {
                    int(os.path.splitext(path)[0].rsplit('_', 1)[1]): path
                    for path in hor_crop_paths
                }
                pano_img = None
            else:
                pano_img = download_panorama_v5(pan_id)
                # Skip this panorama if the download failed
                if pano_img is None:
                    n_empty += 1
                    pbar.update(1)
                    continue
            for n_hor_crop, theta in enumerate(thetas):
                # Panorama was not downloaded: load the existing crop from disk
                if pano_img is None:
                    hor_crop_path = hor_crop_paths.get(n_hor_crop)
                    if hor_crop_path is None:
                        continue
                    else:
                        hor_img = load_img(hor_crop_path)
                else:
                    # Compute and cache the crop mapping for this panorama shape and theta
                    if hor_maps[pano_img.shape].get(theta) is None:
                        hor_maps[pano_img.shape][theta] = get_mapping(
                            *pano_img.shape[:2], theta, args.phi, args.res_y,
                            args.res_x, args.fov)
                    hor_img = _get_crop(pano_img,
                                        *hor_maps[pano_img.shape][theta])
                hor_img_name = f'{pano_name}_{n_hor_crop}'
                hor_img_path = os.path.join(hor_crop_out_path,
                                            f'{hor_img_name}.jpg')

                is_predicted = process_image(detector, classifier, hor_img,
                                             hor_img_path, writer,
                                             classifier_out_path)
                if is_predicted:
                    os.makedirs(hor_crop_out_path, exist_ok=True)
                    save_img(hor_img_path, hor_img)
            pbar.update(1)
            # Save intermediate results every 500 panoramas
            if pbar.n % 500 == 0:
                writer.write_result(annotations_out_path)
    writer.write_result(annotations_out_path)
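
The script above splits indexing from downloading: get_panos runs in a background thread and feeds panorama metadata into a queue that the main loop consumes in batches through QueueIterator. Below is a minimal self-contained sketch of that producer/consumer pattern, using a plain queue.Queue and a sentinel value; QueueIterator's exact batching and total_amount tracking are assumptions.

import queue
import threading

_DONE = object()  # sentinel marking the end of the stream

def producer(items, q):
    # Stand-in for get_panos: push metadata to the queue as it is discovered
    for item in items:
        q.put(item)
    q.put(_DONE)

def iter_batches(q, batch_size):
    # Stand-in for QueueIterator: yield items in fixed-size batches
    batch = []
    while True:
        item = q.get()
        if item is _DONE:
            break
        batch.append(item)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch

q = queue.Queue()
threading.Thread(target=producer, args=(range(10), q), daemon=True).start()
for batch in iter_batches(q, batch_size=4):
    print(batch)  # the consumer processes batches while the producer keeps indexing
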
Example no. 4
def main(args, writer=None):
    pos_boxes = read_pos_boxes_file(args.pos_file, args.grid_radius)
    indexed_pan_ids = set()
    n_boxes = len(pos_boxes)

    thetas = get_thetas(fov=args.fov, n_cuts=args.n_cuts_per_image)
    hor_maps = defaultdict(dict)

    pans_out_path = os.path.join(args.output_path, 'pans')
    hor_crop_out_path = os.path.join(args.output_path, 'hor_crops')
    classifier_out_path = os.path.join(args.output_path, 'hor_crops_infer')
    annotations_out_path = os.path.join(args.output_path,
                                        'hor_crops_annotations.json')

    # Load model configs
    det_config = load_model_config(args.detector_config)
    cls_config = load_model_config(args.classifier_config)

    detector = VinoModel(config=det_config, num_proc=args.detector_n_threads)

    classifier = VinoModel(config=cls_config, num_proc=args.detector_n_threads)

    writer = writer or get_coco_writer()
    n_same, n_empty = 0, 0
    pbar = tqdm(total=0, desc='Indexing 0%')
    for i, pos_box in enumerate(pos_boxes, 1):
        grid = create_grid(*pos_box, step=args.step)
        pan_infos = get_grid_panoids(grid, closest=args.closest)
        # Drop panoramas that were already indexed; record new ids in indexed_pan_ids
        pan_infos = [
            indexed_pan_ids.add(p['panoid']) or p for p in pan_infos
            if p['panoid'] not in indexed_pan_ids
        ]
        # Filter by min year
        pan_infos = [p for p in pan_infos if p['year'] >= args.min_year]
        pbar.set_description(f'Indexing {i / n_boxes * 100:.2f}%')
        pbar.total += len(pan_infos)
        pbar.refresh()
        for pan_info in pan_infos:
            pbar.set_postfix(n_same=n_same, n_empty=n_empty)
            pan_id, lat, lon, year, month = (pan_info['panoid'],
                                             pan_info['lat'], pan_info['lon'],
                                             pan_info['year'],
                                             pan_info['month'])
            pano_name = f'{lat}_{lon}_{pan_id}_{month}_{year}'
            same_files = glob(
                os.path.join(pans_out_path, f'*{pan_id}_{month}_{year}*'))
            hor_crop_paths = glob(
                os.path.join(hor_crop_out_path, f'*{pan_id}_{month}_{year}*'))
            # Panorama already on disk: load it instead of downloading
            if same_files:
                n_same += 1
                pano_img = load_img(same_files[0])
            # Horizontal crops already on disk: skip downloading the panorama
            elif hor_crop_paths and args.skip_processed:
                n_same += 1
                # Map crop index -> crop path, parsing the index from the file name
                hor_crop_paths = {
                    int(os.path.splitext(path)[0].rsplit('_', 1)[1]): path
                    for path in hor_crop_paths
                }
                pano_img = None
            else:
                pano_img = download_panorama_v5(pan_id)
                # Skip this panorama if the download failed
                if pano_img is None:
                    n_empty += 1
                    pbar.update(1)
                    continue
            for n_hor_crop, theta in enumerate(thetas):
                # Panorama was not downloaded: load the existing crop from disk
                if pano_img is None:
                    hor_crop_path = hor_crop_paths.get(n_hor_crop)
                    if hor_crop_path is None:
                        continue
                    else:
                        hor_img = load_img(hor_crop_path)
                else:
                    # Compute and cache the crop mapping for this panorama shape and theta
                    if hor_maps[pano_img.shape].get(theta) is None:
                        hor_maps[pano_img.shape][theta] = get_mapping(
                            *pano_img.shape[:2], theta, args.phi, args.res_y,
                            args.res_x, args.fov)
                    hor_img = _get_crop(pano_img,
                                        *hor_maps[pano_img.shape][theta])
                hor_img_name = f'{pano_name}_{n_hor_crop}'
                hor_img_path = os.path.join(hor_crop_out_path,
                                            f'{hor_img_name}.jpg')

                is_predicted = process_image(detector, classifier, hor_img,
                                             hor_img_path, writer,
                                             classifier_out_path)
                if is_predicted:
                    os.makedirs(hor_crop_out_path, exist_ok=True)
                    save_img(hor_img_path, hor_img)
            pbar.update(1)
            # Save intermediate results every 500 panoramas
            if pbar.n % 500 == 0:
                writer.write_result(annotations_out_path)
    writer.write_result(annotations_out_path)
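
Both scripts depend on the same file-naming round trip: a crop is saved as '{lat}_{lon}_{pan_id}_{month}_{year}_{n}.jpg' and later resolved back to its crop index (and base panorama name) by splitting on '_', as in the hor_crop_paths dictionary above. A short sketch of that round trip with made-up values:

import os

lat, lon, pan_id, month, year, n_hor_crop = 55.75, 37.62, 'abc123', 7, 2020, 3
pano_name = f'{lat}_{lon}_{pan_id}_{month}_{year}'
hor_crop_path = f'{pano_name}_{n_hor_crop}.jpg'

# Recover the crop index: strip the extension, take the token after the last '_'
crop_index = int(os.path.splitext(hor_crop_path)[0].rsplit('_', 1)[1])
# Recover the panorama name: drop the trailing '_{n}' token
base_name = os.path.splitext(hor_crop_path)[0].rsplit('_', 1)[0]
assert crop_index == n_hor_crop and base_name == pano_name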