Example #1
0
def main():
    """Sample pair IDs per shape source and write them as JSON manifests.

    Splits all fetched pairs into 'shapenet' and 'hermanmiller' buckets,
    randomly samples up to 1000 / 500 of each, and dumps the sampled pair
    IDs to JSON files under ``args.out_dir``.
    """
    tqdm.write('Loading pairs')

    with session_scope() as sess:
        pairs, _count = controllers.fetch_pairs_default(sess)

    # Bucket pairs by the dataset their shape came from.
    sn_pairs = []
    hm_pairs = []
    for pair in tqdm(pairs):
        if pair.shape.source == 'shapenet':
            sn_pairs.append(pair)
        elif pair.shape.source == 'hermanmiller':
            hm_pairs.append(pair)

    turk_inf_dir = args.out_dir
    turk_inf_dir.mkdir(parents=True, exist_ok=True)

    # Clamp the sample size to the population: random.sample raises
    # ValueError when k exceeds len(population).
    sn_samples = random.sample(sn_pairs, min(1000, len(sn_pairs)))
    hm_samples = random.sample(hm_pairs, min(500, len(hm_pairs)))

    with open(turk_inf_dir / 'shapenet_pairs.json', 'w') as f:
        json.dump([p.id for p in sn_samples], f)

    with open(turk_inf_dir / 'hermanmiller_pairs.json', 'w') as f:
        json.dump([p.id for p in hm_samples], f)
Example #2
0
def main():
    """Match input .jpg files to pairs by ID and process them with a pool.

    Each input filename's stem is interpreted as a pair ID; files whose ID
    matches a fetched pair are handed to ``worker`` via a multiprocessing
    pool as ``(pair, input_path)`` tuples.
    """
    args.output_dir.mkdir(parents=True, exist_ok=True)
    input_paths = list(args.input_dir.glob('*.jpg'))
    print("Fetching pairs.")
    with session_scope() as sess:
        pairs, _ = controllers.fetch_pairs_default(sess)
        pairs_by_id = {p.id: p for p in pairs}

    # Keep only inputs whose filename stem maps to a known pair ID.
    input_tups = []
    for input_path in tqdm(input_paths):
        pair_id = int(input_path.name.split('.')[0])
        pair = pairs_by_id.get(pair_id)  # single lookup instead of `in` + `[]`
        if pair is None:
            continue
        input_tups.append((pair, input_path))

    pbar = tqdm(total=len(input_tups))
    with multiprocessing.Pool(processes=args.num_workers) as pool:
        # Results are discarded; the workers' side effects are what matter.
        for _ in pool.imap_unordered(worker, input_tups):
            pbar.update(1)
    # Close the bar explicitly so its final state is rendered (the original
    # leaked it).
    pbar.close()
Example #3
0
def main():
    """Process pairs lacking an output JSON, giving workers material IDs
    grouped by substance.

    Skips pairs whose ``{pair.id}.json`` already exists unless
    ``args.overwrite`` is set.
    """
    out_dir = args.out_dir
    out_dir.mkdir(exist_ok=True, parents=True)

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess)
        # NOTE(review): materials/mat_by_id are unused in this function but
        # kept so the DB access pattern is unchanged — confirm before removing.
        materials = sess.query(models.Material).all()
        mat_by_id = {m.id: m for m in materials}

    # Skip pairs whose output already exists unless --overwrite was given.
    pairs = [
        pair for pair in pairs
        if args.overwrite or not Path(out_dir, f'{pair.id}.json').exists()
    ]
    print(len(pairs))

    # Group enabled material IDs by their substance name for the workers.
    mat_id_by_subst = defaultdict(list)
    tqdm.write("Fetching shapes and their pairs.")
    with session_scope() as sess:
        materials = sess.query(models.Material).filter_by(enabled=True).all()
        for material in materials:
            mat_id_by_subst[material.substance].append(material.id)

    tqdm.write(f"Processing {len(pairs)} pairs")
    pbar = tqdm(total=len(pairs))
    # Use the pool as a context manager so it is always terminated
    # (the original never closed/joined it), matching Example #2's style.
    with multiprocessing.Pool(processes=args.num_workers) as pool:
        for _ in pool.imap_unordered(
                partial(worker, mat_id_by_subst=mat_id_by_subst), pairs):
            pbar.update(1)
    pbar.close()
def main():
    """Generate renderings for pairs that lack foreground bbox data."""
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')

    filters = []
    if args.category:
        filters.append(ExemplarShapePair.shape.has(category=args.category))

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess, filters=filters)

        print(f"Fetched {len(pairs)} pairs")

        # Only pairs still missing the foreground bounding box need work.
        pending = [
            p for p in pairs if not p.data_exists(config.PAIR_FG_BBOX_NAME)
        ]

        print(f"Generating renderings for {len(pending)} pairs.")

        pbar = tqdm(pending)
        for pair in pbar:
            # Defensive re-check before rendering.
            if pair.data_exists(config.PAIR_FG_BBOX_NAME):
                continue
            render_model_exemplars(pbar, pair)
Example #5
0
def main():
    """Load a RendNet3 checkpoint and write per-segment top-k predictions
    for every pair that has an (old-style) clean segment map.

    Output is one ``{pair.id}.json`` per pair under the inference directory
    derived from the checkpoint path.
    """
    checkpoint_path = Path(args.checkpoint_path)
    checkpoint_name = checkpoint_path.parent.parent.name
    snapshot_path = (checkpoint_path.parent.parent.parent.parent
                     / 'snapshots' / checkpoint_name / 'snapshot.json')

    # The snapshot maps material IDs to training labels; invert it so model
    # outputs (labels) can be decoded back to material IDs.
    with snapshot_path.open('r') as f:
        mat_id_to_label = json.load(f)['mat_id_to_label']
        label_to_mat_id = {v: k for k, v in mat_id_to_label.items()}

    with (checkpoint_path.parent / 'model_params.json').open('r') as f:
        params_dict = json.load(f)

    print(f'Loading checkpoint from {checkpoint_path}')
    checkpoint = torch.load(checkpoint_path)

    if not args.out_name:
        # TODO: remove this ugly thing. (There's no reason to the +1 we did)
        out_name = str(checkpoint['epoch'] - 1)
    else:
        out_name = args.out_name

    model_name = checkpoint_path.parent.name
    out_dir = (checkpoint_path.parent.parent.parent.parent / 'inference' /
               checkpoint_name / model_name / str(out_name))

    # NOTE(review): the +1 presumably reserves an extra (background?) class
    # slot — confirm against training code.
    model = RendNet3(num_classes=len(label_to_mat_id) + 1,
                     num_roughness_classes=20,
                     num_substances=len(SUBSTANCES),
                     base_model=resnet.resnet18(pretrained=False),
                     output_substance=True,
                     output_roughness=True)
    model.load_state_dict(checkpoint['state_dict'])
    model.train(False)  # Inference only.
    model = model.cuda()

    yy = input(f'Will save to {out_dir!s}, continue? (y/n): ')
    if yy != 'y':
        return

    out_dir.mkdir(exist_ok=True, parents=True)

    print('Loading pairs')
    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess)
        # NOTE(review): materials/mat_by_id are unused here; kept so DB
        # access is unchanged — confirm before removing.
        materials = sess.query(models.Material).all()
        mat_by_id = {m.id: m for m in materials}

    pbar = tqdm(pairs)
    for pair in pbar:
        out_path = Path(out_dir, f'{pair.id}.json')
        if not args.overwrite and out_path.exists():
            continue

        if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME_OLD):
            continue
        pbar.set_description(f'Pair {pair.id}')

        shape = (224, 224)
        exemplar_im = resize(pair.exemplar.load_cropped_image(), shape)

        # IDs are shifted down by one; negative IDs are skipped below.
        segment_map = pair.load_data(
            config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME_OLD) - 1

        vis.image(exemplar_im.transpose((2, 0, 1)), win='exemplar-image')

        result_dict = {'pair_id': pair.id, 'segments': {}}

        for seg_id in [s for s in np.unique(segment_map) if s >= 0]:
            seg_mask = (segment_map == seg_id)
            topk_dict = compute_topk(label_to_mat_id, model, exemplar_im,
                                     seg_mask)
            result_dict['segments'][str(seg_id)] = topk_dict

        # out_path is already a Path; no need to re-wrap it.
        with open(out_path, 'w') as f:
            json.dump(result_dict, f, indent=2)
Example #6
0
def main():
    """Load a RendNet3 checkpoint and write per-segment top-k predictions,
    constrained by the per-segment substance and (optionally) a color
    histogram binner read from the model params.

    Output is one ``{pair.id}.json`` per pair under the inference directory
    derived from the checkpoint path.
    """
    checkpoint_path = args.checkpoint_path
    base_dir = checkpoint_path.parent.parent.parent.parent
    snapshot_name = checkpoint_path.parent.parent.name
    lmdb_dir = (base_dir / 'lmdb' / snapshot_name)

    # Training metadata maps material IDs to labels; invert it so model
    # outputs (labels) can be decoded back to material IDs.
    with (lmdb_dir / 'meta.json').open('r') as f:
        meta_dict = json.load(f)
        mat_id_to_label = meta_dict['mat_id_to_label']
        label_to_mat_id = {v: k for k, v in mat_id_to_label.items()}

    with (checkpoint_path.parent / 'model_params.json').open('r') as f:
        model_params = json.load(f)

    # Models trained with a color-histogram input need a matching binner.
    color_binner = None
    if 'color_hist_space' in model_params:
        color_binner = ColorBinner(
            space=model_params['color_hist_space'],
            shape=tuple(model_params['color_hist_shape']),
            sigma=tuple(model_params['color_hist_sigma']),
        )

    print(f'Loading checkpoint from {checkpoint_path!s}')
    checkpoint = torch.load(checkpoint_path)

    if not args.out_name:
        # TODO: remove this ugly thing. (There's no reason to the +1 we did)
        out_name = str(checkpoint['epoch'] - 1)
    else:
        out_name = args.out_name

    model_name = checkpoint_path.parent.name
    out_dir = (base_dir / 'inference' / snapshot_name / model_name / out_name)

    model = RendNet3.from_checkpoint(checkpoint)
    model.train(False)  # Inference only.
    model = model.cuda()

    yy = input(f'Will save to {out_dir!s}, continue? (y/n): ')
    if yy != 'y':
        return

    out_dir.mkdir(exist_ok=True, parents=True)

    filters = []
    if args.category:
        filters.append(ExemplarShapePair.shape.has(category=args.category))

    print('Loading pairs')
    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess, filters=filters)
        materials = sess.query(models.Material).all()
        mat_by_id = {m.id: m for m in materials}

    # Skip pairs whose output already exists unless --overwrite was given.
    pairs = [
        pair for pair in pairs
        if args.overwrite or not Path(out_dir, f'{pair.id}.json').exists()
    ]

    pbar = tqdm(pairs)
    for pair in pbar:
        out_path = Path(out_dir, f'{pair.id}.json')
        # (The original re-checked out_path.exists() here; that check is
        # fully redundant with the list comprehension filter above.)

        if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
            tqdm.write('clean segment map not exists')
            continue
        pbar.set_description(f'Pair {pair.id}')

        shape = (224, 224)
        exemplar_im = pair.exemplar.load_cropped_image()
        exemplar_im = skimage.transform.resize(exemplar_im,
                                               shape,
                                               anti_aliasing=True,
                                               order=3,
                                               mode='constant',
                                               cval=1)

        # IDs are shifted down by one; negative IDs are skipped below.
        segment_map = pair.load_data(
            config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME) - 1
        substance_map = pair.exemplar.load_data(config.EXEMPLAR_SUBST_MAP_NAME)
        # order=0 (nearest-neighbor) preserves the discrete substance labels.
        substance_map = resize(substance_map, segment_map.shape, order=0)

        vis.image(exemplar_im.transpose((2, 0, 1)), win='exemplar-image')

        result_dict = {'pair_id': pair.id, 'segments': {}}

        subst_id_by_seg_id = compute_segment_substances(
            pair,
            return_ids=True,
            segment_map=segment_map,
            substance_map=substance_map)

        for seg_id in [s for s in np.unique(segment_map) if s >= 0]:
            seg_mask = (segment_map == seg_id)
            topk_dict = compute_topk(
                label_to_mat_id,
                model,
                exemplar_im,
                seg_mask,
                minc_substance=SUBSTANCES[subst_id_by_seg_id[seg_id]],
                color_binner=color_binner,
                mat_by_id=mat_by_id)
            result_dict['segments'][str(seg_id)] = topk_dict

        # out_path is already a Path; no need to re-wrap it.
        with open(out_path, 'w') as f:
            json.dump(result_dict, f, indent=2)
Example #7
0
def main():
    """Compute SIFT-flow-based silhouette flows for pairs that have a
    rendered segment map but no flow data yet, displaying intermediate
    results through the `vis` client.
    """
    print(f"Initializing MATLAB engine.")
    engine = matlab.engine.start_matlab()
    # SiftFlow's MATLAB implementation and its mex helpers live here.
    engine.addpath(str(siftflow_path))
    engine.addpath(str(siftflow_path / 'mexDenseSIFT'))
    engine.addpath(str(siftflow_path / 'mexDiscreteFlow'))

    # Escalate UserWarnings to errors so questionable data aborts loudly.
    warnings.simplefilter("error", UserWarning)

    filters = []
    if args.category:
        filters.append(ExemplarShapePair.shape.has(category=args.category))

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess, filters=filters)
        # pairs, count = controllers.fetch_pairs(
        #     sess,
        #     by_shape=True,
        #     order_by=ExemplarShapePair.shape_id.asc(),
        # )

        print(f"Fetched {count} pairs")

        # Only pairs with a segment map and no existing flow need work.
        pairs = [
            pair for pair in pairs
            if (pair.data_exists(config.SHAPE_REND_SEGMENT_MAP_NAME)
                and not pair.data_exists(config.FLOW_DATA_NAME))
        ]

        print(f"Computing flows for {len(pairs)} pairs.")

        pbar = tqdm(pairs)
        pair: ExemplarShapePair
        for pair in pbar:
            pbar.set_description(f'Pair {pair.id}')
            # if not pair.exemplar.data_exists(config.EXEMPLAR_SUBST_MAP_NAME, type='numpy'):
            #     logger.warning('pair %d does not have substance map', pair.id)
            #     continue

            # Defensive re-checks; the list above already filtered on both
            # of these conditions.
            if not pair.data_exists(config.SHAPE_REND_SEGMENT_MAP_NAME):
                print(f'Pair {pair.id} does not have segment map')
                continue

            if pair.data_exists(config.FLOW_DATA_NAME):
                continue

            # Resize the exemplar to the render resolution so it can be
            # compared against rendered data.
            exemplar_im = transform.resize(pair.exemplar.load_cropped_image(),
                                           config.SHAPE_REND_SHAPE,
                                           anti_aliasing=True,
                                           mode='reflect')
            seg_vis = pair.load_data(config.SHAPE_REND_SEGMENT_VIS_NAME)

            vx, vy = compute_silhouette_flow(engine, pair)
            flow_vis = visualize_flow(vx, vy)

            # Show the flow field and the flow-warped segment visualization
            # blended 50/50 with the exemplar for visual inspection.
            vis.image(flow_vis.transpose((2, 0, 1)),
                      win='sil-flow',
                      opts={'title': 'sil-flow'})
            vis.image(
                ((exemplar_im + apply_flow(seg_vis, vx, vy)) / 2).transpose(
                    (2, 0, 1)),
                win='sil-flow-applied',
                opts={'title': 'sil-flow-applied'})

            # vx, vy = compute_phong_flow(engine, exemplar_im, phong_im)
            #
            # flow_vis = visualize_flow(vx, vy)
            # vis.image(flow_vis.transpose((2, 0, 1)),
            #           win='phong-flow',
            #           opts={'title': 'phong-flow'})
            # vis.image(
            #     ((exemplar_im + apply_flow(seg_vis, vx, vy))/2).transpose((2, 0, 1)),
            #     win='phong-flow-applied',
            #     opts={'title': 'phong-flow-applied'})

            # Saving the visualization may emit warnings; suppress them
            # locally since UserWarning was escalated to an error above.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                pair.save_data(config.FLOW_VIS_DATA_NAME, flow_vis)
            pair.save_data(config.FLOW_DATA_NAME, np.dstack((vx, vy)))