Example #1
def main():
    paths = []
    for ext in ('*.3ds', '*.3DS'):
        paths.extend(args.models_dir.glob(ext))

    for path in paths:
        with session_scope() as sess:
            shapes = sess.query(models.Shape).filter_by(source_id=path.name).all()
    print(shapes)

    app = brender.Brender()

    pbar = tqdm(paths)
    for path in pbar:
        pbar.set_description(f'{path}')

        with session_scope() as sess:
            shapes = sess.query(models.Shape).filter_by(source_id=path.name).all()

        if len(shapes) == 0:
            continue

        for shape in shapes:
            app.init()
            register_shape(app, shape, path)
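Every example on this page relies on session_scope, which the page itself never shows. It presumably follows SQLAlchemy's standard commit-or-rollback context-manager recipe; a minimal sketch, assuming a configured sessionmaker named Session (the commit keyword is taken from examples 8, 11, and 20 below):

from contextlib import contextmanager

@contextmanager
def session_scope(commit=True):
    # Hypothetical reconstruction, not the project's actual code: yield
    # a session, commit on success (when requested), roll back on error,
    # and always close.
    sess = Session()
    try:
        yield sess
        if commit:
            sess.commit()
    except Exception:
        sess.rollback()
        raise
    finally:
        sess.close()

Objects fetched inside the with block become detached once it exits, so touching lazy-loaded attributes afterwards can raise DetachedInstanceError; the examples below presumably get away with this because the session factory disables expire_on_commit or because they eager-load what they need.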
Example #2
async def show_prcs(request):
    resultset_id = request.match_info.get('resultset_id')

    with session_scope() as sess:
        resultset = sess.query(models.ResultSet).get(resultset_id)
        snapshot_name = resultset.snapshot_name
        model_name = resultset.model_name
        epoch = resultset.inference_name

    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    shuffle = request.query.get('shuffle', 'false') == 'true'
    max_dist = int(request.query.get('max_dist', config.INFERENCE_MAX_DIST))
    topk = int(request.query.get('topk', config.INFERENCE_TOPK))

    pair_ids = request.query.get('pair_ids', None)
    if pair_ids is not None:
        pair_ids = pair_ids.replace(' ', '').strip(', ')
        pair_ids = [int(i) for i in pair_ids.split(',')]

    filters = []
    if pair_ids:
        filters.append(models.ExemplarShapePair.id.in_(pair_ids))

    if shuffle:
        order_by = func.random()
    else:
        order_by = models.ExemplarShapePair.shape_id.asc()

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(sess,
                                               page=page,
                                               page_size=page_size,
                                               max_dist=max_dist,
                                               order_by=order_by,
                                               by_shape_topk=topk,
                                               by_shape=True,
                                               filters=filters)

    n_pages = int(math.ceil(count / page_size))

    inference_dir = (config.BRDF_CLASSIFIER_DIR_REMOTE / 'inference' /
                     snapshot_name / model_name / epoch)
    print(inference_dir)

    return {
        'inference_dir': inference_dir,
        'snapshot_name': snapshot_name,
        'model_name': model_name,
        'epoch': epoch,
        'cur_page': page,
        'page_size': page_size,
        'n_total': count,
        'n_pages': n_pages,
        'pairs': pairs,
        'resultset_id': resultset_id,
    }
Example #3
def main():
    with session_scope() as sess:
        exemplars = sess.query(models.Exemplar).order_by(
            models.Exemplar.id).filter_by(exclude=False).all()

    print(f"Fetched {len(exemplars)} exemplars. Loading features...")

    feats = []
    for e1 in tqdm(exemplars):
        f1 = e1.load_data(config.EXEMPLAR_ALIGN_DATA_NAME)
        feats.append(f1)

    feats = torch.tensor(feats).squeeze()
    if args.cuda:
        feats = feats.cuda()

    print(f"Computed {feats.size(0)} features each with {feats.size(1)} dims.")
    print(f"Computing duplicates...")
    duplicates = defaultdict(list)
    duplicate_dists = defaultdict(list)

    for i, f in enumerate(tqdm(feats)):
        dists = (feats -
                 f.unsqueeze(0).expand(*feats.size())).pow(2).sum(dim=1)
        dists[i] = float('inf')  # exclude self-distance
        duplicate_inds = torch.nonzero(dists < 0.1)
        if len(duplicate_inds) > 0:
            duplicates[i].extend(duplicate_inds[:, 0].tolist())
            duplicate_dists[i].extend(dists[duplicate_inds][:, 0].tolist())

    print(f"Found duplicates from {len(duplicates)} exemplars.")
    yy = input("Commit? This cannot be undone (easily). (y/n) ")
    if yy != 'y':
        return

    with session_scope() as sess:
        for i, dup_list in duplicates.items():
            e1 = exemplars[i]

            # Refetch from the DB and skip this exemplar if it has already
            # been excluded (i.e., it was itself flagged as a duplicate of
            # an earlier exemplar).
            e1 = sess.query(models.Exemplar).get(e1.id)
            if e1.exclude:
                continue

            for j in dup_list:
                e2 = exemplars[j]
                print(f'Set exemplar {e2.id}.exclude = True')
                sess.query(models.Exemplar).get(e2.id).exclude = True
                sess.commit()
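The O(N²) Python loop above can also be collapsed into a single pairwise-distance call, trading memory for speed; a vectorized sketch continuing from the variables above, using torch.cdist with the same 0.1 threshold:

all_dists = torch.cdist(feats, feats).pow(2)  # squared L2 distances, N x N
all_dists.fill_diagonal_(float('inf'))        # mask out self-matches
dup_i, dup_j = torch.nonzero(all_dists < 0.1, as_tuple=True)
for i, j in zip(dup_i.tolist(), dup_j.tolist()):
    duplicates[i].append(j)
    duplicate_dists[i].append(all_dists[i, j].item())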
Example #4
def main():
    tqdm.write('Loading pairs')

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs_default(sess)

    sn_pairs = []
    hm_pairs = []

    for pair in tqdm(pairs):
        if pair.shape.source == 'shapenet':
            sn_pairs.append(pair)
        elif pair.shape.source == 'hermanmiller':
            hm_pairs.append(pair)

    turk_inf_dir = args.out_dir
    turk_inf_dir.mkdir(parents=True, exist_ok=True)

    sn_samples = random.sample(sn_pairs, 1000)
    hm_samples = random.sample(hm_pairs, 500)

    with open(turk_inf_dir / 'shapenet_pairs.json', 'w') as f:
        json.dump([p.id for p in sn_samples], f)

    with open(turk_inf_dir / 'hermanmiller_pairs.json', 'w') as f:
        json.dump([p.id for p in hm_samples], f)
Example #5
def render_segment_map(pbar, d, params):
    with session_scope() as sess:
        pair = sess.query(models.ExemplarShapePair).get(params['pair_id'])
        rk_mesh, _ = pair.shape.load()

    shape_id = params['shape_id']
    cam_fov = params['camera']['fov']
    cam_azimuth = params['camera']['azimuth']
    cam_elevation = params['camera']['elevation']
    cam_dist = params['camera']['distance']

    rk_mesh.resize(1)

    # Set up the camera from the pair's stored parameters.
    rk_camera = cameras.spherical_coord_to_cam(cam_fov,
                                               cam_azimuth,
                                               cam_elevation,
                                               cam_dist=cam_dist,
                                               max_len=500)

    seg_map = shortcuts.render_segments(rk_mesh, rk_camera)
    seg_vis = visualize_map(seg_map)[:, :, :3]

    path = d['seg_vis_path']
    pbar.set_description(f'{path}')

    imsave(path, seg_vis)
    imsave(d['seg_map_path'], (seg_map + 1).astype(np.uint8))

    vis.image(visualize_map(seg_map).transpose((2, 0, 1)))
Example #6
async def material_tree_json(request: web.Request):
    with session_scope() as sess:
        materials: List[Material] = (sess.query(Material).filter(
            Material.enabled.is_(True)).order_by(Material.substance.asc(),
                                                 Material.id.asc()).all())

    material_by_substance = defaultdict(list)
    for material in materials:
        if material.substance:
            material_by_substance[material.substance].append(material)

    tree = {
        'name': 'materials',
        'children': [{
            'name': subst,
            'text': subst,
            'children': [{
                'name': mat.name,
                'size': 120000,
                'img': nginx_url(mat.get_data_path('previews/bmps.png')),
            } for mat in subst_mats if mat.substance]
        } for subst, subst_mats in material_by_substance.items()],
    }

    return web.json_response(tree, dumps=partial(json.dumps, indent=2))
Example #7
async def list_materials(request: web.Request):
    substances = request.query.get('substance', None)
    types = request.query.get('type', None)
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 25))
    filters = []
    if substances:
        filters.append(Material.substance.in_(substances.split(',')))
    if types:
        filters.append(Material.type.in_(types.split(',')))

    with session_scope() as sess:
        materials, count = controllers.fetch_materials(
            sess,
            page_size,
            page,
            order_by=(Material.substance.asc(), Material.id.asc()),
            filters=filters)

    n_pages = int(math.ceil(count / page_size))

    return {
        'query': request.query,
        'substances': SUBSTANCES,
        'materials': materials,
        'cur_page': page,
        'page_size': page_size,
        'n_total': count,
        'n_pages': n_pages,
    }
Example #8
def main():

    with session_scope(commit=False) as sess:
        substance_dirs = list(Path(args.base_dir).iterdir())
        for substance_dir in sorted(substance_dirs):
            mdl_paths = []
            for material_dir in substance_dir.iterdir():
                # Assumes each material directory contains exactly one .mdl.
                mdl_path = list(material_dir.glob('*.mdl'))[0]
                mdl_paths.append(mdl_path)

            for mdl_path in sorted(mdl_paths, key=lambda s: s.stem):
                mdl_dict = mdl.parse_mdl(mdl_path)
                spatially_varying = isinstance(mdl_dict['base_color'], str)

                material = models.Material(
                    type=models.MaterialType.MDL,
                    name=mdl_path.stem,
                    substance=substance_dir.name,
                    spatially_varying=spatially_varying,
                    source='adobe_stock',
                    source_id=mdl_path.parent.name.split('_')[1],
                    params=mdl_dict,
                )

                if (sess.query(models.Material).filter_by(
                        type=material.type,
                        source_id=material.source_id).count() > 0):
                    logger.info('Material already exists',
                                **material.serialize())
                    continue

                logger.info('Adding material', **material.serialize())

                sess.add(material)
                sess.commit()
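The "query … count() > 0" existence check above reappears almost verbatim in examples 11, 16, 17, and 20. A hypothetical helper (not part of the original codebase) that captures the pattern:

def material_exists(sess, **kwargs):
    # Sketch only: the filter_by keywords mirror the original calls.
    return sess.query(models.Material).filter_by(**kwargs).count() > 0

With it, the check above would read: if material_exists(sess, type=material.type, source_id=material.source_id): continue.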
Example #9
def main():
    tqdm.write('Loading pairs')

    base_dir = Path('/projects/grail/kparnb/data/terial/turk-study/inference')
    pop_names = ['photos', 'default', 'random', 'flagship']
    pop_addrs = [
        ('default/rends-nofloor-cropped', 'mtl', 'default'),
        ('random/rends-nofloor-cropped', 'inferred', 'random'),
        ('flagship/rends-nofloor-cropped', 'inferred', 'flagship'),
    ]

    with (base_dir / 'hermanmiller_pairs.json').open('r') as f:
        hermanmiller_pair_ids = json.load(f)

    with (base_dir / 'shapenet_pairs.json').open('r') as f:
        shapenet_pair_ids = json.load(f)

    with session_scope() as sess:
        hermanmiller_pairs = (
            sess.query(models.ExemplarShapePair)
                .options(orm.joinedload(models.ExemplarShapePair.exemplar))
                .filter(models.ExemplarShapePair.id.in_(hermanmiller_pair_ids))
                .all())
        shapenet_pairs = (
            sess.query(models.ExemplarShapePair)
                .options(orm.joinedload(models.ExemplarShapePair.exemplar))
                .filter(models.ExemplarShapePair.id.in_(shapenet_pair_ids))
                .all())

    datasets = [
        ('shapenet', shapenet_pairs),
        ('hermanmiller', hermanmiller_pairs),
    ]

    images_dir = args.out_dir / 'images'

    tqdm.write('Saving pair photos')
    exemplar_out_dir = images_dir / 'photos'
    exemplar_out_dir.mkdir(exist_ok=True, parents=True)

    fieldnames = ['pair_id', 'dset_name', 'pop_name', 'url']

    for dset_name, dset_samps in datasets:
        rows = []
        for pop_name in pop_names:
            for pair in dset_samps:
                url = (f'photos/{pair.id}.jpg' if pop_name == 'photos'
                       else f'{dset_name}/{pop_name}/{pair.id}.jpg')
                rows.append({
                    'dset_name': dset_name,
                    'pop_name': pop_name,
                    'pair_id': pair.id,
                    'url': url,
                })

        random.shuffle(rows)

        csv_path = args.out_dir / f'{dset_name}.csv'
        with csv_path.open('w') as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(rows)
Example #10
async def post_annotation(request: web.Request):
    data = dict(await request.post())
    check_auth(data)
    result_set = get_result_set_or_raise(data)
    try:
        pair_id = int(data['pair_id'])
    except ValueError:
        raise web.HTTPBadRequest(text="Invalid pair_id")

    with session_scope() as sess:
        pair = sess.query(ExemplarShapePair).get(pair_id)
        if pair is None:
            raise web.HTTPNotFound(text="No such pair")

        annotation = sess.query(ResultAnnotation).filter_by(
            username=data['username'],
            shape_id=pair.shape.id,
            pair_id=pair.id,
            result_set_id=result_set.id).first()

        if annotation:
            annotation.category = data['category']
        else:
            annotation = ResultAnnotation(username=data['username'],
                                          category=data['category'],
                                          shape_id=pair.shape.id,
                                          pair_id=pair.id,
                                          result_set_id=result_set.id,
                                          date_updated=datetime.now())
            sess.add(annotation)
        sess.commit()

        return web.json_response(data=annotation.serialize())
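The query-then-branch update above is a read-modify-write upsert, which can race if two requests annotate the same pair concurrently. A hedged alternative sketch, assuming PostgreSQL and a unique constraint on (username, pair_id, result_set_id) (neither is confirmed by this page), via SQLAlchemy's ON CONFLICT support:

from sqlalchemy.dialects.postgresql import insert as pg_insert

stmt = pg_insert(ResultAnnotation.__table__).values(
    username=data['username'],
    category=data['category'],
    shape_id=pair.shape.id,
    pair_id=pair.id,
    result_set_id=result_set.id,
    date_updated=datetime.now(),
).on_conflict_do_update(
    index_elements=['username', 'pair_id', 'result_set_id'],
    set_={'category': data['category'], 'date_updated': datetime.now()},
)
sess.execute(stmt)  # one atomic statement instead of query-then-branch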
Example #11
def main():

    with session_scope(commit=True) as sess:
        with open(CSV_FILE_PATH, 'r') as f:
            reader = csv.reader(f)
            for line in reader:
                tup = MERLTuple(*line[:len(MERLTuple._fields)])
                if tup.name.strip() == '':
                    continue
                material = models.Material(
                    type=models.MaterialType.BLINN_PHONG,
                    name=tup.name.strip(),
                    spatially_varying=False,
                    enabled=False,
                    params={
                        # csv.reader yields strings; coerce to floats.
                        'diffuse': (float(tup.diff_r), float(tup.diff_g),
                                    float(tup.diff_b)),
                        'specular': (float(tup.spec_r), float(tup.spec_g),
                                     float(tup.spec_b)),
                        'shininess': float(tup.shininess),
                    }
                )
                if (sess.query(models.Material)
                        .filter_by(type=material.type, name=material.name)
                        .count() > 0):
                    logger.info('Material already exists',
                                **material.serialize())
                    continue
                logger.info('Adding material', **material.serialize())
                sess.add(material)
Example #12
async def list_shapes(request: web.Request):
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    source = request.query.get('source', None)

    filters = []
    if source:
        filters.append(Shape.source == source)

    with session_scope() as sess:
        query = sess.query(Shape).filter(sa.and_(*filters))
        shapes: List[Shape] = (query.order_by(Shape.id.asc()).offset(
            page * page_size).limit(page_size).all())

        count = query.count()

    n_pages = int(math.ceil(count / page_size))

    return {
        'shapes': shapes,
        'cur_page': page,
        'page_size': page_size,
        'n_total': count,
        'n_pages': n_pages,
    }
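The offset/limit/count pagination above recurs in examples 18, 19, and 28. A hypothetical helper (a sketch, not from the original code) that factors it out:

import math

def paginate(query, page, page_size):
    # Materialize one page of a SQLAlchemy query and report totals.
    items = query.offset(page * page_size).limit(page_size).all()
    count = query.count()
    n_pages = int(math.ceil(count / page_size))
    return items, count, n_pages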
Example #13
def main(out_dir):
    out_dir = Path(out_dir)
    app = brender.Brender()

    materials_by_substance = defaultdict(list)
    with session_scope() as sess:
        materials = sess.query(Material).filter_by(enabled=True).all()
        for material in materials:
            materials_by_substance[material.substance].append(material)

        # pairs, count = controllers.fetch_pairs(
        #     sess, max_dist=config.ALIGN_DIST_THRES,
        #     filters=[ExemplarShapePair.id >= start],
        #     order_by=ExemplarShapePair.shape_id.asc(),
        # )

        pairs, count = controllers.fetch_pairs(
            sess,
            by_shape=True,
            order_by=ExemplarShapePair.distance.asc(),
        )

        print(f'Fetched {len(pairs)} pairs. '
              f'align_dist_thres = {config.ALIGN_DIST_THRES}')

        pair_pbar = tqdm(pairs)
        for i, pair in enumerate(pair_pbar):
            pair_pbar.set_description(f'Pair {pair.id}')

            if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                continue

            app.init()
            render_pair(app, pair, materials_by_substance, out_dir)
Example #14
def main(out_dir):
    out_dir = Path(out_dir)
    out_dir.mkdir(exist_ok=True, parents=True)

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess,
            by_shape=True,
            # max_dist=config.ALIGN_DIST_THRES,
            order_by=ExemplarShapePair.distance.asc())

        logger.info('Fetched %d pairs. align_dist_thres = %f', len(pairs),
                    config.ALIGN_DIST_THRES)

        pair_pbar = tqdm(pairs)
        for pair in pair_pbar:
            pair_pbar.set_description(f'Pair {pair.id}')

            if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                continue

            seg_mat_candidates = assign_materials(pair)
            if seg_mat_candidates:
                with Path(out_dir, f'{pair.id}.json').open('w') as f:
                    json.dump(seg_mat_candidates, f, indent=2)
Example #15
def main():
    with session_scope() as sess:
        pairs = (sess.query(ExemplarShapePair)
                 .filter(ExemplarShapePair.distance < config.ALIGN_DIST_THRES)
                 .all())

        print(f'Fetched {len(pairs)} pairs. '
              f'align_dist_thres = {config.ALIGN_DIST_THRES}')

        pbar = tqdm(pairs)
        for pair in pbar:
            pbar.set_description(f'[Pair {pair.id}]')
            mesh, materials = pair.shape.load()
            camera = cameras.spherical_coord_to_cam(
                pair.fov, pair.azimuth, pair.elevation)
            camera.near, camera.far = \
                compute_tight_clipping_planes(mesh, camera.view_mat())
            segment_im = render_segments(mesh, camera)
            fg_bbox = mask_bbox(segment_im > -1)
            pair.params = {
                'camera': camera.tojsd(),
                'crop_bbox': fg_bbox,
            }
            sess.commit()
Example #16
def main():

    with session_scope(commit=True) as sess:
        substance_dirs = list(config.MATERIAL_DIR_POLIIGON.iterdir())
        for substance_dir in substance_dirs:
            for material_dir in substance_dir.iterdir():
                material = models.Material(
                    type=models.MaterialType.POLIIGON,
                    name=material_dir.name,
                    substance=substance_dir.name,
                    spatially_varying=True,
                )

                annot_path = material_dir / 'annotations.json'
                if annot_path.exists():
                    with open(annot_path, 'r') as f:
                        annot = json.load(f)
                        if 'scale' in annot:
                            material.min_scale = annot['scale']

                if (sess.query(models.Material)
                        .filter_by(type=material.type, name=material.name)
                        .count() > 0):
                    logger.info('Material already exists',
                                **material.serialize())
                    continue

                logger.info('Adding material', **material.serialize())

                sess.add(material)
Example #17
def main():
    if not args.dataset_path.exists():
        print('Given path does not exist.')
        return

    dataset_name = args.dataset_path.name

    print('Determining number of paths.')
    count = len(list(args.dataset_path.glob('**/*params.json')))

    pbar = tqdm.tqdm(total=count)

    with session_scope() as sess:
        materials = sess.query(models.Material).all()
        material_by_id = {m.id: m for m in materials}
        for client_dir in args.dataset_path.iterdir():
            client = client_dir.name.split('=')[1]
            for epoch_dir in client_dir.iterdir():
                epoch = int(epoch_dir.name.split('=')[1])
                for split_set_dir in epoch_dir.iterdir():
                    split_set = models.SplitSet[split_set_dir.name.upper()]
                    for path in split_set_dir.glob('*.params.json'):
                        pbar.update(1)
                        prefix = path.name.split('.')[0]
                        if (sess.query(models.Rendering)
                                .filter_by(dataset_name=dataset_name,
                                           client=client,
                                           split_set=split_set,
                                           epoch=epoch,
                                           prefix=prefix).count() > 0):
                            continue
                        rendering = register(
                            sess, dataset_name, client, epoch, split_set,
                            prefix, path, material_by_id)
                        pbar.set_description(f'{rendering.id}')
Example #18
async def list_exemplars(request: web.Request):
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    excluded = request.query.get('excluded', None)

    filters = []
    if excluded:
        filters.append(Exemplar.exclude == (excluded.lower() == 'true'))

    with session_scope() as sess:
        query = (sess.query(Exemplar)
                 .filter(sa.and_(*filters))
                 .order_by(Exemplar.id.asc())
                 )
        exemplars: List[Exemplar] = (query
                                     .offset(page * page_size)
                                     .limit(page_size)
                                     .all())
        count = query.count()

    n_pages = int(math.ceil(count / page_size))

    query = {}
    if excluded:
        query['excluded'] = excluded

    return {
        'exemplars': exemplars,
        'cur_page': page,
        'page_size': page_size,
        'n_total': count,
        'n_pages': n_pages,
        'query': query,
    }
Example #19
async def show_renderings(request: web.Request):
    dataset_name = 'raw-20180426'
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))

    with session_scope() as sess:
        query = (sess.query(models.Rendering).filter_by(exclude=False).join(
            (models.Material,
             models.Rendering.materials)).group_by(models.Rendering.id))
        num_total = query.count()
        # Materialize results before the session closes.
        renderings = query.offset(page * page_size).limit(page_size).all()

    num_pages = int(math.ceil(num_total / page_size))

    query = {}

    return {
        'renderings': renderings,
        'cur_page': page,
        'page_size': page_size,
        'n_total': num_total,
        'n_pages': num_pages,
        'dataset_name': dataset_name,
        'query': query,
    }
Example #20
def main():
    with open(CSV_PATH, 'r') as f:
        with session_scope(commit=False) as sess:
            reader = csv.DictReader(f)
            for row in reader:
                material = models.Material(
                    type=models.MaterialType.PRINCIPLED,
                    name=row['name'],
                    author='Keunhong Park',
                    substance=row['substance'],
                    spatially_varying=False,
                    params={
                        # ast.literal_eval parses the "(r, g, b)" string
                        # without eval's security risk.
                        'diffuse_color': ast.literal_eval(
                            row['diffuse_color']),
                        'specular': float(row['specular']),
                        'metallic': float(row['metallic']),
                        'roughness': float(row['roughness']),
                        'anisotropy': float(row['anisotropy']),
                        'anisotropic_rotation': float(
                            row['anisotropic_rotation']),
                        'clearcoat': float(row['clearcoat']),
                        'clearcoat_roughness': float(
                            row['clearcoat_roughness']),
                        'ior': float(row['ior']),
                    })

                if (sess.query(models.Material).filter_by(
                        type=material.type, name=material.name).count() > 0):
                    logger.info('Material already exists',
                                **material.serialize())
                    continue

                logger.info('Adding material', **material.serialize())

                sess.add(material)
                sess.commit()
Example #21
def main():
    args.out_dir.mkdir(exist_ok=True, parents=True)

    filters = []
    if args.shape_source:
        filters.append(models.Shape.source == args.shape_source)

    if args.shape_category:
        filters.append(models.Shape.category == args.shape_category)

    if args.topk:
        filters.append(models.ExemplarShapePair.rank <= args.topk)

    inference_dir = "/projects/grail/kparnb/photoshape/brdf-classifier" \
                    "/inference/20180516-500x500/20180527.022554.resnet34" \
                    ".opensurface_pretrained_on_substance_only.subst_loss=fc" \
                    ".color_loss=none.lab_10,10,10_1.0,1.0," \
                    "1.0.lr=0.0001.mask_noise_p=0.0.use_variance.sanka/45"

    with session_scope() as sess:
        shapes, count, pair_count = controllers.fetch_shapes_with_pairs(
            sess, filters=filters, max_dist=args.max_dist)
        print(f"Fetched {count} shapes.")
        for shape in tqdm(shapes):
            pairs: List[models.ExemplarShapePair] = shape.get_topk_pairs(
                args.topk, args.max_dist)
            for pair_rank, pair in enumerate(tqdm(pairs)):
                json_path = Path(inference_dir, f'{pair.id}.json')
                if json_path.exists():
                    make_blend_file(json_path, args.out_dir, shape, pair, pair_rank)
Example #22
def main():
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')
    # if args.frontal:
    #     renderings_dir = args.inference_dir / 'blend-frontal'
    # elif args.diagonal:
    #     renderings_dir = args.inference_dir / 'blend-diagonal'
    # else:
    #     renderings_dir = args.inference_dir / 'blend'
    renderings_dir = args.inference_dir / args.out_name
    renderings_dir.mkdir(parents=True, exist_ok=True)

    filters = []
    if args.category is not None:
        filters.append(ExemplarShapePair.shape.has(category=args.category))

    with session_scope() as sess:
        # pairs, count = controllers.fetch_pairs_default(sess)
        pairs, count = controllers.fetch_pairs(
            sess,
            filters=filters,
            by_shape=False,
            order_by=ExemplarShapePair.id.asc(),
        )

    pool = multiprocessing.Pool(processes=args.num_workers)

    pair_ids = [p.id for p in pairs]

    pbar = tqdm(total=len(pairs))
    for i in pool.imap_unordered(
            partial(worker, renderings_dir=renderings_dir), pair_ids):
        pbar.set_description(str(i))
        pbar.update(1)
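As written, the pool is never closed. Wrapping it in a with block, which terminates the workers once the loop finishes, is a small hardening; a sketch of the same loop under that change:

    with multiprocessing.Pool(processes=args.num_workers) as pool:
        pbar = tqdm(total=len(pair_ids))
        for i in pool.imap_unordered(
                partial(worker, renderings_dir=renderings_dir), pair_ids):
            pbar.set_description(str(i))
            pbar.update(1)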
Example #23
async def get_uncropped_exemplar(request: web.Request):
    pair_id = request.match_info['pair_id']
    with session_scope() as sess:
        pair = sess.query(ExemplarShapePair).get(pair_id)
        if pair is None:
            raise web.HTTPNotFound(text='No such pair.')

        uncropped_im = pair_utils.pair_uncropped_exemplar(pair)

    image = Image.fromarray(uncropped_im)
    buf = io.BytesIO()
    image.save(buf, format='JPEG')
    buf.seek(0)

    out_name = f'{pair.id}.{pair.exemplar.id}.uncropped.jpg'
    response = web.StreamResponse(
        status=200,
        headers=MultiDict({
            'Content-Type': 'image/jpeg',
            # 'Content-Disposition': f'attachment; filename={out_name}'
        }))
    await response.prepare(request)
    await response.write(buf.getvalue())
    await response.write_eof()
    return response
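Since the JPEG is fully buffered in memory anyway, aiohttp's plain web.Response is simpler than a StreamResponse; a sketch of the equivalent tail of this handler:

    return web.Response(body=buf.getvalue(),
                        content_type='image/jpeg',
                        status=200)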
Example #24
def register_shape(app, path):
    with suppress_stdout():
        scene = brender.Scene(app, shape=(1000, 1000))
        mesh = brender.Mesh.from_3ds(scene, path)
        mesh.remove_doubles()
        mesh.make_normals_consistent()
        mesh.enable_smooth_shading()
        mesh.unwrap_uv()

    with database.session_scope() as sess:
        shape = Shape(source=args.source_name,
                      source_id=path.name,
                      category=args.category)
        sess.add(shape)
        sess.flush()
        shape_dir = Path(config.BLOB_ROOT, 'shapes', str(shape.id))

        bpy.ops.export_scene.obj(filepath=str(TMP_OBJ_PATH))

        try:
            shape.models_dir.mkdir(parents=True)
            shutil.copy(str(TMP_OBJ_PATH), str(shape.obj_path))
            shutil.copy(str(TMP_MTL_PATH), str(shape.mtl_path))
        except Exception:
            # Clean up partially copied files before re-raising.
            shape.obj_path.unlink()
            shape.mtl_path.unlink()
            shape_dir.rmdir()
            raise

        sess.commit()
Example #25
async def show_exemplars(request):
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    with session_scope() as sess:
        exemplars = sess.query(models.Exemplar).filter_by(
            exclude=False).offset(page * page_size).limit(page_size).all()

    return {'exemplars': exemplars}
Example #26
def main():
    with session_scope() as sess:
        materials = (sess.query(models.Material)
                     .filter_by(type=models.MaterialType.MDL)
                     .all())

        for material in materials:
            bmat = loader.material_to_brender(material)
            print(bmat)
            del bmat
Example #27
def main():
    with database.session_scope() as sess:
        shapes = sess.query(Shape).filter_by(source='shapenet').all()

    metadata = shapenet.load_all_metadata()
    for shape in shapes:
        sn_model = shapenet.Model.from_id(shape.source_id)
        print(f"Processing shape {shape.id} ({sn_model.full_id})")
        register_shape(shape, sn_model)
Example #28
async def search_dataset_renderings(request: web.Request):
    dataset_name = request.match_info.get('dataset_name', '')
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))

    material_id = request.query.get('material_id', None)
    try:
        max_saturated_frac = float(
            request.query.get('max_saturated_frac', 0.05))
    except ValueError:
        raise web.HTTPBadRequest(text='max_saturated_frac must be a float')

    try:
        min_saturated_frac = float(
            request.query.get('min_saturated_frac', 0.0))
    except ValueError:
        raise web.HTTPBadRequest(text='min_saturated_frac must be a float')

    filters = [
        models.Rendering.dataset_name == dataset_name,
        models.Rendering.saturated_frac <= max_saturated_frac,
        models.Rendering.saturated_frac >= min_saturated_frac,
    ]

    if material_id:
        try:
            material_id = int(material_id)
        except ValueError:
            raise web.HTTPBadRequest(text='material_id must be integer')
        else:
            filters.append(models.Material.id == material_id)

    with session_scope() as sess:
        query = (sess.query(models.Rendering)
                 .filter_by(exclude=False)
                 .join((models.Material, models.Rendering.materials))
                 .filter(sa.and_(*filters))
                 .group_by(models.Rendering.id))
        num_total = query.count()
        # Materialize results before the session closes.
        renderings = query.offset(page * page_size).limit(page_size).all()

    num_pages = int(math.ceil(num_total / page_size))

    query = {}
    if material_id:
        query['material_id'] = material_id

    return {
        'renderings': renderings,
        'cur_page': page,
        'page_size': page_size,
        'n_total': num_total,
        'n_pages': num_pages,
        'dataset_name': dataset_name,
        'query': query,
    }
Example #29
def main():
    with session_scope() as sess:
        result = sess.execute("""
            update exemplar_shape_pair p set rank = s.rank
            from (
                select id,
                       rank() over (partition by shape_id order by distance asc)
                from exemplar_shape_pair
            ) s
            where p.id = s.id;
        """)
    print(str(result))
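One caveat: SQLAlchemy 1.4 and later expect textual SQL to be wrapped in text() rather than passed as a bare string, so on newer versions the call would read:

from sqlalchemy import text

result = sess.execute(text("""
    update exemplar_shape_pair p set rank = s.rank
    from (
        select id,
               rank() over (partition by shape_id order by distance asc)
        from exemplar_shape_pair
    ) s
    where p.id = s.id;
"""))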
Example #30
async def show_shapes(request):
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    with session_scope() as sess:
        shapes = sess.query(models.Shape).filter_by(exclude=False).offset(
            page * page_size).limit(page_size).all()

    return {
        'shapes': shapes,
    }