def main(out_dir):
    """Assign material candidates to each aligned pair and dump them as JSON.

    Args:
        out_dir: Directory where per-pair ``<pair_id>.json`` candidate files
            are written (created if missing).
    """
    out_dir = Path(out_dir)
    out_dir.mkdir(exist_ok=True, parents=True)
    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess, by_shape=True,
            # max_dist=config.ALIGN_DIST_THRES,
            order_by=ExemplarShapePair.distance.asc())

        logger.info('Fetched %d pairs. align_dist_thres = %f',
                    len(pairs), config.ALIGN_DIST_THRES)

        pair_pbar = tqdm(pairs)
        for pair in pair_pbar:
            pair_pbar.set_description(f'Pair {pair.id}')
            # Materials are assigned per segment, so a pair without a clean
            # segment map cannot be processed.
            if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                continue
            seg_mat_candidates = assign_materials(pair)
            # Truthiness already covers both None and an empty collection;
            # the previous `and len(...) > 0` was redundant.
            if seg_mat_candidates:
                with Path(out_dir, f'{pair.id}.json').open('w') as f:
                    json.dump(seg_mat_candidates, f, indent=2)
def main():
    """Render blend images for all fetched pairs using a worker pool.

    Reads CLI options from the module-level ``args`` (inference_dir, out_name,
    category, num_workers) and writes renderings under
    ``args.inference_dir / args.out_name``.
    """
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')

    renderings_dir = args.inference_dir / args.out_name
    renderings_dir.mkdir(parents=True, exist_ok=True)

    filters = []
    if args.category is not None:
        filters.append(ExemplarShapePair.shape.has(category=args.category))

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess,
            filters=filters,
            by_shape=False,
            order_by=ExemplarShapePair.id.asc(),
        )

        pair_ids = [p.id for p in pairs]
        # Context-manage the pool so worker processes are always terminated
        # and reaped, even if a worker raises; the original leaked the pool.
        with multiprocessing.Pool(processes=args.num_workers) as pool:
            pbar = tqdm(total=len(pairs))
            for i in pool.imap_unordered(
                    partial(worker, renderings_dir=renderings_dir), pair_ids):
                pbar.set_description(str(i))
                pbar.update(1)
            pbar.close()
def main(out_dir):
    """Render every eligible aligned pair with all enabled materials.

    Materials are bucketed by substance before rendering so each pair can be
    rendered once per substance group.
    """
    out_dir = Path(out_dir)
    app = brender.Brender()

    materials_by_substance = defaultdict(list)
    with session_scope() as sess:
        # Group enabled materials by their substance label.
        for material in sess.query(Material).filter_by(enabled=True).all():
            materials_by_substance[material.substance].append(material)

        pairs, count = controllers.fetch_pairs(
            sess,
            by_shape=True,
            order_by=ExemplarShapePair.distance.asc(),
        )

        print(f'Fetched {len(pairs)} pairs. '
              f'align_dist_thres = {config.ALIGN_DIST_THRES}')

        pair_pbar = tqdm(pairs)
        for pair in pair_pbar:
            pair_pbar.set_description(f'Pair {pair.id}')
            # Rendering needs the clean segment map; skip pairs lacking it.
            if not pair.data_exists(config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME):
                continue
            app.init()
            render_pair(app, pair, materials_by_substance, out_dir)
async def show_prcs(request):
    """Render precision-recall curve context for a stored result set.

    Query params: page, page_size, shuffle, max_dist, topk, pair_ids (CSV).

    Raises:
        web.HTTPNotFound: if no ResultSet exists for ``resultset_id``.
    """
    resultset_id = request.match_info.get('resultset_id')
    with session_scope() as sess:
        resultset = sess.query(models.ResultSet).get(resultset_id)
        # Previously an unknown ID crashed with AttributeError on the next
        # line; return a proper 404 instead.
        if resultset is None:
            raise web.HTTPNotFound()
        snapshot_name = resultset.snapshot_name
        model_name = resultset.model_name
        epoch = resultset.inference_name

    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    shuffle = request.query.get('shuffle', 'false') == 'true'
    max_dist = int(request.query.get('max_dist', config.INFERENCE_MAX_DIST))
    topk = int(request.query.get('topk', config.INFERENCE_TOPK))

    pair_ids = request.query.get('pair_ids', None)
    if pair_ids is not None:
        # Tolerate spaces and stray commas in the CSV list.
        pair_ids = pair_ids.replace(' ', '').strip(', ')
        pair_ids = [int(i) for i in pair_ids.split(',')]

    filters = []
    if pair_ids:
        filters.append(models.ExemplarShapePair.id.in_(pair_ids))

    if shuffle:
        order_by = func.random()
    else:
        order_by = ExemplarShapePair.shape_id.asc()

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(sess,
                                               page=page,
                                               page_size=page_size,
                                               max_dist=max_dist,
                                               order_by=order_by,
                                               by_shape_topk=topk,
                                               by_shape=True,
                                               filters=filters)

        n_pages = int(math.ceil(count / page_size))
        inference_dir = (config.BRDF_CLASSIFIER_DIR_REMOTE / 'inference'
                         / snapshot_name / model_name / epoch)
        print(inference_dir)

        return {
            'inference_dir': inference_dir,
            'snapshot_name': snapshot_name,
            'model_name': model_name,
            'epoch': epoch,
            'cur_page': page,
            'page_size': page_size,
            'n_total': count,
            'n_pages': n_pages,
            'pairs': pairs,
            'resultset_id': resultset_id,
        }
def main():
    """Save silhouette visualization images for every aligned pair's flow.

    For each pair that has flow data, builds a silhouette mask for both the
    exemplar photo and the rendered shape, and saves each as an RGB image
    whose background is filled with a reference gradient pattern.
    """
    # Escalate UserWarnings to errors so silent image-processing problems
    # (e.g. from skimage) abort the run instead of producing bad output.
    warnings.simplefilter("error", UserWarning)
    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess,
            max_dist=config.ALIGN_DIST_THRES,
            order_by=ExemplarShapePair.distance.asc(),
        )

        print(f"Fetched {count} pairs")

        # Background fill: zero first channel plus two 0..1 gradient channels
        # from meshgrid, stacked into an H x W x 3 pattern.
        # NOTE(review): meshgrid's x-axis uses SHAPE_REND_SHAPE[0] and y-axis
        # SHAPE_REND_SHAPE[1]; fine if the shape is square — confirm otherwise.
        base_pattern = np.dstack(
            (np.zeros(config.SHAPE_REND_SHAPE),
             *np.meshgrid(np.linspace(0, 1, config.SHAPE_REND_SHAPE[0]),
                          np.linspace(0, 1, config.SHAPE_REND_SHAPE[1]))))

        pbar = tqdm(pairs)
        pair: ExemplarShapePair
        for pair in pbar:
            pbar.set_description(f'Pair {pair.id}')

            # if not pair.exemplar.data_exists(config.EXEMPLAR_SUBST_MAP_NAME, type='numpy'):
            #     logger.warning('pair %d does not have substance map', pair.id)
            #     continue

            # Flow data is required downstream; skip pairs without it.
            if not pair.data_exists(config.FLOW_DATA_NAME):
                print(f'Pair {pair.id} does not have flow')
                continue

            # Exemplar silhouette: bright-pixel mask, closed to fill small
            # holes, then resized to the fixed 500x500 working resolution.
            exemplar_sil = bright_pixel_mask(
                pair.exemplar.load_cropped_image(), percentile=95)
            exemplar_sil = binary_closing(exemplar_sil, selem=disk(3))
            exemplar_sil = transform.resize(exemplar_sil, (500, 500),
                                            anti_aliasing=True,
                                            mode='reflect')

            # Shape silhouette: segment map shifted so background is -1;
            # any labeled segment (> -1) counts as foreground.
            shape_sil = pair.load_data(config.SHAPE_REND_SEGMENT_MAP_NAME) - 1
            shape_sil = (shape_sil > -1)
            shape_sil = binary_closing(shape_sil, selem=disk(3))

            # Broadcast the binary masks to 3-channel float images, then
            # paint background pixels with the gradient reference pattern.
            exemplar_sil_im = exemplar_sil[:, :, None].repeat(repeats=3,
                                                              axis=2).astype(float)
            shape_sil_im = shape_sil[:, :, None].repeat(repeats=3,
                                                        axis=2).astype(float)
            exemplar_sil_im[exemplar_sil == 0] = base_pattern[exemplar_sil == 0]
            shape_sil_im[shape_sil == 0] = base_pattern[shape_sil == 0]

            pair.save_data(config.FLOW_SHAPE_SILHOUETTE_VIS, shape_sil_im)
            pair.save_data(config.FLOW_EXEMPLAR_SILHOUETTE_VIS, exemplar_sil_im)
def main():
    """Generate colorized visualizations for each pair's segment maps.

    For every pair within the alignment threshold, visualizes up to three
    segment maps (rendered, clean, warped), saving each under its
    corresponding *_VIS_NAME key.
    """
    with session_scope(commit=False) as sess:
        pairs, count = controllers.fetch_pairs(
            sess, max_dist=config.ALIGN_DIST_THRES,
            order_by=ExemplarShapePair.distance.asc())

        pbar = tqdm(pairs)
        for pair in pbar:
            pbar.set_description(f'{pair.id}: loading')
            # The same load->visualize->save sequence was copy-pasted three
            # times in the original; it now lives in one helper.
            _visualize_seg_map(pbar, pair,
                               config.SHAPE_REND_SEGMENT_MAP_NAME,
                               config.SHAPE_REND_SEGMENT_VIS_NAME,
                               'seg map vis')
            _visualize_seg_map(pbar, pair,
                               config.PAIR_SHAPE_CLEAN_SEGMENT_MAP_NAME,
                               config.PAIR_SHAPE_CLEAN_SEGMENT_VIS_NAME,
                               'clean seg map vis')
            _visualize_seg_map(pbar, pair,
                               config.PAIR_SHAPE_WARPED_SEGMENT_MAP_NAME,
                               config.PAIR_SHAPE_WARPED_SEGMENT_VIS_NAME,
                               'warped seg map vis')


def _visualize_seg_map(pbar, pair, map_name, vis_name, desc):
    """Load one segment map, colorize it, and save the visualization.

    No-op when the pair has no data stored under ``map_name``.

    Args:
        pbar: tqdm bar whose description is updated while saving.
        pair: the ExemplarShapePair whose data is read/written.
        map_name: data key of the raw segment map.
        vis_name: data key under which the visualization is saved.
        desc: short label used in the progress description.
    """
    if not pair.data_exists(map_name):
        return
    # Shift labels so background becomes -1 and real segments start at 0.
    seg_map = pair.load_data(map_name) - 1
    seg_map_vis = toolbox.images.visualize_map(
        seg_map, bg_value=-1, values=range(-1, seg_map.max() + 1))
    pbar.set_description(f'{pair.id}: saving {desc}')
    pair.save_data(vis_name, seg_map_vis)
def main():
    """Render model exemplars for all pairs, ordered by alignment distance."""
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess,
            by_shape=False,
            order_by=ExemplarShapePair.distance.asc(),
        )

        print(f"Fetched {count} pairs")

        progress = tqdm(pairs)
        for pair in progress:
            render_model_exemplars(progress, pair)
def main():
    """Render model exemplars for the top-5 pairs of each shape."""
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(
            sess,
            by_shape=True,
            by_shape_topk=5,
            order_by=ExemplarShapePair.distance.asc(),
        )

        print(f"Fetched {count} pairs")

        progress = tqdm(pairs)
        for pair in progress:
            render_model_exemplars(progress, pair)
async def _pair_inference_results(request):
    """Build the template context showing inference figures for each pair.

    Route params: snapshot, model, epoch. Query params: page, page_size,
    num_cols, category, pair_ids (CSV).

    Raises:
        web.HTTPBadRequest: if the snapshot or model route param is missing.
        web.HTTPNotFound: if the snapshot or inference directory is absent.
    """
    snapshot_name = request.match_info.get('snapshot')
    model_name = request.match_info.get('model')
    epoch = request.match_info.get('epoch')
    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    num_cols = int(request.query.get('num_cols', 1))
    category = request.query.get('category')

    pair_ids = request.query.get('pair_ids', None)
    if pair_ids is not None:
        # Tolerate spaces and stray commas in the CSV list.
        pair_ids = pair_ids.replace(' ', '').strip(', ')
        pair_ids = [int(i) for i in pair_ids.split(',')]

    if model_name is None:
        raise web.HTTPBadRequest()
    if snapshot_name is None:
        raise web.HTTPBadRequest()

    # Snapshots may live under 'snapshots' or, failing that, 'lmdb'.
    snapshot_dir = config.BRDF_CLASSIFIER_DIR_REMOTE / 'snapshots' / snapshot_name
    if not snapshot_dir.exists():
        snapshot_dir = config.BRDF_CLASSIFIER_DIR_REMOTE / 'lmdb' / snapshot_name
        if not snapshot_dir.exists():
            raise web.HTTPNotFound()
    inference_dir = (config.BRDF_CLASSIFIER_DIR_REMOTE / 'inference'
                     / snapshot_name / model_name / epoch)
    if not inference_dir.exists():
        raise web.HTTPNotFound()

    # The metadata filename differs between snapshot formats.
    if (snapshot_dir / 'snapshot.json').exists():
        with (snapshot_dir / 'snapshot.json').open('r') as f:
            snapshot_dict = json.load(f)
    else:
        with (snapshot_dir / 'meta.json').open('r') as f:
            snapshot_dict = json.load(f)

    filters = []
    if pair_ids:
        # NOTE(review): this uses `models.ExemplarShapePair` while the branch
        # below uses the bare `ExemplarShapePair` import — presumably the
        # same class; confirm and unify.
        filters.append(models.ExemplarShapePair.id.in_(pair_ids))
    if category is not None:
        filters.append(ExemplarShapePair.shape.has(category=category))

    with session_scope() as sess:
        materials, _ = controllers.fetch_materials(sess)
        mat_by_id = {m.id: m for m in materials}
        pairs, count = controllers.fetch_pairs(
            sess,
            page=page,
            page_size=page_size,
            max_dist=config.INFERENCE_MAX_DIST,
            by_shape_topk=3,
            by_shape=True,
            filters=filters)

        # Collect figures for pairs that actually have inference output.
        pair_inferences = []
        for pair in pairs:
            pair_inference = get_pair_inference_figures(inference_dir, pair)
            if pair_inference is not None:
                pair_inferences.append(pair_inference)

        # random.shuffle(pair_inferences)

        n_pages = int(math.ceil(count / page_size))

        return {
            'snapshot_name': snapshot_name,
            # Older snapshots stored the dataset under 'dataset'.
            'dataset_name': snapshot_dict.get(
                'dataset_name',
                snapshot_dict.get('dataset', '<unknown dataset>')),
            'model_name': model_name,
            'epoch': epoch,
            'cur_page': page,
            'page_size': page_size,
            'n_total': count,
            'n_pages': n_pages,
            'pair_inferences': pair_inferences,
            'mat_by_id': mat_by_id,
            'num_cols': num_cols,
            'col_size': math.ceil(len(pair_inferences) / num_cols),
        }
async def _list_pairs(request):
    """Build a paginated, filtered, sorted listing of exemplar-shape pairs.

    Query params: by_shape, max_dist, source, sort_field, sort_order,
    category, pair_ids (CSV), consistent_segments, page, page_size,
    by_shape_topk.
    """
    by_shape = request.query.get('by_shape', 'true') == 'true'
    # Bug fix: query params arrive as strings, so a caller-supplied
    # ?max_dist= was previously forwarded unconverted to fetch_pairs.
    # Coerce to float (the numeric config default round-trips unchanged).
    max_dist = float(request.query.get('max_dist', config.ALIGN_DIST_THRES))
    source = request.query.get('source', None)
    sort_order = request.query.get('sort_order', 'asc')
    category = request.query.get('category', None)

    # Map the user-facing sort field name onto the ORM column.
    sort_field = {
        'pair_id': ExemplarShapePair.id,
        'exemplar_id': ExemplarShapePair.exemplar_id,
        'shape_id': ExemplarShapePair.shape_id,
        'distance': ExemplarShapePair.distance,
    }.get(request.query.get('sort_field', 'distance'), None)

    pair_ids = request.query.get('pair_ids', None)

    if sort_field is None:
        return {
            'error': 'Invalid value for sort_field',
        }

    if sort_order == 'asc':
        order_by = sort_field.asc()
    elif sort_order == 'desc':
        order_by = sort_field.desc()
    else:
        return {
            'error': 'Invalid value for sort_order',
        }

    filters = []
    if pair_ids:
        pair_ids = [int(i) for i in pair_ids.split(',')]
        filters.append(ExemplarShapePair.id.in_(pair_ids))
    if source:
        filters.append(ExemplarShapePair.shape.has(source=source))
    if category is not None:
        filters.append(ExemplarShapePair.shape.has(category=category))

    consistent_segments = \
        request.query.get('consistent_segments', 'false') == 'true'
    if consistent_segments:
        # A pair can only be labeled consistently when its shape has at
        # least as many segments as the exemplar has substances.
        filters.append(
            ExemplarShapePair.num_segments >= ExemplarShapePair.num_substances)
        filters.append(ExemplarShapePair.num_segments.isnot(None))

    page = int(request.query.get('page', 0))
    page_size = int(request.query.get('page_size', 100))
    by_shape_topk = int(request.query.get('by_shape_topk', 5))

    with session_scope() as sess:
        pairs, count = controllers.fetch_pairs(sess,
                                               by_shape=by_shape,
                                               by_shape_topk=by_shape_topk,
                                               max_dist=max_dist,
                                               filters=filters,
                                               page_size=page_size,
                                               page=page,
                                               order_by=order_by)

        n_pages = int(math.ceil(count / page_size))

        return {
            'pairs': pairs,
            'cur_page': page,
            'page_size': page_size,
            'n_total': count,
            'n_pages': n_pages,
        }