def evaluate_config(config):
    """Evaluate sampled plus-shape contours for one trained configuration.

    Loads ``results/<config>_0_sample.npy`` (dots in *config* replaced by
    dashes), fits a plus shape to each traced Fourier curve, records IoU,
    DICE and (max/avg) Hausdorff distances per curve, then saves the four
    per-config means to ``results/<config>_0.npy``.

    Any exception is caught and logged so one bad config does not abort a
    batch evaluation run (deliberate best-effort boundary).
    """
    data_model = PlusShapeModel()
    try:
        # One stem for both the input sample and the output metrics file.
        stem = f'results/{config.replace(".", "-")}_0'
        results = {'IoU': [], 'DICE': [], 'max_h': [], 'avg_h': []}
        sample = np.load(f'{stem}_sample.npy')
        curves = data_model.trace_fourier_curves(sample)
        # Denser trace for the Hausdorff distances, which are sensitive to
        # the sampling resolution of the reference curve.
        curves_dense = data_model.trace_fourier_curves(sample, n_points=1000)
        for j, (curve, curve_dense) in enumerate(zip(curves, curves_dense)):
            points = torch.tensor(curve).float().cpu()
            params = fit_plus_shape_to_points(points)
            iou, dice = iou_and_dice_plus_shape(params, points)
            max_h, avg_h = max_and_avg_hausdorff_distance_plus_shape(
                params, curve_dense)
            print(config, j, iou, dice, max_h, avg_h, flush=True)
            results['IoU'].append(iou)
            results['DICE'].append(dice)
            results['max_h'].append(max_h)
            results['avg_h'].append(avg_h)
        # Mean of each metric over all sampled shapes.
        iou = np.mean(results['IoU'])
        dice = np.mean(results['DICE'])
        max_h = np.mean(results['max_h'])
        avg_h = np.mean(results['avg_h'])
        print(iou, dice, max_h, avg_h)
        np.save(stem, np.array([iou, dice, max_h, avg_h]))
    except Exception as e:
        # Log and continue so sibling configs still get evaluated.
        print(f'ERROR with config "{config}"')
        print(e)
        traceback.print_exc()
'n_reflections': c['ndim_x'] }, name=f'perm_{i+1}')) nodes.append( Node(nodes[-1], AffineCoupling, { 'F_class': F_fully_connected, 'F_args': { 'internal_size': c['hidden_layer_sizes'] } }, name=f'ac_{i+1}')) nodes.append(OutputNode(nodes[-1], name='z')) model = ReversibleGraphNet(nodes, verbose=False) model.to(c['device']) model.params_trainable = list( filter(lambda p: p.requires_grad, model.parameters())) def model_inverse(test_z): return model([test_z], rev=True) c['model'] = model c['model_inverse'] = model_inverse # create namedtuple from config dictionary c = namedtuple("Configuration", c.keys())(*c.values())
# Build the x-lane of the flow: Householder permutations alternating with
# hierarchical affine coupling (HAC) blocks, one pair per configured block.
for i in range(c['n_blocks']):
    if i > 0:
        # No permutation before the very first coupling block.
        x_lane.append(Node(x_lane[-1], HouseholderPerm,
                           {'fixed': False,
                            'n_reflections': c['ndim_x']},
                           name=f'perm_{i}'))
    # Coupling subnetwork widths halve at each internal level; recursion
    # depth bounds how often the HAC block splits its input.
    x_lane.append(Node(x_lane[-1], HierarchicalAffineCouplingBlock,
                       {'c_internal': [c['hidden_layer_sizes'],
                                       c['hidden_layer_sizes']//2,
                                       c['hidden_layer_sizes']//4,
                                       c['hidden_layer_sizes']//8],
                        'max_splits': c['recursion_depth']},
                       name=f'hac_{i+1}'))
x_lane.append(OutputNode(x_lane[-1], name='z'))

model = ReversibleGraphNet(x_lane, verbose=False)
model.to(c['device'])
# Only parameters that require gradients are handed to the optimizer.
model.params_trainable = list(filter(lambda p: p.requires_grad,
                                     model.parameters()))

def model_inverse(test_z):
    # NOTE(review): sibling configurations call model([test_z], rev=True);
    # here the argument is passed unwrapped — confirm which form the
    # callers of this architecture expect.
    return model(test_z, rev=True)

c['model'] = model
c['model_inverse'] = model_inverse

# create namedtuple from config dictionary
c = namedtuple("Configuration", c.keys())(*c.values())
# create namedtuple from config dictionary c = namedtuple("Configuration",c.keys())(*c.values()) ############################## ### MODEL ARCHITECTURE ### ############################## nodes = [InputNode(c.ndim_x, name='x')] for i in range(c.n_blocks): nodes.append(Node(nodes[-1], HouseholderPerm, {'fixed': False, 'n_reflections': c.ndim_x}, name=f'perm_{i+1}')) nodes.append(Node(nodes[-1], AffineCoupling, {'F_class': F_fully_connected, 'F_args': {'internal_size': c.hidden_layer_sizes}}, name=f'ac_{i+1}')) nodes.append(OutputNode(nodes[-1], name='z')) model = ReversibleGraphNet(nodes, verbose=False) model.to(c.device) def model_inverse(test_z): return model([test_z], rev=True)