import sys
import itertools

import numpy
import matplotlib.pyplot as plt


def variance_verify_deformation(img, landmarks, filename):
    # Load the trained deformation network.
    sys.path.append('../nn')
    import nn_lib
    net = nn_lib.Net()
    # net.Load("../nn/deformation_model_10-12-2014_12-54-16.bin")
    net.Load(filename)

    N = 50000
    # Select every 4th column from each half of the 50-d target;
    # sel_label_other is the same pattern offset by 2.
    sel_label = range(0, 25, 4) + range(25, 50, 4)
    sel_label_other = range(2, 25, 4) + range(27, 50, 4)

    # Parse the per-layer output dimensions (returned as a string).
    all_network_dims = eval(net.GetAllLayerOutputDims())
    print all_network_dims

    # Pre-allocate one response matrix per layer: N samples x flattened size.
    layer_outputs = []
    for dims in all_network_dims:
        mat_size = dims[0] * dims[1] * dims[2]
        layer_outputs.append(numpy.zeros((N, mat_size), dtype='f4'))

    # Dump all responses.
    ps, deforms = dump_deformed_images(img, landmarks, 'backward', N)
    net.DumpAllResponses(deforms, layer_outputs)
    # net.EvaluateRegression(deforms, labels_est)

    threshold = 2.0
    sel_data_label = selector(ps, sel_label, 0, threshold)
    sel_data_other = selector(ps, sel_label_other, 0, threshold)

    # Compare response variance over the two selections at every layer.
    for i in range(len(all_network_dims)):
        var_label = verify_variance(sel_data_label, layer_outputs[i])
        var_other = verify_variance(sel_data_other, layer_outputs[i])
        print "Variance_label in layer %d = %f" % (i, var_label)
        print "Variance_other in layer %d = %f" % (i, var_other)
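
# selector, verify_variance, and dump_deformed_images are helpers defined
# elsewhere in this repo. A minimal sketch of the first two, inferred only
# from how they are called above (names, signatures, and semantics here are
# assumptions, not the repo's actual implementation): selector is read as
# returning a boolean mask of samples whose selected target columns stay
# within `threshold` of `center`, and verify_variance as a scalar summary of
# the response spread over that subset.
def selector_sketch(ps, sel, center, threshold):
    # Keep samples whose selected columns all lie within threshold of center.
    return numpy.all(numpy.abs(ps[:, sel] - center) < threshold, axis=1)


def verify_variance_sketch(mask, responses):
    # Mean per-dimension variance of the layer responses over the subset.
    return float(numpy.mean(numpy.var(responses[mask], axis=0)))
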
def lipschitz_verify_deformation(img, landmarks, filename):
    # Load the trained deformation network.
    sys.path.append('../nn')
    import nn_lib
    net = nn_lib.Net()
    # net.Load("../nn/deformation_model_10-12-2014_12-54-16.bin")
    net.Load(filename)

    # Select every 4th column from each half of the 50-d target.
    sel_label = range(0, 25, 4) + range(25, 50, 4)

    # Dump all responses.
    all_network_dims = eval(net.GetAllLayerOutputDims())
    print all_network_dims

    # Build a list of numpy arrays, one response matrix per layer.
    N = 2000
    layer_outputs = []
    for dims in all_network_dims:
        mat_size = dims[0] * dims[1] * dims[2]
        layer_outputs.append(numpy.zeros((N, mat_size), dtype='f4'))

    ps, deforms = dump_deformed_images(img, landmarks, 'backward', N)
    net.DumpAllResponses(deforms, layer_outputs)

    # Squash the selected targets through a sigmoid.
    # labels = (ps[:, sel_label] > 0).astype('f4')
    labels = 1.0 / (1.0 + numpy.exp(-ps[:, sel_label]))

    # For each layer, find pairwise distances.
    pw_images = pairwise_l2_dist(deforms)
    pw_labels = pairwise_l2_dist(labels)

    colors = ['r', 'g', 'b', 'c']
    styles = ['-', '--']
    cs = list(itertools.product(styles, colors))
    for i in range(len(all_network_dims)):
        # print "Size at layer " + str(i) + ":" + str(layer_outputs[i].shape)
        pw_layer_i = pairwise_l2_dist(layer_outputs[i])
        Gs, gs, max_G = compute_lipschitz(pw_layer_i, pw_labels)
        print "layer %d: max_G = %f" % (i, max_G)
        plt.plot(Gs, gs, cs[i][0] + cs[i][1], label="Layer %d" % i)

    # Reference curves: labels against themselves, and raw input images.
    Gs, gs, max_G = compute_lipschitz(pw_labels, pw_labels)
    print "GroundTruth layer: max_G = %f" % max_G
    plt.plot(Gs, gs, 'b-', label="GroundTruth")

    Gs, gs, max_G = compute_lipschitz(pw_images, pw_labels)
    print "Input image: max_G = %f" % max_G
    plt.plot(Gs, gs, 'k-', label="InputImage")

    plt.legend()
    plt.show()

    # Drop into the debugger for interactive inspection.
    import pdb
    pdb.set_trace()
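
# pairwise_l2_dist and compute_lipschitz are likewise defined elsewhere. A
# minimal sketch, assuming pairwise_l2_dist returns the condensed vector of
# L2 distances over all sample pairs, and compute_lipschitz estimates an
# empirical Lipschitz curve by binning the per-pair ratio of output distance
# to input distance (the binning scheme here is an illustration, not
# necessarily the repo's):
def pairwise_l2_dist_sketch(X):
    import scipy.spatial.distance
    # Condensed distance vector of length N*(N-1)/2.
    return scipy.spatial.distance.pdist(X.reshape(X.shape[0], -1))


def compute_lipschitz_sketch(pw_x, pw_y, nbins=50):
    eps = 1e-8
    # Per-pair ratio; its maximum bounds the empirical Lipschitz constant.
    ratios = pw_y / (pw_x + eps)
    order = numpy.argsort(pw_x)
    # Average input distance and worst-case ratio in equal-count bins.
    bins = numpy.array_split(order, nbins)
    Gs = numpy.array([pw_x[b].mean() for b in bins])
    gs = numpy.array([ratios[b].max() for b in bins])
    return Gs, gs, float(ratios.max())
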
output_num = nn_letters_train.get_outputs_num()
layer_depths = [input_num, 3, 3, output_num]

# Depth of the network is the greatest layer depth.
net_depth = max(layer_depths)
# Number of layers in the network is the length of the depths list.
net_layer = len(layer_depths)
# Each weight is initialized to the same value.
initial_weight = 0.5
# Each neuron's bias is initialized to the same value.
initial_bias = 0.1
# Learning rate.
N = 0.5

# Create and display the training dataset from nn_train_data.py.
train_data = nn_letters_train.create_train_set()
print 'Training Data: ASCII art of a letter'
print 'Output 1 if the image is an A, and 0 if it is not', '\n\n'
for elem in train_data:
    print elem
    print ''

# Initialize a network.
net0 = nn_lib.Net(net_depth, net_layer, layer_depths, initial_weight,
                  initial_bias, N)

# One time only, before running: set weights and biases to unique values
# derived from each neuron's location.
net0.set_wb()

# Train the network.
net0.train(train_data)
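
# A quick post-training sanity check, assuming each training element is an
# (inputs, target) pair and that Net exposes a feed-forward accessor (the
# name `feed_forward` below is hypothetical; only set_wb and train appear
# above):
for inputs, target in train_data:
    prediction = net0.feed_forward(inputs)  # hypothetical accessor
    print 'target:', target, 'prediction:', prediction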