"""Visual-inspection script: open a figure of 3x4 subplots for each of the
first 1000 validation samples whose label equals ``class_to_filter``.

Reconstructed from a whitespace-mangled one-liner: the original chunk had all
newlines stripped, which (a) made the imports a SyntaxError and (b) let the
first ``#`` accidentally comment out everything after it. The statement
sequence below is exactly the visible content, re-indented.
"""
import matplotlib.pyplot as plt
import numpy as np
import pylab
import matplotlib.cm as cm
# NOTE(review): bare `import Image` is the pre-Pillow PIL spelling; modern
# installs need `from PIL import Image` — confirm the target environment.
import Image
import load_class

# Project-local loader; the argument is presumably a data/size ratio of 1 —
# TODO confirm against load_class.load's signature.
load = load_class.load(1)

# Commented-out demo kept from the original: greyscale display of two PNGs.
# f = pylab.figure()
# for n, fname in enumerate(('1.png', '2.png')):
#     image = Image.open(fname).convert("L")
#     arr = np.asarray(image)
#     f.add_subplot(2, 1, n)  # this line outputs images on top of each other
#     # f.add_subplot(1, 2, n)  # this line outputs images side-by-side
#     pylab.imshow(arr, cmap=cm.Greys_r)
#     pylab.title('Double image')
# pylab.show()

# Here load_validation_set() returns two values; other scripts in this
# project unpack three (with indices) — verify which loader version is used.
x_validate, labels_validate = load.load_validation_set()

class_to_filter = 0
aantal = 0  # Dutch for "count"; never incremented in the visible code — TODO confirm
for i, j in enumerate(labels_validate[0:1000]):
    if j == class_to_filter:
        # One figure per matching sample; the visible chunk only lays out the
        # 3x4 grid — any imshow/show calls fall outside this chunk.
        f = pylab.figure()
        for k in range(12):
            f.add_subplot(3, 4, k + 1)
# NOTE(review): whitespace-mangled chunk — an entire file section collapsed onto
# one line, so as written it is a SyntaxError and everything after the first `#`
# (the disabled Dutch debug print) is accidentally commented out. Do not edit
# in place; recover the original line breaks from version control.
#
# Readable content, in order:
#  1. The tail of a minibatch *generator* (it yields `inputs[excerpt],
#     targets[excerpt]`) whose `def` header lies outside this chunk. It builds
#     `indices` by skipping the positions listed in
#     `oneshot_indices[oneshot_class]` (when `oneshot_class > -1`), optionally
#     shuffles, then yields full batches of `batch_size` — the final partial
#     batch is dropped by the `len(indices) - batch_size + 1` bound.
#     `inputs`, `targets`, `oneshot_indices`, `batch_size`, `shuffle`, `i` are
#     all presumably parameters/locals of the enclosing def — TODO confirm.
#  2. Script prologue: project-local `load_class` / `convnet` setup, then a
#     `try:` that probes `/home/jasper/oneshot-gestures/output/acc-cost_N.csv`
#     for the first unused N and opens it for a CSV header write. The file
#     handle `fo1` is never closed in the visible code, and the trailing bare
#     `except:` (its body is outside this chunk) would swallow everything
#     including KeyboardInterrupt — both worth fixing once un-mangled.
indices=[] for index in np.arange(len(inputs)): if ( oneshot_class > -1 and i < len(oneshot_indices[oneshot_class]) and index == oneshot_indices[oneshot_class][i]): i+=1 #print("Index {} overeenkomstig label {}".format(index,targets[index]));sys.stdout.flush() else: indices.append(index) if shuffle: np.random.shuffle(indices) for start_idx in range(0, len(indices) - batch_size + 1, batch_size): excerpt = indices[start_idx:start_idx + batch_size] yield inputs[excerpt], targets[excerpt] import load_class load = load_class.load(data_ratio) import convnet convnet = convnet.convnet(20) try: i = 0 path = "/home/jasper/oneshot-gestures/" while os.path.exists("{}output/acc-cost_{}.csv".format(path,i)): i += 1 fo1 = open("{}output/acc-cost_{}.csv".format(path,i), "w") fo1.write("training_loss;validation_loss;validation_accuracy;epoch_time\n") except IOError as e: print("I/O error({0}): {1}".format(e.errno, e.strerror)) raise except:
# NOTE(review): whitespace-mangled chunk like the one above — newlines were
# stripped, so this line is a SyntaxError as written. Recover the original
# formatting from version control rather than editing in place.
#
# Readable content, in order:
#  1. The tail of a minibatch generator (yields `inputs[excerpt],
#     targets[excerpt]`): same exclude-the-one-shot-indices / shuffle / fixed
#     `batch_size` slicing logic as the sibling chunk, minus the
#     `oneshot_class > -1` guard. Its `def` header and the bindings of
#     `inputs`, `targets`, `oneshot_indices`, `oneshot_class`, `batch_size`,
#     `shuffle`, `i`, `index`, `indices` all lie outside this chunk.
#  2. One-shot evaluation prologue: loads validation/training/testing sets
#     (three-value unpack, including indices), builds a 20-output convnet,
#     snapshots default params, then iterates `oneshot_class` over the 20
#     classes, reloading per-class excluded-training parameters from
#     `convnet_params/param-excl-class-N`.
#  3. Python 2 markers: `xrange` (and the odd spacing in `xrange(20 )`) —
#     this file predates a py3 port; `save_param_patch` is presumably a typo
#     for "path" — TODO confirm before renaming, it is used consistently here.
if ( i < len(oneshot_indices[oneshot_class]) and index == oneshot_indices[oneshot_class][i]): i+=1 else: indices.append(index) if shuffle: np.random.shuffle(indices) for start_idx in range(0, len(indices) - batch_size + 1, batch_size): excerpt = indices[start_idx:start_idx + batch_size] yield inputs[excerpt], targets[excerpt] base_dir_path = "/home/jasper/oneshot-gestures/" test_accuracies = [] load = load_class.load(size_ratio=1.0) # Load data x_validate, labels_validate, indices_validate = load.load_validation_set() x_train, labels_train, indices_train = load.load_training_set() x_test, labels_test, indices_test = load.load_testing_set() convnet = convnet.convnet(num_output_units=20) convnet.save_param_values("{}/default_param".format(base_dir_path)) for oneshot_class in xrange(20 ): print("Learning gestures excluding class {}".format(oneshot_class)) save_param_patch = "{}convnet_params/param-excl-class-{}".format(base_dir_path,oneshot_class) convnet.load_param_values(save_param_patch)