def get_testing_data(self):
    """Load the CIFAR-10 test batch into self.test_data / self.test_label.

    Pixels are rescaled from raw bytes [0, 255] to [-1, 1]; the flat
    3072-wide rows are split into R/G/B planes and stacked into a
    (N, 32, 32, 3) NHWC array.
    """
    batch = gd.unpickle(self.file_loc + "test_batch")
    pixels = batch[b'data']
    self.test_label = np.asarray(batch[b'labels'])
    # (x/255 - 0.5)/0.5 maps [0, 255] onto [-1, 1].
    pixels = (pixels.astype(np.float32) / 255.0 - 0.5) / 0.5
    # Channel c occupies columns [c*1024, (c+1)*1024) of each row.
    planes = [pixels[:, c * 1024:(c + 1) * 1024].reshape((-1, 32, 32, 1))
              for c in range(3)]
    self.test_data = np.concatenate(planes, 3)
def get_noisy_data(self):
    """Load the five CIFAR-10 training batches with additive Gaussian
    noise (sigma=0.01) into self.train_data / self.train_label.

    Pixels are rescaled from [0, 255] to [-1, 1] before noise is added.
    """
    for batch_idx in range(5):
        batch = gd.unpickle(self.file_loc + "data_batch_" + str(batch_idx + 1))
        pixels = batch[b'data']
        self.train_label[batch_idx] = np.asarray(batch[b'labels'])
        pixels = (pixels.astype(np.float32) / 255.0 - 0.5) / 0.5
        channels = []
        for c in range(3):
            plane = pixels[:, c * 1024:(c + 1) * 1024].reshape((-1, 32, 32, 1))
            # NOTE(review): the noise sample is (32, 32, 1), so numpy
            # broadcasts the SAME noise pattern across every image in the
            # batch (one fresh draw per channel). Confirm this is intended
            # rather than independent per-image noise.
            plane = plane + np.random.normal(scale=0.01, size=[32, 32, 1])
            channels.append(plane)
        self.train_data[batch_idx] = np.concatenate(channels, 3)
def get_occluded_data(self, p):
    """Load the CIFAR-10 test batch with a gray occlusion patch.

    p enumerates patch positions on a (33 - p_size) x (33 - p_size)
    grid: the patch's top-left corner is at row p // (33 - self.p_size),
    column p % (33 - self.p_size). The occluded square of side
    self.p_size is set to 0.5 in every channel (mid-gray in the
    normalized [-1, 1] range). Results go to self.test_data /
    self.test_label.
    """
    data_dict = gd.unpickle(self.file_loc + "test_batch")
    input_data = data_dict[b'data']
    self.test_label = np.asarray(data_dict[b'labels'])
    input_data = (input_data.astype(np.float32) / 255.0 - 0.5) / 0.5
    # BUG FIX: the original used `p/(33-self.p_size)`, which is only an
    # integer under Python 2; under Python 3 `/` yields a float and the
    # slice below raises TypeError. Use explicit floor division (same
    # result under Python 2, correct under Python 3) and hoist the
    # repeated position arithmetic.
    span = 33 - self.p_size
    row = p // span
    col = p % span
    channels = []
    for c in range(3):
        plane = input_data[:, c * 1024:(c + 1) * 1024].reshape((-1, 32, 32, 1))
        plane[:, row:row + self.p_size, col:col + self.p_size, 0] = 0.5
        channels.append(plane)
    self.test_data = np.concatenate(channels, 3)
def get_rotated_data(self):
    """Load the five CIFAR-10 training batches; with probability 1/2
    per batch, swap the height/width axes of every image in the batch.

    Pixels are rescaled from [0, 255] to [-1, 1]. Results go to
    self.train_data / self.train_label.
    """
    for batch_idx in range(5):
        batch = gd.unpickle(self.file_loc + "data_batch_" + str(batch_idx + 1))
        pixels = batch[b'data']
        self.train_label[batch_idx] = np.asarray(batch[b'labels'])
        pixels = (pixels.astype(np.float32) / 255.0 - 0.5) / 0.5
        # One coin flip per batch: either every image is axis-swapped
        # or none is.
        flip = np.random.randint(2)
        channels = []
        for c in range(3):
            plane = pixels[:, c * 1024:(c + 1) * 1024].reshape((-1, 32, 32, 1))
            if flip:
                # NOTE(review): swapping axes 1 and 2 is a reflection
                # about the main diagonal, not a 90-degree rotation,
                # despite the method name — confirm intent.
                plane = np.transpose(plane, [0, 2, 1, 3])
            channels.append(plane)
        self.train_data[batch_idx] = np.concatenate(channels, 3)
def get_training_data(self):
    """Download/extract CIFAR-10 if missing, then load the five training
    batches into self.train_data / self.train_label.

    Pixels are rescaled from [0, 255] to [-1, 1]; each batch is
    multiplied element-wise by a fresh mask from self.get_patch()
    (zeroing the masked region). Sets self.file_loc to the extracted
    directory as a side effect.
    """
    # The download/extract step is loop-invariant; the original
    # re-checked it on every one of the five iterations. Do it once.
    if not os.path.isfile('./cifar-10-python.tar.gz'):
        print("[!] File not found")
        print("[*] Downloading")
        fname, headers = urlretrieve(
            'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz',
            './cifar-10-python.tar.gz')
        if fname.endswith('tar.gz'):
            # BUG FIX: tarfile.open() expects a mode such as 'r:gz';
            # the original passed 'tar.gz', which raises ValueError.
            tar = tarfile.open(fname, 'r:gz')
            try:
                tar.extractall()
            finally:
                tar.close()
            print("[*] Extracted")
    self.file_loc = "./cifar-10-batches-py/"
    for i in range(5):
        data_dict = gd.unpickle(self.file_loc + "data_batch_" + str(i + 1))
        input_data = data_dict[b'data']
        self.train_label[i] = np.asarray(data_dict[b'labels'])
        input_data = (input_data.astype(np.float32) / 255.0 - 0.5) / 0.5
        # Fresh mask per batch; multiplying applies the occlusion patch.
        self.patch = self.get_patch()
        img_R = input_data[:, 0:1024].reshape((-1, 32, 32, 1)) * self.patch
        img_G = input_data[:, 1024:2048].reshape((-1, 32, 32, 1)) * self.patch
        img_B = input_data[:, 2048:3072].reshape((-1, 32, 32, 1)) * self.patch
        self.train_data[i] = np.concatenate((img_R, img_G, img_B), 3)