class AugmentingParallelBatchIterator(ParallelBatchIterator):
    """
    Randomly changes images in the batch. Behaviour can be defined in params.py.
    """

    def __init__(self, keys, batch_size, std, mean, coates_features=None, y_all=None, n_eyes=1):
        super(AugmentingParallelBatchIterator, self).__init__(
            keys, batch_size, std, mean, coates_features, y_all, n_eyes=n_eyes)
        # One augmentation engine per iterator, reused for every batch.
        self.augmenter = Augmenter()

    def transform(self, Xb, yb):
        # Augment first, then delegate to the parent class, which
        # performs the normalization step.
        augmented = self.augmenter.augment(Xb)
        augmented, yb = super(AugmentingParallelBatchIterator, self).transform(augmented, yb)
        return augmented, yb
def read_and_augment(keys):
    """Load the training JPEGs named by *keys*, scale them, and augment.

    Returns an array of shape (len(keys), CHANNELS, PIXELS, PIXELS),
    float32, with pixel values divided by 256 before augmentation.
    """
    augmenter = Augmenter()
    batch = np.zeros((len(keys), CHANNELS, PIXELS, PIXELS), dtype=np.float32)

    for slot, key in enumerate(keys):
        # imread yields HxWxC; transpose to CxHxW and rescale from [0, 255].
        raw = scipy.misc.imread(IMAGE_SOURCE + "/" + 'train' + "/" + key + ".jpeg")
        batch[slot] = raw.transpose(2, 0, 1) / 256.0

    return augmenter.augment(batch)
def read_and_augment(keys):
    """Read training images for *keys* and return the augmented batch.

    Each image is read from IMAGE_SOURCE/train/<key>.jpeg, transposed
    from HxWxC to CxHxW, and scaled into [0, 1) via division by 256.
    """
    augmenter = Augmenter()
    stack = np.zeros((len(keys), CHANNELS, PIXELS, PIXELS), dtype=np.float32)
    for position, key in enumerate(keys):
        pixels = scipy.misc.imread(IMAGE_SOURCE + "/" + 'train' + "/" + key + ".jpeg").transpose(2, 0, 1)
        stack[position] = pixels / 256.0
    return augmenter.augment(stack)
class AugmentingParallelBatchIterator(ParallelBatchIterator):
    """
    Randomly changes images in the batch. Behaviour can be defined in params.py.
    """

    # PEP 8: no spaces around `=` for keyword defaults (was `coates_features = None`).
    def __init__(self, keys, batch_size, std, mean, coates_features=None, y_all=None, n_eyes=1):
        super(AugmentingParallelBatchIterator, self).__init__(
            keys, batch_size, std, mean, coates_features, y_all, n_eyes=n_eyes)
        # Initialize the augmenter once; it is applied to every batch.
        self.augmenter = Augmenter()

    def transform(self, Xb, yb):
        """Augment the batch, then normalize it via the parent transform."""
        Xbb = self.augmenter.augment(Xb)
        # Do normalization in super-method.
        Xbb, yb = super(AugmentingParallelBatchIterator, self).transform(Xbb, yb)
        return Xbb, yb
print "Training for {} epochs".format(num_epochs) curves = {'train_loss': [], 'val_loss': [], 'val_acc': []} for epoch in range(num_epochs): # In each epoch, we do a full pass over the training data... train_err = 0 train_batches = 0 start_time = time.time() aug_time = 0 for batch in tqdm(iterate_minibatches(train_X, train_y, 256, shuffle=True)): inputs, targets = batch if params.AUGMENT: pre_aug = time.time() inputs_augmented = a.augment(inputs) aug_time+= (time.time() - pre_aug) #Show unaugmented and augmented images #visualize_data(np.append(inputs[:8],inputs_augmented[:8],axis=0).transpose(0,2,3,1)) inputs_augmented = normalize.normalize(inputs_augmented, mean, std) train_err += train_fn(inputs_augmented, targets) else: #print inputs.shape,targets.shape train_err += train_fn(inputs, targets) train_batches += 1 #print "Augmentation time: ", aug_time # ...and a full pass over the validation data val_err = 0