def _train_impl(self, X, y):
    """Train the wrapped network on the samples ``X`` with targets ``y``.

    Parameters
    ----------
    X : array-like
        Input samples.  When ``self.spec.is_convolution`` is set, ``X`` is
        reshaped to its first three axes before training — this assumes the
        trailing axes are compatible with that reshape (TODO confirm against
        the spec's expected input layout).
    y : array-like
        Training targets, aligned element-wise with ``X``.

    Returns
    -------
    self
        Returned to allow scikit-learn-style call chaining.
    """
    if self.spec.is_convolution:
        # Convolutional specs take 3-D input; collapse to the first 3 axes.
        X = X.reshape(X.shape[:3])
    self.iterations = 0
    # Materialize the (sample, target) pairs: on Python 3 ``zip`` is a
    # one-shot iterator, and the dataset is iterated once per epoch, so a
    # bare iterator would be exhausted after the first pass.
    data = list(zip(X, y))
    self.dataset = SequentialDataset(data)
    minibatches = MiniBatches(self.dataset, batch_size=20)
    self.trainer.run(minibatches, controllers=self.controllers)
    return self
char_vectors.append( np.eye(1, 26, char_code - ord("a"), dtype=FLOATX)[0]) if len(char_vectors) >= 20: continue word_matrix = np.vstack(char_vectors) data.append((word_matrix, label)) # Shuffle the data random.Random(3).shuffle(data) # Separate data valid_size = int(len(data) * 0.15) train_set = data[valid_size:] valid_set = data[:valid_size] dataset = SequentialDataset(train_set, valid=valid_set) dataset.pad_left(20) dataset.report() batch_set = MiniBatches(dataset) if __name__ == '__main__': model = NeuralClassifier(input_dim=26, input_tensor=3) model.stack( RNN(hidden_size=30, input_type="sequence", output_type="sequence", vector_core=0.1), RNN(hidden_size=30, input_type="sequence", output_type="sequence",