def _initialize_layers(self, layer_args: Sequence[AttrDict]) -> None:
    """Instantiate all hidden layers from `layer_args`.

    The last element of `layer_args` is skipped: it describes the output
    layer, whose width depends on the dataset and is therefore created
    later (see `train`, which appends it once `dataset.out_dim` is known).

    Parameters
    ----------
    layer_args
        one argument dict per layer, in network order; each dict is
        splatted into `initialize_layer`
    """
    # Comprehension replaces the original append loop; the original also
    # kept a `count` variable that was incremented but never read.
    self.layers = [initialize_layer(**args) for args in layer_args[:-1]]
def train(self, dataset: Dataset, epochs: int) -> None:
    """Train the neural network using the provided dataset for `epochs` epochs.

    One epoch comprises one full pass through the entire dataset, or in
    case of stochastic gradient descent, one epoch comprises seeing as
    many samples from the dataset as there are elements in the dataset.

    Parameters
    ----------
    dataset
        training dataset
    epochs
        number of epochs to train for
    """
    # The output layer's width depends on `dataset.out_dim`, so it is
    # created here rather than in `_initialize_layers`. Guard so that a
    # repeated `train` call does not append a second output layer (the
    # original unconditional append corrupted the network on re-training):
    # `_initialize_layers` built len(layer_args) - 1 layers, so the full
    # network has exactly len(self.layer_args) layers.
    if len(self.layers) < len(self.layer_args):
        args = self.layer_args[-1]
        # NOTE(review): this mutates the stored arg dict in place, as the
        # original code did — kept for behavioral compatibility.
        args["n_out"] = dataset.out_dim
        self.layers.append(initialize_layer(**args))

    for epoch in range(epochs):
        train_loss, train_error = self._train_epoch(dataset, epoch)
        self._log(train_loss, train_error)

        val_loss, val_error, Y, Y_hat = self._validate_epoch(dataset)
        self._log(val_loss, val_error, validation=True)

        # Show the last validation batch as a qualitative sanity check.
        print("Example target: {}".format(Y[0]))
        print("Example prediction: {}".format(
            [round(x, 4) for x in Y_hat[0]]))
        print(
            "Epoch {} Training Loss: {} Training Accuracy: {} Val Loss: {} Val Accuracy: {}"
            .format(
                epoch,
                round(train_loss, 4),
                round(1 - train_error, 4),
                round(val_loss, 4),
                round(1 - val_error, 4),
            ))

def _train_epoch(self, dataset, epoch):
    """Run one training pass over `dataset`; return (mean loss, mean error)."""
    losses = []
    errors = []
    for _ in tqdm(range(dataset.train.samples_per_epoch)):
        X, Y = dataset.train.sample()
        Y_hat = self.forward(X)
        L = self.backward(np.array(Y), np.array(Y_hat))
        error = self.error(Y, Y_hat)
        # Pass the epoch index through: the update rule (e.g. a learning
        # rate schedule) may depend on it, as in the original code.
        self.update(epoch)
        losses.append(L)
        errors.append(error)
    return np.mean(losses), np.mean(errors)

def _validate_epoch(self, dataset):
    """Run one validation pass (no parameter updates).

    Returns (mean loss, mean error, last targets, last predictions); the
    final batch is returned so the caller can print an example.
    """
    losses = []
    errors = []
    for _ in range(dataset.validate.samples_per_epoch):
        X, Y = dataset.validate.sample()
        Y_hat = self.forward(X)
        losses.append(self.loss.forward(Y, Y_hat))
        errors.append(self.error(Y, Y_hat))
    return np.mean(losses), np.mean(errors), Y, Y_hat