Example #1
import time

import torch
from torch.autograd import Variable

import Save_import


def validation_loop(val_loader, network, epoch, parameters, timer_epoch):
    """
    validation_loop do a loop over the validation set
    :param val_loader: Dataloader which contains input and target of the validation dataset
    :param network: Network that will be learned
    :param epoch: Actual epoch of the program
    :param parameters: List of parameters of the network
    :param timer_epoch: The time since the beginning of the epoch
    :return: The mean validation_error over the entire validation set. This function also save this error.
    """
    # validation_error accumulates the error over the validation set
    validation_error = 0

    # Compute and save the error for each batch of the validation dataset
    for i, (x_val_batch, y_val_batch, _) in enumerate(val_loader):

        if torch.cuda.is_available():  # TODO It is useless to create a Variable for y
            x_val_batch, y_val_batch = Variable(x_val_batch.cuda()), Variable(y_val_batch.cuda())
        else:
            x_val_batch, y_val_batch = Variable(x_val_batch), Variable(y_val_batch)

        validation_error += Save_import.save_error(x=x_val_batch, y=y_val_batch,
                                                   network=network,
                                                   epoch=epoch,
                                                   set_type="validation",
                                                   parameters=parameters)

        with open(parameters.path_print, 'a') as txtfile:
            txtfile.write(
                "\nEpoch : " + str(epoch) + ". Batch : " + str(i) + ".\nValidation error : " + str(
                    validation_error / (i + 1)) +
                ".\nTime total batch : " + Save_import.time_to_string(time.time() - timer_epoch) + "\n \n")

    # Divide by the number of batches to get the mean validation error
    return validation_error / (i + 1)
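
Note that Variable has been a deprecated no-op wrapper since PyTorch 0.4: tensors carry autograd state directly, and inference should run under torch.no_grad(). Below is a minimal sketch of the equivalent device-transfer step in modern PyTorch, assuming val_loader and network exist as in the example above; it is not part of the original project.

# A minimal sketch (not from the original project) of the same validation
# pass in PyTorch >= 0.4, where Variable is deprecated and tensors carry
# autograd state directly. val_loader and network are assumed to exist
# as in the example above.
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

with torch.no_grad():  # gradients are not needed during validation
    for i, (x_val_batch, y_val_batch, _) in enumerate(val_loader):
        x_val_batch = x_val_batch.to(device)
        y_val_batch = y_val_batch.to(device)
        # ... accumulate the validation error exactly as above ...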
Example #2
import time

import torch
from torch.autograd import Variable

import Loss_Error
import Save_import


def batch_loop(optimizer, train_loader, network, epoch, parameters, timer_batch, timer_epoch, inter_union=None):
    """
    Run one pass over the training set and update the network after each mini-batch.
    :param optimizer: The optimizer (here Adam) that holds the parameters to update
    :param train_loader: DataLoader that yields inputs and targets of the train dataset
    :param network: Network being trained
    :param epoch: Current epoch of the program
    :param parameters: Object holding the parameters of the run (including parameters.path_print)
    :param timer_batch: Timestamp taken at the beginning of the current batch
    :param timer_epoch: Timestamp taken at the beginning of the epoch
    :param inter_union: Unused here (presumably an intersection-over-union accumulator)
    :return: Nothing, but updates the network and saves the train error
    """

    train_error = 0

    # Loop over the mini-batches; the size of each mini-batch is defined in the train_loader
    for i, (x_batch, y_batch, _) in enumerate(train_loader):

        # zero the gradient buffers
        optimizer.zero_grad()

        # Transform into Variable
        if torch.cuda.is_available():
            x_batch, y_batch = Variable(x_batch.cuda()), Variable(y_batch.cuda())
        else:
            x_batch, y_batch = Variable(x_batch), Variable(y_batch)

        # Compute the forward function
        y_batch_estimated = network(x_batch)

        # Get the error
        loss = Loss_Error.criterion(y_estimated=y_batch_estimated,
                                    y=y_batch,
                                    parameters=parameters,
                                    global_IoU_modif=False)

        # Compute the backward function
        loss.backward()

        # Update the weights according to the optimizer defined above
        optimizer.step()

        # Save error of the training DataSet
        train_error += Save_import.save_error(x=x_batch, y=y_batch,
                                              network=network,
                                              epoch=epoch,
                                              set_type="train",
                                              parameters=parameters,
                                              loss=loss,
                                              y_estimated=y_batch_estimated)

        # Similar to a "print" but in a textfile
        with open(parameters.path_print, 'a') as txtfile:
            txtfile.write(
                "\nEpoch : " + str(epoch) + ". Batch : " + str(i) +
                ".\nTrain_Error : " + str(train_error / (i + 1)) +
                "\n" + "Time batch : " + Save_import.time_to_string(time.time() - timer_batch) +
                ".\nTime total batch : " + Save_import.time_to_string(time.time() - timer_epoch) + "\n \n")

        timer_batch = time.time()

    # Nothing to return; the network has been updated in place
    return
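
For context, here is a minimal sketch of how these two loops might be driven per epoch. It is not from the original project: network, train_loader, val_loader, and parameters are assumed to be built elsewhere, and the learning rate and epoch count are illustrative placeholders.

# A hypothetical epoch driver (not from the original project).
# network, train_loader, val_loader, and parameters are assumed to be
# built elsewhere; the learning rate and epoch count are placeholders.
import time

import torch.optim as optim

optimizer = optim.Adam(network.parameters(), lr=1e-4)
num_epochs = 10

for epoch in range(num_epochs):
    timer_epoch = time.time()

    # One pass over the training set: forward, backward, optimizer step.
    batch_loop(optimizer=optimizer, train_loader=train_loader, network=network,
               epoch=epoch, parameters=parameters,
               timer_batch=time.time(), timer_epoch=timer_epoch)

    # One pass over the validation set; returns the mean validation error.
    mean_val_error = validation_loop(val_loader=val_loader, network=network,
                                     epoch=epoch, parameters=parameters,
                                     timer_epoch=timer_epoch)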