Example #1
    def setup_level(self, level_number):
        """Setup the game level."""

        self.level = Level(self, level_number)
        self.level.build()

        if not self.debug:
            utility.save_object(self, 'level_start')
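Every snippet on this page calls a save_object helper (here via utility.save_object) whose definition is not shown. As a rough sketch only, assuming it is a thin pickle wrapper with the save_object(obj, path) signature used in the later examples, it could look like the following; the real utility module may differ (for example by adding a file extension to the name):

import pickle

def save_object(obj, path):
    # Assumed implementation: serialize obj to path with pickle
    with open(path, 'wb') as f:
        pickle.dump(obj, f)

def load_object(path):
    # Counterpart loader (also an assumption, not shown in the examples)
    with open(path, 'rb') as f:
        return pickle.load(f)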
Example #2
def create_encoding_deconding_dict(path_data):
    """Build a label-encoding dict (cleaned filename -> index) and its decoding inverse."""
    filenames = sorted(os.listdir(path_data))

    # Map each file's base name (extension stripped, spaces replaced by '_') to an integer label
    en_dict = {}
    for counter, fn in enumerate(filenames):
        en_dict[fn[:-4].split('/')[-1].replace(' ', '_')] = counter

    # Inverse mapping: integer label -> name
    dec_dict = {v: k for k, v in en_dict.items()}

    save_object(en_dict, "saves_obj/en_dict.pk")
    save_object(dec_dict, "saves_obj/dec_dict.pk")

    return en_dict, dec_dict

Example #3
def create_dict_nb_ligne(path, filenames=None):
    '''
    Build a dict mapping each csv file to its number of data rows (header line excluded).
    :param path: directory containing the csv files
    :param filenames: optional list of filenames; defaults to every file in path
    :return: dict {filename: row count}
    '''
    if filenames is None:
        filenames = os.listdir(path)

    dict_nb_ligne = {}

    for fn in filenames:
        # Count the lines of the file, minus one for the header
        with open(os.path.join(path, fn)) as f:
            dict_nb_ligne[fn] = sum(1 for _ in f) - 1

    save_object(dict_nb_ligne, "saves_obj/dict_nb_ligne.pk")

    return dict_nb_ligne
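A quick sketch of how the two helpers above might be used together. The data/ directory, its contents, and the saves_obj/ output folder are assumptions for illustration and do not appear in the snippets themselves:

import os

os.makedirs("saves_obj", exist_ok=True)  # both helpers pickle their results into this folder

# Hypothetical csv dataset, e.g. data/airplane.csv, data/alarm clock.csv, ...
en_dict, dec_dict = create_encoding_deconding_dict("data/")
nb_lignes = create_dict_nb_ligne("data/")

print(en_dict)       # e.g. {'airplane': 0, 'alarm_clock': 1, ...}
print(dec_dict[0])   # e.g. 'airplane'
print(nb_lignes)     # e.g. {'airplane.csv': 10000, 'alarm clock.csv': 9500, ...}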
Example #4
def create_encoding_deconding_dict(path_data):
    '''
    Create a label-encoding dict and the matching label-decoding dict.
    :param path_data: directory containing the data files (one file per label)
    :return: (en_dict, dec_dict)
    '''
    filenames = sorted(os.listdir(path_data))

    # Map each file's base name (extension stripped) to an integer label
    en_dict = {}
    for counter, fn in enumerate(filenames):
        en_dict[fn[:-4].split('/')[-1]] = counter

    # Inverse mapping: integer label -> name
    dec_dict = {v: k for k, v in en_dict.items()}

    save_object(en_dict, "saves_obj/en_dict.pk")
    save_object(dec_dict, "saves_obj/dec_dict.pk")

    return en_dict, dec_dict
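Since dec_dict is built as the inverse of en_dict, encoding and decoding round-trip exactly; a minimal check, again assuming a hypothetical data/ directory:

en_dict, dec_dict = create_encoding_deconding_dict("data/")

# Every label name encodes to an index that decodes back to the same name
assert all(dec_dict[idx] == name for name, idx in en_dict.items())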
Example #5
    def __exit__(self, exc_type, exc_value, exc_traceback):

        if not self.debug:
            utility.save_object(self, 'game_exit')
def train_model(model, train_loader, val_loader, n_epoch, scheduler, optimizer, criterion, use_gpu=False,
                path_save=None, path_start_from_existing_model=None, val_acc_class_save_name=None):

    if path_start_from_existing_model is not None and os.path.isfile(path_start_from_existing_model):

        # Resume training: restore model, optimizer, scheduler and bookkeeping from the checkpoint
        checkpoint = torch.load(path_start_from_existing_model)
        model.load_state_dict(checkpoint['model_state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        next_epoch = checkpoint['epoch'] + 1
        loss = checkpoint['loss']
        history = checkpoint["history"]
        best_acc = checkpoint["best_acc"]
        best_model_weights = checkpoint["best_model_weights"]
        scheduler.load_state_dict(checkpoint["lr_scheduler_state"])

        print("Model loaded, resuming training")

    else:
        # Fresh start: current weights are the best seen so far
        best_model_weights = copy.deepcopy(model.state_dict())
        history = History()
        next_epoch = 0
        best_acc = 0
        print("No model loaded, training from scratch")

    # Training loop (resumes at next_epoch when a checkpoint was loaded)
    for epoch in range(next_epoch, n_epoch):
        model.train()
        scheduler.step()
        for j, batch in enumerate(train_loader):

            inputs, targets = batch
            if use_gpu:
                inputs = inputs.cuda()
                targets = targets.cuda()

            optimizer.zero_grad()
            output = model(inputs)

            loss = criterion(output, targets)
            loss.backward()
            optimizer.step()

        # Metrics on the train and validation sets
        train_acc, train_loss, train_top3_score, train_conf_mat, train_acc_per_class = calcul_metric_concours(
            model, train_loader, use_gpu, show_acc_per_class=True)
        val_acc, val_loss, val_top3_score, val_conf_mat, val_acc_per_class = calcul_metric_concours(
            model, val_loader, use_gpu, show_acc_per_class=True)

        # Current learning rate
        for param_group in optimizer.param_groups:
            current_lr = param_group["lr"]

        history.save(train_acc, val_acc, train_loss, val_loss, current_lr)
        print('Epoch {} - Train acc: {:.2f} - Val acc: {:.2f} - Train loss: {:.4f} - Val loss: {:.4f} '
              '- Train top-3 score: {:.4f} - Val top-3 score: {:.4f}'.format(
                  epoch, train_acc, val_acc, train_loss, val_loss, train_top3_score, val_top3_score))

        # Per-class validation accuracy
        print(val_acc_per_class)
        if val_acc_class_save_name is not None:
            save_object(val_acc_per_class, val_acc_class_save_name)

        # Keep a copy of the best weights so far (by validation accuracy)
        if val_acc > best_acc:
            best_acc = val_acc
            best_model_weights = copy.deepcopy(model.state_dict())

        # Checkpoint: save everything needed to resume training at the next epoch
        if path_save is not None:
            torch.save({
                'epoch': epoch,
                'model_state_dict': model.state_dict(),
                'optimizer_state_dict': optimizer.state_dict(),
                'loss': loss,
                "history": history,
                "best_acc": best_acc,
                "best_model_weights": best_model_weights,
                "lr_scheduler_state": scheduler.state_dict()
            }, path_save)
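A sketch of how train_model could be called. The random stand-in data, the tiny model and every file name below are assumptions for illustration; the function also depends on the project's History, calcul_metric_concours and save_object helpers, which are not defined on this page:

import os

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset

# Hypothetical stand-in data and model, for illustration only
x = torch.randn(256, 1, 28, 28)
y = torch.randint(0, 10, (256,))
train_loader = DataLoader(TensorDataset(x, y), batch_size=32, shuffle=True)
val_loader = DataLoader(TensorDataset(x, y), batch_size=32)

model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10))
optimizer = optim.SGD(model.parameters(), lr=0.1, momentum=0.9)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)
criterion = nn.CrossEntropyLoss()

os.makedirs("saves_obj", exist_ok=True)  # the checkpoint and accuracy pickle are written here in this sketch

train_model(model, train_loader, val_loader, n_epoch=5,
            scheduler=scheduler, optimizer=optimizer, criterion=criterion,
            use_gpu=False,  # set True only after moving the model to the GPU yourself
            path_save="saves_obj/checkpoint.pt",
            path_start_from_existing_model="saves_obj/checkpoint.pt",  # resumes if this file exists
            val_acc_class_save_name="saves_obj/val_acc_per_class.pk")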