示例#1
0
def classificazione(directory,filename, version,exp_name,name, model,lr, epochs,  momentum, batch_size, resize):
    # Train a classification model: build normalized train/valid/test datasets,
    # train via train_class, persist hyperparameters/losses/accuracies to the
    # model JSON file, then reload the trained weights from "<exp_name>.pth".
    # NOTE(review): `filename` is accepted but never used in the visible body.
    print("Classificazione")
    # directory, e.g. "Class"
    # NOTE(review): the assignments below are no-ops (x = x); they have no effect.
    directory =directory
    version=version
    lr=lr
    epochs=epochs
    momentum=momentum
    batch_size = batch_size
    resize=resize
    # Sanity-check the base CSV files before building the datasets.
    controlFileCSVBase()
    
    dataSetClass = DatasetClassi(resize)
    dataSetClass.controlNormalize()  # ensure normalization statistics exist
    
    train = dataSetClass.dataset_train_norm
    validation = dataSetClass.dataset_valid_norm
    test = dataSetClass.dataset_test_norm
    print("Numeri campioni",len(train))
    # Create the output folders "<directory>" and "<directory>\<version>",
    # then write the initial JSON entry for this model/version.
    createFolder(directory)
    createFolder(directory+"\\"+version)
    writeJsonModelInit1(directory,name,version) 
    
    money_train_loader = DataLoader(train, batch_size=batch_size, num_workers=0, shuffle=True)
    money_test_loader = DataLoader(test, batch_size=batch_size, num_workers=0)
    money_val_loader = DataLoader(validation , batch_size = batch_size, num_workers=0)
    print("Numero di batch", len(money_train_loader))
    # Train; returns the model, elapsed time, and last-epoch losses/accuracies.
    modello ,f, last_loss_train, last_loss_val, last_acc_train, last_acc_val = train_class(directory,version, model, money_train_loader, money_val_loader,resize, batch_size, exp_name , lr=lr, epochs = epochs)
    print("Time computing", f)
    print("last_loss_train",last_loss_train)
    print("last_loss_val",last_loss_val)
    print("last_acc_train",last_acc_train)
    print("last_acc_val",last_acc_val)
    
    hyperparametr = {"indexEpoch":epochs-1,"lr":lr, "momentum" : momentum, "batchSize":batch_size }
    # NOTE(review): the key name says "contrastive" but these are the
    # classification losses returned by train_class.
    contrastiveLoss = {"lossTrain": last_loss_train, "lossValid":last_loss_val}
    accuracy = {"accuracyTrain":last_acc_train , "accuracyValid":last_acc_val }
    time = {"training": f}
    
    
    writeJsonModelClass(directory,name,version, hyperparametr,resize,batch_size, contrastiveLoss, accuracy ,time)
    
    # Reload the trained model saved during training as "<exp_name>.pth".
    path = exp_name+".pth"
    model_test = torch.load(path)
    
    """
示例#2
0
def train_model_margine_dynamik(directory, filename, version, exp_name, name,
                                model, lr, epochs, momentum, batch_size,
                                resize):
    """Train a siamese model with a dynamic margin and evaluate on validation.

    Builds normalized money-pair datasets, trains ``model`` via
    ``train_margine_dynamik``, records hyperparameters/losses/accuracies in
    the model JSON, reloads the saved weights from ``<exp_name>.pth`` and
    computes accuracy/precision/recall/mF1 on the validation pairs, storing
    each metric back into the JSON file.

    NOTE(review): ``filename`` is accepted but never used in this function.
    """
    dataSetPair = DataSetPairCreate(resize)
    dataSetPair.controlNormalize()  # ensure normalization statistics exist

    pair_train = dataSetPair.pair_money_train
    pair_test = dataSetPair.pair_money_test
    pair_validation = dataSetPair.pair_money_val

    pair_money_train_loader = DataLoader(pair_train,
                                         batch_size=batch_size,
                                         num_workers=0,
                                         shuffle=True)
    # NOTE(review): the test loader is built but never used in this function.
    pair_money_test_loader = DataLoader(pair_test,
                                        batch_size=batch_size,
                                        num_workers=0)
    pair_money_val_loader = DataLoader(pair_validation,
                                       batch_size=batch_size,
                                       num_workers=0)

    # Output folder "<directory>\<version>" and initial JSON entry.
    createFolder(directory + "\\" + version)
    writeJsonModelInit1(directory, name, version)

    print("Training...")

    # Returns: trained model, training time, last-epoch losses/accuracies.
    modello, f, last_loss_train, last_loss_val, last_acc_train, last_acc_val = train_margine_dynamik(
        directory,
        version,
        model,
        pair_money_train_loader,
        pair_money_val_loader,
        resize,
        batch_size,
        exp_name,
        lr=lr,
        epochs=epochs)

    print("Time computing", f)
    print("last_loss_train", last_loss_train)
    print("last_loss_val", last_loss_val)
    print("last_acc_train", last_acc_train)
    print("last_acc_val", last_acc_val)

    hyperparametr = {
        "indexEpoch": epochs - 1,
        "lr": lr,
        "momentum": momentum,
        "numSampleTrain": len(pair_train)
    }
    contrastiveLoss = {
        "lossTrain": last_loss_train,
        "lossValid": last_loss_val
    }
    accuracy = {"accuracyTrain": last_acc_train, "accuracyValid": last_acc_val}
    time = {"training": f}

    writeJsonModelClass(directory, name, version, hyperparametr, resize,
                        batch_size, contrastiveLoss, accuracy, time)

    # Reload the weights saved by training as "<exp_name>.pth".
    namep = exp_name + ".pth"
    siamese_model = torch.load(namep)

    print("Testing on Validation set")

    timeVal, pair_prediction_val, pair_label_val = test_margine_dynamik(
        siamese_model, pair_money_val_loader)

    # Label convention (from the ==0 / ==1 counts): 0 = similar, 1 = dissimilar.
    numSimilPredette = np.sum(pair_prediction_val == 0)
    print("Num Simili predette", numSimilPredette)
    numDissimilPredette = np.sum(pair_prediction_val == 1)
    print("Num Dissimil predette", numDissimilPredette)
    numSimilReali = np.sum(pair_label_val == 0)
    print("Num Simili Reali", numSimilReali)
    numDissimilReali = np.sum(pair_label_val == 1)
    print("Num Dissimil Reali", numDissimilReali)

    # Validation metrics.
    print(pair_prediction_val[0:10])
    print(pair_label_val[0:10])
    accuracyVal = accuracy_score(pair_label_val, pair_prediction_val)
    print("Accuarcy di test: %0.4f" % accuracyVal)
    precisionVal = precision_score(pair_label_val, pair_prediction_val)
    print("Precision di test: %0.4f" % precisionVal)
    recallVal = recall_score(pair_label_val, pair_prediction_val)
    print("Recall di test: %0.4f" % recallVal)
    # Mean F1 over both classes; skipped when either precision or recall is 0.
    if recallVal != 0.0 and precisionVal != 0.0:
        scores_testing_val = f1_score(pair_label_val,
                                      pair_prediction_val,
                                      average=None)
        scores_testing_val = scores_testing_val.mean()
        print("mF1 score di testing: %0.4f" % scores_testing_val)
    else:
        scores_testing_val = 0.000
        print("mscoref1", scores_testing_val)

    # Persist each metric to the model JSON (replaces five unrolled calls).
    # NOTE(review): there is no path separator between `directory` and the
    # file name — kept as in the original; confirm the intended JSON path.
    results = (
        ("accuracy", "accuracyVal", accuracyVal),
        ("precision", "precisionVal", precisionVal),
        ("recall", "recallVal", recallVal),
        ("mf1_score", "f1_score_Val", scores_testing_val),
        ("time", "testVal", timeVal),
    )
    for key, entry, value in results:
        addValueJsonModel(directory + "modelTrained.json", version, key,
                          entry, value)
示例#3
0
def train_model_margine_double(directory,
                               filename,
                               version,
                               exp_name,
                               name,
                               model,
                               lr,
                               epochs,
                               momentum,
                               batch_size,
                               resize,
                               decay=None,
                               margin1=None,
                               margin2=None,
                               modeLoss=None):
    """Train a siamese model with a double-margin contrastive loss.

    Builds normalized money-pair datasets, trains ``model`` with
    ``train_siamese_margin_double`` and records hyperparameters, losses,
    accuracies and the training time in the model JSON file.

    NOTE(review): ``filename`` is accepted but never used in this function.
    """
    # directory, e.g. "Model-6"
    createFolder(directory)
    dataSetPair = DataSetPairCreate(resize)
    dataSetPair.controlNormalize()  # ensure normalization statistics exist

    pair_train = dataSetPair.pair_money_train
    pair_test = dataSetPair.pair_money_test
    pair_validation = dataSetPair.pair_money_val

    pair_money_train_loader = DataLoader(pair_train,
                                         batch_size=batch_size,
                                         num_workers=0,
                                         shuffle=True)
    # NOTE(review): the test loader is built but never used in this function.
    pair_money_test_loader = DataLoader(pair_test,
                                        batch_size=batch_size,
                                        num_workers=0)
    pair_money_val_loader = DataLoader(pair_validation,
                                       batch_size=batch_size,
                                       num_workers=0)

    siamese_money = model  # the siamese network to train

    # Output folders "<directory>\<version>" and "...\Metod2".
    createFolder(directory + "\\" + version)
    createFolder(directory + "\\" + version + "\\" + "Metod2")

    writeJsonModelInit1(directory, name, version)

    print("Training...")

    # Returns: trained model, training time, last-epoch losses/accuracies.
    modello, f, last_loss_train, last_loss_val, last_acc_train, last_acc_val = train_siamese_margin_double(
        directory,
        version,
        siamese_money,
        pair_money_train_loader,
        pair_money_val_loader,
        resize,
        batch_size,
        exp_name=exp_name,
        lr=lr,
        epochs=epochs,
        momentum=momentum,
        margin1=margin1,
        margin2=margin2,
        logdir='logs',
        decay=decay,
        modeLoss=modeLoss)

    print("Time computing", f)
    print("last_loss_train", last_loss_train)
    print("last_loss_val", last_loss_val)
    print("last_acc_train", last_acc_train)
    print("last_acc_val", last_acc_val)

    # Build the hyperparameter record once; include "decay" (between "lr" and
    # "momentum", matching the original key order) only when it was supplied.
    hyperparametr = {"indexEpoch": epochs - 1, "lr": lr}
    if decay is not None:
        hyperparametr["decay"] = decay
    hyperparametr["momentum"] = momentum
    hyperparametr["numSampleTrain"] = len(pair_train)

    contrastiveLoss = {
        "lossTrain": last_loss_train,
        "lossValid": last_loss_val
    }
    accuracy = {"accuracyTrain": last_acc_train, "accuracyValid": last_acc_val}
    time = {"training": f}

    # BUGFIX: the original passed `resize` twice here; the other callers of
    # writeJsonModelClass in this file pass it once, so the duplicated
    # argument would raise a TypeError (or shift every later argument).
    writeJsonModelClass(directory, name, version, hyperparametr, resize,
                        batch_size, contrastiveLoss, accuracy, time)