Example #1
def task_cv_single(t, modeLearn: ModeLearn, f:ActivFunct, theta:dict, errorFunct:ActivFunct, miniBatchDim = None):
    (trSet, vlSet) = t
    nn = NeuralNetwork(trSet, f, theta)
    nn.learn(modeLearn, errorFunct, miniBatchDim)

    vecErr = np.array([nn.getError(vlSet, i, 1/len(vlSet), errorFunct) for i in range(nn.hyp['OutputUnits'])])
    return norm(vecErr,2)
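
This task trains one network on a (training, validation) split and then collapses the per-output-unit validation errors into a single score with the Euclidean norm. Below is a minimal, self-contained sketch of just that aggregation step, using only numpy; the function name and inputs are hypothetical stand-ins for the list comprehension over `nn.hyp['OutputUnits']` above.

```python
import numpy as np
from numpy.linalg import norm

def combine_output_errors(per_unit_errors):
    # per_unit_errors: one mean validation error per output unit,
    # analogous to the values produced by nn.getError(...) above.
    vecErr = np.array(per_unit_errors)
    # Collapse the per-unit errors into a single score with the L2 norm,
    # just as task_cv_single returns norm(vecErr, 2).
    return norm(vecErr, 2)

# Toy usage: three output units with different mean errors.
print(combine_output_errors([0.10, 0.25, 0.05]))  # ~0.274
```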
Example #2
def double_cross_validation(workers: int,
                            testFolder: int,
                            nFolder: int,
                            dataSet,
                            f: ActivFunct,
                            learnRate: list,
                            momRate: list,
                            regRate: list,
                            ValMax: list,
                            HiddenUnits: list,
                            OutputUnits: list,
                            MaxEpochs: list,
                            Tolerance: list,
                            startTime,
                            errorFunct=None,
                            modeLearn: ModeLearn = ModeLearn.BATCH,
                            miniBatchDim=None,
                            errorVlFunct=None,
                            hiddenF: ActivFunct = None):
    if testFolder <= 1:
        raise ValueError("Invalid number of folds")

    cp = dataSet.copy()

    # Shuffle the data set.
    rnd.shuffle(cp)

    # Build the sublist of data whose length is exactly divisible by testFolder.
    h = len(cp) - len(cp) % testFolder
    dataSetExact = cp[0:h]

    # Build the list of folds.
    folderDim = int(len(dataSetExact) / testFolder)
    folders = [
        cp[i * folderDim:(i + 1) * folderDim] for i in range(testFolder)
    ]

    # Distribute the leftover elements among the folds.
    for i in range(len(dataSet) - h):
        folders[i].append(cp[i + h])

    errList = list()
    for i in range(len(folders)):
        foldersCopy = folders.copy()
        testSet = foldersCopy[i]
        del foldersCopy[i]

        vlSet = list()
        for j in range(len(foldersCopy)):
            vlSet += foldersCopy[j]

        e = cross_validation(workers,
                             nFolder,
                             modeLearn,
                             vlSet,
                             f,
                             errorFunct,
                             learnRate,
                             momRate,
                             regRate,
                             ValMax,
                             HiddenUnits,
                             OutputUnits,
                             MaxEpochs,
                             Tolerance,
                             startTime,
                             miniBatchDim=miniBatchDim,
                             errorVlFunct=errorVlFunct,
                             hiddenF=hiddenF)
        theta = getBestResult(e)[0]
        nn = NeuralNetwork(vlSet, f, new_hyp=theta, Hiddenf=hiddenF)
        (_, testErr, _, _) = nn.learn(modeLearn, errorFunct, miniBatchDim,
                                      testSet)
        errList.append(testErr[-1])

    return 1 / testFolder * sum(errList)
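
Both routines build their folds the same way: shuffle a copy of the data, truncate to the largest prefix divisible by the number of folds, slice that prefix into equal folds, then hand out the leftover elements one per fold. Here is a self-contained sketch of just that splitting step; `make_folds` is a hypothetical helper, not part of the original code.

```python
import random as rnd

def make_folds(dataSet, k):
    # Shuffle a copy so the folds are randomly composed.
    cp = dataSet.copy()
    rnd.shuffle(cp)

    # Largest prefix length that is an exact multiple of k.
    h = len(cp) - len(cp) % k
    folderDim = h // k

    # k folds of equal size...
    folders = [cp[i * folderDim:(i + 1) * folderDim] for i in range(k)]

    # ...then distribute the leftover elements one per fold.
    for i in range(len(cp) - h):
        folders[i].append(cp[i + h])
    return folders

# Toy usage: 10 elements into 3 folds -> sizes 4, 3, 3.
print([len(f) for f in make_folds(list(range(10)), 3)])
```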
Example #3
def k_fold_CV_single(k: int,
                     dataSet,
                     f: ActivFunct,
                     theta,
                     errorFunct=None,
                     modeLearn: ModeLearn = ModeLearn.BATCH,
                     miniBatchDim=None,
                     errorVlFunct=None,
                     hiddenF: ActivFunct = None):
    if k <= 0:
        raise ValueError("Invalid number of folds")

    cp = dataSet.copy()

    # Shuffle the data set.
    rnd.shuffle(cp)

    # Build the sublist of data whose length is exactly divisible by k.
    h = len(cp) - len(cp) % k
    dataSetExact = cp[0:h]

    # Build the list of folds.
    folderDim = int(len(dataSetExact) / k)
    folders = [cp[i * folderDim:(i + 1) * folderDim] for i in range(k)]

    # Distribute the leftover elements among the folds.
    for i in range(len(cp) - h):
        folders[i].append(cp[i + h])

    errore = list()

    # For plotting the error on the training set and on the validation set.
    trErrorPlot = list()
    vlErrorPlot = list()

    for i in range(len(folders)):
        lcopy = folders.copy()
        del lcopy[i]

        vlSet = folders[i]
        trSet = list()
        for j in range(len(lcopy)):
            trSet += lcopy[j]
        nn = NeuralNetwork(trSet, f, theta, Hiddenf=hiddenF)
        (trErr, vlErr, trAcc, vlAcc) = nn.learn(modeLearn,
                                                errorFunct,
                                                miniBatchDim,
                                                vlSet,
                                                errorVlFunct=errorVlFunct)
        trErrorPlot.append(trErr)
        vlErrorPlot.append(vlErr)

        errore.append(nn.getError(vlSet, 1 / len(vlSet), errorVlFunct))

    err = sum(errore) / k

    # Make sure all error plots have the same length (pad the shorter ones).
    maxLen = len(trErrorPlot[0])
    for i in range(1, len(trErrorPlot)):
        if len(trErrorPlot[i]) > maxLen:
            maxLen = len(trErrorPlot[i])

    for i in range(len(trErrorPlot)):
        if len(trErrorPlot[i]) < maxLen:
            for j in range(maxLen - len(trErrorPlot[i])):
                trErrorPlot[i].append(trErrorPlot[i][-1])
                vlErrorPlot[i].append(vlErrorPlot[i][-1])

    trErrorArray = np.array(trErrorPlot[0])
    vlErrorArray = np.array(vlErrorPlot[0])

    for i in range(1, len(trErrorPlot)):
        trErrorArray = trErrorArray + np.array(trErrorPlot[i])
    trErrorArray = trErrorArray / k

    for i in range(1, len(vlErrorPlot)):
        vlErrorArray = vlErrorArray + np.array(vlErrorPlot[i])
    vlErrorArray = vlErrorArray / k

    return (err, trErrorArray, vlErrorArray)
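
The tail of `k_fold_CV_single` pads each fold's learning curve with its last value until every curve has the length of the longest one, then averages them element-wise over the k folds. The sketch below is a compact, self-contained restatement of that curve-averaging step using numpy; `average_learning_curves` and the toy data are hypothetical.

```python
import numpy as np

def average_learning_curves(curves, k):
    # Pad every curve to the length of the longest one by repeating its
    # last value, mirroring the padding loop in k_fold_CV_single.
    maxLen = max(len(c) for c in curves)
    padded = [c + [c[-1]] * (maxLen - len(c)) for c in curves]

    # Element-wise sum of the padded curves, divided by the number of folds.
    return sum(np.array(c) for c in padded) / k

# Toy usage: three folds whose training stopped after different epoch counts.
curves = [[1.0, 0.5, 0.25], [0.9, 0.4], [1.1, 0.6, 0.3, 0.2]]
print(average_learning_curves(curves, k=3))  # averaged curve of length 4
```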