Example no. 1
0
    def trainNN(self, classData, dialog):
        """Train the final neural-network classifier with early stopping.

        Runs ``self.nFold`` rounds of random sub-sample validation to find the
        iteration count with the best mean validation AUC, then retrains a
        single network on all of ``classData`` for that many iterations and
        stores it in ``self.classifier``.

        Args:
            classData:  per-class training data accepted by ``ml.FNS`` and
                        ``ml.part.classRandomSubSample``.
            dialog:     wx.ProgressDialog used to report progress; it is
                        destroyed before the summary message box is shown.
        """
        maxIter = 250
        nHidden = 10
        # One shared seed so every validation fold and the final fit start
        # from identical initial network weights.
        seed = np.random.randint(0, 1000000)

        # AUC traces per fold, one column per training iteration (0..maxIter).
        trainAUC = np.zeros((self.nFold, maxIter+1))
        validAUC = np.zeros((self.nFold, maxIter+1))

        partitionGenerator = ml.part.classRandomSubSample(classData,
                self.trainFrac, self.nFold)

        for fold, trainData, validData in partitionGenerator:
            dialog.Update(fold, 'Validation Fold: %d' % fold)

            def validTraceCB(optable, iteration, paramTrace, errorTrace, success=True):
                # Fill from the current iteration to the end of the row so
                # that, if the optimizer terminates early, trailing entries
                # hold the last observed AUC instead of zeros.
                if success:
                    trainAUC[fold,validTraceCB.it:] = optable.auc(trainData)
                    validAUC[fold,validTraceCB.it:] = optable.auc(validData)
                    validTraceCB.it += 1
            validTraceCB.it = 0

            # Fix the RNG state for reproducible initial weights, then restore
            # the caller's state so surrounding code (e.g. the partition
            # generator) is unaffected.
            s = np.random.get_state()
            np.random.seed(seed)
            # Trained only for its side effect of driving validTraceCB; the
            # per-fold classifier itself is discarded.
            ml.FNS(trainData, accuracy=0.0, precision=1.0e-10,
                   nHidden=nHidden, maxIter=maxIter, optimFunc=ml.optim.scg,
                   callback=validTraceCB, eTrace=True, verbose=False)
            np.random.set_state(s)

        dialog.Update(self.nFold, 'Training Final Classifier')

        meanValidAUC = np.mean(validAUC, axis=0)

        # Early-stopping point: iteration with the best mean validation AUC.
        bestIter = np.argmax(meanValidAUC)

        bestMeanValidAUC = meanValidAUC[bestIter]

        s = np.random.get_state()
        np.random.seed(seed)
        self.classifier = ml.FNS(classData, accuracy=0.0, precision=1.0e-10,
                                 nHidden=nHidden, maxIter=bestIter, optimFunc=ml.optim.scg,
                                 eTrace=False, verbose=False)
        np.random.set_state(s)

        finalAUC = self.classifier.auc(classData)

        dialog.Destroy()

        # bestIter is an integer index; report it with %d rather than %f.
        wx.MessageBox(message=('Best Iteration: %d\n' % bestIter) +
            ('Mean Validation AUC: %f\n' % bestMeanValidAUC) +
            ('Final Training AUC: %f' % finalAUC),
            caption='Training Completed!', style=wx.OK | wx.ICON_INFORMATION)
Example no. 2
0
    def trainWelchNN(self, trainData):
        """Train the final neural-network classifier with early stopping.

        Uses stratified cross validation (one fold per training trial) to pick
        the iteration count with the best mean validation classification
        accuracy (CA), then retrains on all of the standardized training data
        for that many iterations and stores the result in ``self.classifier``.
        Progress is shown in a wx.ProgressDialog and the results are reported
        in a message box and via ``self.saveResultText``.

        Args:
            trainData:  per-class training data accepted by
                        ``ml.ClassStandardizer`` and ``ml.FNS``.
        """
        maxIter = 250
        nHidden = 10
        # One shared seed so every fold and the final fit start from the same
        # initial network weights.
        seed = np.random.randint(0, 1000000)

        self.stand = ml.ClassStandardizer(trainData)
        trainDataStd = self.stand.apply(trainData)

        nFold = self.nTrainTrial

        # CA traces per fold, one column per training iteration (0..maxIter).
        trnCA = np.zeros((nFold, maxIter + 1))
        valCA = np.zeros((nFold, maxIter + 1))

        dialog = wx.ProgressDialog('Training Classifier',
                                   'Featurizing',
                                   maximum=nFold + 1,
                                   style=wx.PD_ELAPSED_TIME | wx.PD_SMOOTH)

        for fold, trnData, valData in ml.part.classStratified(trainDataStd,
                                                              nFold=nFold):
            dialog.Update(fold, 'Validation Fold: %d' % fold)

            # NOTE(review): trainDataStd is already standardized globally and
            # each fold is re-standardized on its own training split here --
            # confirm the double standardization is intended.
            stand = ml.ClassStandardizer(trnData)
            trnData = stand.apply(trnData)
            valData = stand.apply(valData)

            def valTraceCB(optable,
                           iteration,
                           paramTrace,
                           errorTrace,
                           success=True):
                # Fill from the current iteration to the end of the row so
                # that, if the optimizer terminates early, trailing entries
                # hold the last observed CA instead of zeros that would bias
                # the cross-fold mean toward earlier iterations.
                if success:
                    trnCA[fold, valTraceCB.it:] = optable.ca(trnData)
                    valCA[fold, valTraceCB.it:] = optable.ca(valData)
                    valTraceCB.it += 1

            valTraceCB.it = 0

            # Reseed so this fold trains from the same initial weights.
            np.random.seed(seed)
            # Trained only for its side effect of driving valTraceCB; the
            # per-fold classifier itself is discarded.
            ml.FNS(trnData,
                   accuracy=0.0,
                   precision=0.0,
                   nHidden=nHidden,
                   maxIter=maxIter,
                   optimFunc=ml.optim.scg,
                   callback=valTraceCB,
                   eTrace=True,
                   verbose=False)

        dialog.Update(nFold, 'Training Final Classifier')

        meanTrnCA = np.mean(trnCA, axis=0)
        meanValCA = np.mean(valCA, axis=0)

        # Early-stopping point: iteration with the best mean validation CA.
        bestIter = np.argmax(meanValCA)

        bestMeanTrnCA = meanTrnCA[bestIter]
        bestMeanValCA = meanValCA[bestIter]

        np.random.seed(seed)
        self.classifier = ml.FNS(trainDataStd,
                                 accuracy=0.0,
                                 precision=0.0,
                                 nHidden=nHidden,
                                 maxIter=bestIter,
                                 optimFunc=ml.optim.scg,
                                 eTrace=False,
                                 verbose=False)

        trainCA = self.classifier.ca(trainDataStd)
        # Confusion matrix as rounded percentages for display.
        trainConfusion = np.round(100 *
                                  self.classifier.confusion(trainDataStd))

        dialog.Destroy()

        # bestIter is an integer index; report it with %d rather than %f.
        resultText = (('Best Num Iterations: %d\n' % bestIter) +
                      ('Best Mean Training CA: %f\n' % bestMeanTrnCA) +
                      ('Best Mean Validation CA: %f\n' % bestMeanValCA) +
                      ('Final Training CA: %f\n' % trainCA) +
                      ('Confusion Matrix:\n' + str(trainConfusion) + '\n') +
                      ('Choices: ' + str(self.choices)))

        wx.MessageBox(message=resultText,
                      caption='Training Completed!',
                      style=wx.OK | wx.ICON_INFORMATION)

        self.saveResultText(resultText)
Example no. 3
0
File: pieern.py  Project: idfah/cebl
    def trainNN(self, trainData, dialog):
        """Train the final neural-network classifier with early stopping.

        Uses ``self.nFold`` stratified folds to pick the iteration count with
        the best mean validation classification accuracy (CA), then
        standardizes the full training set and retrains for that many
        iterations, storing the result in ``self.classifier``.

        Args:
            trainData:  per-class training data accepted by
                        ``ml.ClassStandardizer`` and ``ml.FNS``.
            dialog:     wx.ProgressDialog used to report progress; it is
                        destroyed before the summary message box is shown.
        """
        maxIter = 250
        nHidden = 10
        # One shared seed so every fold and the final fit start from the same
        # initial network weights.
        seed = np.random.randint(0, 1000000)

        # CA traces per fold, one column per training iteration (0..maxIter).
        trnCA = np.zeros((self.nFold, maxIter + 1))
        valCA = np.zeros((self.nFold, maxIter + 1))

        for fold, trnData, valData in ml.part.classStratified(
                trainData, nFold=self.nFold):
            dialog.Update(fold, 'Validation Fold: %d' % fold)

            # Standardize using statistics from this fold's training split
            # only, so no validation information leaks into the fit.
            stand = ml.ClassStandardizer(trnData)
            trnData = stand.apply(trnData)
            valData = stand.apply(valData)

            def valTraceCB(optable,
                           iteration,
                           paramTrace,
                           errorTrace,
                           success=True):
                # Fill from the current iteration to the end of the row so
                # that, if the optimizer terminates early, trailing entries
                # hold the last observed CA instead of zeros that would bias
                # the cross-fold mean toward earlier iterations.
                if success:
                    trnCA[fold, valTraceCB.it:] = optable.ca(trnData)
                    valCA[fold, valTraceCB.it:] = optable.ca(valData)
                    valTraceCB.it += 1

            valTraceCB.it = 0

            # Reseed so this fold trains from the same initial weights.
            np.random.seed(seed)
            # Trained only for its side effect of driving valTraceCB; the
            # per-fold classifier itself is discarded.
            ml.FNS(trnData,
                   accuracy=0.0,
                   precision=0.0,
                   nHidden=nHidden,
                   maxIter=maxIter,
                   optimFunc=ml.optim.scg,
                   callback=valTraceCB,
                   eTrace=True,
                   verbose=False)

        dialog.Update(self.nFold, 'Training Final Classifier')

        meanValCA = np.mean(valCA, axis=0)

        # Early-stopping point: iteration with the best mean validation CA.
        bestIter = np.argmax(meanValCA)

        bestMeanValCA = meanValCA[bestIter]

        self.stand = ml.ClassStandardizer(trainData)
        trainData = self.stand.apply(trainData)

        np.random.seed(seed)
        self.classifier = ml.FNS(trainData,
                                 accuracy=0.0,
                                 precision=0.0,
                                 nHidden=nHidden,
                                 maxIter=bestIter,
                                 optimFunc=ml.optim.scg,
                                 eTrace=False,
                                 verbose=False)

        trainCA = self.classifier.ca(trainData)
        # NOTE(review): computed but never displayed or stored -- confirm
        # whether the confusion matrix should appear in the message below.
        trainConfusion = self.classifier.confusion(trainData)

        dialog.Destroy()

        # bestIter is an integer index; report it with %d rather than %f.
        wx.MessageBox(message=('Best Iteration: %d\n' % bestIter) +
                      ('Mean Validation CA: %f\n' % bestMeanValCA) +
                      ('Final Training CA: %f' % trainCA),
                      caption='Training Completed!',
                      style=wx.OK | wx.ICON_INFORMATION)