Code example #1
File: SAMTesting.py  Project: robotology/icub-hri
# Imports inferred from this snippet's usage; the full SAMTesting.py module defines more.
import logging
import numpy as np
import numpy.matlib  # provides np.matlib.repmat
from functools import reduce  # built-in on Python 2; explicit import needed on Python 3
from SAM.SAM_Core import SAM_utils as utils  # assumed location of the SAM `utils` helpers


def singleRecall(thisModel,
                 testInstance,
                 verbose,
                 visualiseInfo=None,
                 optimise=100):
    """
        Method that performs classification for single model implementations.
    
        This method returns the classification label of a test instance by calculating the predictive mean and variance of the backwards mapping. It first decides whether the test instance is known or unknown and, if known, selects its most probable classification label.
        
        Args:
            thisModel: SAMObject model to recall from.
            testInstance: Novel feature vector to test.
            verbose: Enable or disable logging to stdout.
            visualiseInfo: None to disable plotting and plotObject to display plot of recall.
            optimise: Number of optimisation iterations to perform during recall.

        Returns:
            Classification label and variance if __calibrateUnknown__ is set to `False` in the config file. Otherwise returns classification label and normalised classification probability.
    """
    #
    # mm,vv,pp=self.SAMObject.pattern_completion(testFace, visualiseInfo=visualiseInfo)
    # if verbose:
    # logging.info('single model recall'
    textStringOut = ''
    # normalize incoming data
    testValue = testInstance - thisModel.Ymean
    testValue /= thisModel.Ystd

    try:
        ret = thisModel.SAMObject.pattern_completion(
            testValue, visualiseInfo=visualiseInfo, optimise=optimise)
    except IndexError:
        return ['unknown', 0]
    mm = ret[0]            # predictive latent mean of the test instance
    vv = list(ret[1][0])   # per-dimension predictive variances
    svv = sum(vv)          # total variance
    mvv = svv / len(vv)    # mean variance
    vv.append(svv)
    vv.append(mvv)

    # find the nearest neighbour of mm among the training points in SAMObject.model.X

    k = np.matlib.repmat(mm[0].values,
                         thisModel.SAMObject.model.X.mean.shape[0], 1)
    pow2 = np.power(thisModel.SAMObject.model.X.mean - k, 2)
    s = np.power(np.sum(pow2, 1), 0.5)  # Euclidean distance to every latent training point
    nn = np.argmin(s)                   # index of the closest training point
    min_value = s[nn]

    # map the nearest training point back to its text label
    if thisModel.SAMObject.type == 'mrd':
        classLabel = thisModel.textLabels[int(
            thisModel.SAMObject.model.bgplvms[1].Y[nn, :])]
    elif thisModel.SAMObject.type == 'bgplvm':
        classLabel = thisModel.textLabels[int(thisModel.L[nn, :])]

    # when calibration data is available, first decide whether the instance is
    # known or unknown before accepting classLabel
    known = True
    if thisModel.calibrated:
        if thisModel.useMaxDistance:
            known = utils.varianceClass(
                thisModel.classificationDict['varianceDirection'],
                vv[thisModel.classificationDict['bestDistanceIDX']],
                thisModel.classificationDict['varianceThreshold'])

            details = str(thisModel.classificationDict['varianceThreshold']) + ' ' + \
                      str(thisModel.classificationDict['varianceDirection'])

            probClass = vv[thisModel.classificationDict['bestDistanceIDX']]
        else:
            P_Known_given_X = utils.PfromHist(
                vv[:-2], thisModel.classificationDict['histKnown'],
                thisModel.classificationDict['binWidth'])
            P_Unknown_given_X = utils.PfromHist(
                vv[:-2], thisModel.classificationDict['histUnknown'],
                thisModel.classificationDict['binWidth'])

            if thisModel.classificationDict['method'] == 'mulProb':
                s1 = reduce(lambda x, y: x * y, P_Known_given_X)
                s2 = reduce(lambda x, y: x * y, P_Unknown_given_X)
                known = s1 > s2
            else:
                s1 = np.sum(P_Known_given_X)
                s2 = np.sum(P_Unknown_given_X)
                known = s1 > s2

            if known:
                probClass = s1
                details = s1, ' > ', s2
            else:
                probClass = s2
                details = s2, ' > ', s1

    if thisModel.calibrated:
        if known:
            textStringOut = classLabel
        else:
            textStringOut = 'unknown'
            runnerUp = classLabel
    else:
        textStringOut = classLabel

    if verbose:
        if thisModel.calibrated:
            if textStringOut == 'unknown':
                logging.info("With " + str(probClass) +
                             " prob. error the new instance is " +
                             str(runnerUp))
                logging.info('But ' + str(details) + ' than ' +
                             str(probClass) + ' so class as ' +
                             str(textStringOut))
            else:
                logging.info("With " + str(probClass) +
                             " prob. error the new instance is " +
                             str(textStringOut))
        else:
            logging.info("With " + str(vv) +
                         " prob. error the new instance is " +
                         str(textStringOut))

    if thisModel.calibrated:
        return [textStringOut, probClass / len(vv)]
    else:
        return [textStringOut, vv]
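
The nearest-neighbour step used in both examples can be reproduced in isolation with plain NumPy. The sketch below is illustrative only: the array shapes are made up, latent_means stands in for thisModel.SAMObject.model.X.mean, predicted_mean stands in for mm[0].values, and np.tile replaces np.matlib.repmat, which is equivalent here.

import numpy as np

# Illustrative shapes: 50 training points in a 4-dimensional latent space.
latent_means = np.random.rand(50, 4)   # stands in for thisModel.SAMObject.model.X.mean
predicted_mean = np.random.rand(1, 4)  # stands in for mm[0].values

# Tile the predicted mean so it can be subtracted row-wise, then take the
# Euclidean distance to every latent training point.
k = np.tile(predicted_mean, (latent_means.shape[0], 1))
distances = np.sqrt(np.sum((latent_means - k) ** 2, axis=1))

nn = np.argmin(distances)        # index of the closest training point
min_value = distances[nn]        # its distance from the test instance
print(nn, min_value)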
Code example #2
File: SAMTesting.py  Project: pcannon67/wysiwyd
def singleRecall(thisModel, testInstance, verbose, visualiseInfo=None, optimise=100):
    # Returns the predictive mean, the predictive variance and the axis (pp) of the latent space backwards mapping.
    # mm,vv,pp=self.SAMObject.pattern_completion(testFace, visualiseInfo=visualiseInfo)
    # if verbose:
    # logging.info('single model recall'
    textStringOut = ''
    # normalize incoming data
    testValue = testInstance - thisModel.Ymean
    testValue /= thisModel.Ystd

    try:
        ret = thisModel.SAMObject.pattern_completion(testValue, visualiseInfo=visualiseInfo, optimise=optimise)
    except IndexError:
        return ['unknown', 0]
    mm = ret[0]
    vv = list(ret[1][0])
    svv = sum(vv)
    mvv = svv/len(vv)
    vv.append(svv)
    vv.append(mvv)

    # find nearest neighbour of mm and SAMObject.model.X

    k = np.matlib.repmat(mm[0].values, thisModel.SAMObject.model.X.mean.shape[0], 1)
    pow2 = np.power(thisModel.SAMObject.model.X.mean - k, 2)
    s = np.power(np.sum(pow2, 1), 0.5)
    nn = np.argmin(s)
    min_value = s[nn]

    if thisModel.SAMObject.type == 'mrd':
        classLabel = thisModel.textLabels[int(thisModel.SAMObject.model.bgplvms[1].Y[nn, :])]
    elif thisModel.SAMObject.type == 'bgplvm':
        classLabel = thisModel.textLabels[int(thisModel.L[nn, :])]

    known = True
    if thisModel.calibrated:
        if thisModel.useMaxDistance:
            known = utils.varianceClass(thisModel.classificationDict['varianceDirection'],
                                vv[thisModel.classificationDict['bestDistanceIDX']],
                                thisModel.classificationDict['varianceThreshold'])

            details = str(thisModel.classificationDict['varianceThreshold']) + ' ' + \
                      str(thisModel.classificationDict['varianceDirection'])

            probClass = vv[thisModel.classificationDict['bestDistanceIDX']]
        else:
            P_Known_given_X = utils.PfromHist(vv[:-2], thisModel.classificationDict['histKnown'],
                                              thisModel.classificationDict['binWidth'])
            P_Unknown_given_X = utils.PfromHist(vv[:-2], thisModel.classificationDict['histUnknown'],
                                                thisModel.classificationDict['binWidth'])

            if thisModel.classificationDict['method'] == 'mulProb':
                s1 = reduce(lambda x, y: x * y, P_Known_given_X)
                s2 = reduce(lambda x, y: x * y, P_Unknown_given_X)
                known = s1 > s2
            else:
                s1 = np.sum(P_Known_given_X)
                s2 = np.sum(P_Unknown_given_X)
                known = s1 > s2

            if known:
                probClass = s1
                details = s1, ' > ', s2
            else:
                probClass = s2
                details = s2, ' > ', s1

    if thisModel.calibrated:
        if known:
            textStringOut = classLabel
        else:
            textStringOut = 'unknown'
            runnerUp = classLabel
    else:
        textStringOut = classLabel

    if verbose:
        if thisModel.calibrated:
            if textStringOut == 'unknown':
                logging.info("With " + str(probClass) + " prob. error the new instance is " + str(runnerUp))
                logging.info('But ' + str(details) + ' than ' + str(probClass) + ' so class as ' + str(textStringOut))
            else:
                logging.info("With " + str(probClass) + " prob. error the new instance is " + str(textStringOut))
        else:
            logging.info("With " + str(vv) + " prob. error the new instance is " + str(textStringOut))

    if thisModel.calibrated:
        return [textStringOut, probClass/len(vv)]
    else:
        return [textStringOut, vv]
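
In the calibrated branch of both examples, the known/unknown decision reduces to comparing per-dimension probabilities drawn from two histograms. The sketch below only mimics that comparison with made-up numbers; P_known and P_unknown stand in for the outputs of utils.PfromHist, which is not reproduced here.

import numpy as np
from functools import reduce

# Made-up per-dimension probabilities, standing in for
# utils.PfromHist(vv[:-2], histKnown, binWidth) and its 'unknown' counterpart.
P_known = np.array([0.6, 0.7, 0.5])
P_unknown = np.array([0.3, 0.2, 0.4])

# 'mulProb' method: compare the products of the per-dimension probabilities.
known_mul = reduce(lambda x, y: x * y, P_known) > reduce(lambda x, y: x * y, P_unknown)

# Any other method: compare the summed probabilities instead.
known_sum = np.sum(P_known) > np.sum(P_unknown)

print(known_mul, known_sum)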