Example #1
 def truePositiveRate(self, classIndex: int):
     # TPR (recall) for classIndex: the diagonal entry divided by the row total,
     # i.e. correctly predicted instances of this class over all its true instances.
     correct = total = 0
     for j in range(self.m_NumClasses):
         if j == classIndex:
             correct += self.m_ConfusionMatrix[classIndex][j]
         total += self.m_ConfusionMatrix[classIndex][j]
     return Utils.division(correct, total)
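For reference, a minimal standalone sketch of the same row-sum logic on a toy 3-class confusion matrix. The safe_div helper and the hard-coded matrix are illustrative stand-ins, and Utils.division is assumed here to return 0 on a zero denominator.

 def safe_div(a, b):
     # stand-in for Utils.division: guard against a zero denominator
     return a / b if b != 0 else 0

 def true_positive_rate(conf, class_index):
     # diagonal entry over the row total, mirroring Example #1
     return safe_div(conf[class_index][class_index], sum(conf[class_index]))

 conf = [[5, 1, 0],
         [2, 3, 1],
         [0, 0, 4]]
 print(true_positive_rate(conf, 0))  # 5 / 6 ≈ 0.833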
Example #2
 def precision(self, classIndex: int):
     # Precision for classIndex: the diagonal entry divided by the column total,
     # i.e. correct predictions of this class over all predictions of this class.
     correct = total = 0
     for i in range(self.m_NumClasses):
         if i == classIndex:
             correct += self.m_ConfusionMatrix[i][classIndex]
         total += self.m_ConfusionMatrix[i][classIndex]
     return Utils.division(correct, total)
Example #3
 def falsePositiveRate(self, classIndex: int):
     # FPR for classIndex: instances of every other class that were predicted as
     # classIndex, divided by the total number of instances of those other classes.
     incorrect = total = 0
     for i in range(self.m_NumClasses):
         if i != classIndex:
             for j in range(self.m_NumClasses):
                 if j == classIndex:
                     incorrect += self.m_ConfusionMatrix[i][j]
                 total += self.m_ConfusionMatrix[i][j]
     return Utils.division(incorrect, total)
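The nested loops above amount to summing the classIndex column over every other row and dividing by the combined size of those rows. A standalone sketch of that equivalence on a toy matrix (illustrative values only, not taken from the code above):

 def false_positive_rate(conf, class_index):
     # other-class instances predicted as class_index, over all other-class instances
     others = [i for i in range(len(conf)) if i != class_index]
     incorrect = sum(conf[i][class_index] for i in others)
     total = sum(sum(conf[i]) for i in others)
     return incorrect / total if total != 0 else 0

 conf = [[5, 1, 0],
         [2, 3, 1],
         [0, 0, 4]]
 print(false_positive_rate(conf, 0))  # 2 / 10 = 0.2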
Example #4
 def matthewsCorrelationCoefficient(self, classIndex: int):
     # MCC for classIndex treated as the positive class; needs "import math" at module level.
     numTP = self.numTruePositives(classIndex)
     numTN = self.numTrueNegatives(classIndex)
     numFP = self.numFalsePositives(classIndex)
     numFN = self.numFalseNegatives(classIndex)
     n = numTP * numTN - numFP * numFN
     d = (numTP + numFP) * (numTP + numFN) * (numTN + numFP) * (numTN + numFN)
     d = math.sqrt(d)
     return Utils.division(n, d)
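As a quick numeric check of the same formula, MCC = (TP*TN - FP*FN) / sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN)); the counts below are made-up values for illustration, not outputs of the methods above.

 import math

 numTP, numTN, numFP, numFN = 40, 45, 5, 10
 n = numTP * numTN - numFP * numFN
 d = math.sqrt((numTP + numFP) * (numTP + numFN) * (numTN + numFP) * (numTN + numFN))
 print(n / d if d != 0 else 0)  # ≈ 0.70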
Example #5
 def weightedAreaUnderPRC(self):
     # Area under the precision-recall curve per class, weighted by class frequency
     # (row sums of the confusion matrix) and averaged.
     classCounts = [0] * self.m_NumClasses
     classCountSum = 0
     for i in range(self.m_NumClasses):
         for j in range(self.m_NumClasses):
             classCounts[i] += self.m_ConfusionMatrix[i][j]
         classCountSum += classCounts[i]
     auprcTotal = 0
     for i in range(self.m_NumClasses):
         temp = self.areaUnderPRC(i)
         if classCounts[i] > 0:
             auprcTotal += temp * classCounts[i]
     return Utils.division(auprcTotal, classCountSum)
Example #6
 def weightedMatthewsCorrelation(self):
     # Per-class MCC weighted by class frequency and averaged.
     classCounts = [0] * self.m_NumClasses
     classCountSum = 0
     for i in range(self.m_NumClasses):
         for j in range(self.m_NumClasses):
             classCounts[i] += self.m_ConfusionMatrix[i][j]
         classCountSum += classCounts[i]
     mccTotal = 0
     for i in range(self.m_NumClasses):
         temp = self.matthewsCorrelationCoefficient(i)
         if classCounts[i] > 0:
             mccTotal += temp * classCounts[i]
     return Utils.division(mccTotal, classCountSum)
Example #7
 def weightedFMeasure(self):
     # Per-class F-measure weighted by class frequency and averaged.
     classCounts = [0] * self.m_NumClasses
     classCountSum = 0
     for i in range(self.m_NumClasses):
         for j in range(self.m_NumClasses):
             classCounts[i] += self.m_ConfusionMatrix[i][j]
         classCountSum += classCounts[i]
     fMeasureTotal = 0
     for i in range(self.m_NumClasses):
         temp = self.fMeasure(i)
         if classCounts[i] > 0:
             fMeasureTotal += temp * classCounts[i]
     return Utils.division(fMeasureTotal, classCountSum)
Example #8
 def weightedPrecision(self):
     # Per-class precision weighted by class frequency and averaged.
     classCounts = [0] * self.m_NumClasses
     classCountSum = 0
     for i in range(self.m_NumClasses):
         for j in range(self.m_NumClasses):
             classCounts[i] += self.m_ConfusionMatrix[i][j]
         classCountSum += classCounts[i]
     precisionTotal = 0
     for i in range(self.m_NumClasses):
         temp = self.precision(i)
         if classCounts[i] > 0:
             precisionTotal += temp * classCounts[i]
     return Utils.division(precisionTotal, classCountSum)
Example #9
 def weightedTruePositiveRate(self):
     # Per-class true positive rate (recall) weighted by class frequency and averaged.
     classCounts = [0] * self.m_NumClasses
     classCountSum = 0
     for i in range(self.m_NumClasses):
         for j in range(self.m_NumClasses):
             classCounts[i] += self.m_ConfusionMatrix[i][j]
         classCountSum += classCounts[i]
     truePosTotal = 0
     for i in range(self.m_NumClasses):
         temp = self.truePositiveRate(i)
         if classCounts[i] > 0:
             truePosTotal += temp * classCounts[i]
     return Utils.division(truePosTotal, classCountSum)
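Examples #5 through #9 all follow the same pattern: weight each per-class score by that class's row total in the confusion matrix, then divide by the grand total. Below is a self-contained sketch of that pattern for the true positive rate; the Eval class, its division helper, and the example matrix are assumptions made for illustration, not part of the code above.

 class Eval:
     def __init__(self, confusion_matrix):
         self.m_ConfusionMatrix = confusion_matrix
         self.m_NumClasses = len(confusion_matrix)

     @staticmethod
     def division(a, b):
         # assumed Utils.division behaviour: return 0 on a zero denominator
         return a / b if b != 0 else 0

     def truePositiveRate(self, classIndex):
         row = self.m_ConfusionMatrix[classIndex]
         return self.division(row[classIndex], sum(row))

     def weightedTruePositiveRate(self):
         # per-class TPR weighted by the number of true instances of each class
         counts = [sum(row) for row in self.m_ConfusionMatrix]
         total = sum(counts)
         weighted = sum(self.truePositiveRate(i) * counts[i]
                        for i in range(self.m_NumClasses) if counts[i] > 0)
         return self.division(weighted, total)

 ev = Eval([[5, 1, 0],
            [2, 3, 1],
            [0, 0, 4]])
 print(ev.weightedTruePositiveRate())  # (5 + 3 + 4) / 16 = 0.75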