Example #1
    def computePerformance(self, train_indexes, val_ind):
        '''Compute errors on the training and validation sets.
        @param train_indexes     Tuple (start, stop) with the index bounds of the training set (stop is exclusive).
        @param val_ind           Tuple (start, stop) with the index bounds of the validation set (stop is exclusive), or an empty/None value to skip validation.
        '''
        train_error = 0
        train_sampl = train_indexes[1] - train_indexes[0]       # Count of training samples
        for i in range(train_indexes[0], train_indexes[1]):
            train_error = train_error + self.computeMlpError(sample = self.data[i])
        self.setTrainError(train_error/train_sampl)

        if val_ind:
            val_error = 0
            val_sampl = val_ind[1] - val_ind[0]
            answers   = np.ma.zeros(val_sampl)
            out       = np.ma.zeros(val_sampl)
            for i in range(val_ind[0], val_ind[1]):
                sample = self.data[i]
                val_error = val_error + self.computeMlpError(sample = self.data[i])

                input = np.hstack( (sample['state'],sample['factors']) )
                output = self.getOutput(input)
                out[i-val_ind[0]]     = self.outCategory(output)
                answers[i-val_ind[0]] = self.outCategory(sample['output'])
            self.setValError(val_error/val_sampl)
            depCoef = DependenceCoef(out, answers, expand=True)
            self.valKappa = depCoef.kappa(mode=None)
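The validation branch above reduces to comparing two arrays of category codes. Below is a minimal standalone sketch of that pattern, assuming DependenceCoef can be imported from the surrounding package (the import path shown is hypothetical) and using only calls that appear elsewhere on this page.

import numpy as np
# Hypothetical import path; adjust to wherever DependenceCoef lives in your project.
from dependence_coef import DependenceCoef

# Predicted and reference category codes for a small validation set.
predicted = np.ma.array([0, 1, 1, 2, 0])
reference = np.ma.array([0, 1, 2, 2, 0])

# expand=True makes the crosstab cover the union of categories in both arrays,
# as in the computePerformance() example above.
dc = DependenceCoef(predicted, reference, expand=True)
print(dc.kappa(mode=None))   # kappa statistic of the agreement
print(dc.correctness())      # percent of matching samples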
Example #2
    def train(self):
        # Stack the state and factor variables into a single feature matrix
        X = np.column_stack((self.data['state'], self.data['factors']))
        Y = self.data['output']
        self.labelCodes = np.unique(Y)
        self.logreg.fit(X, Y, maxiter=self.maxiter)
        # Score the fit on the training data itself
        out = self.logreg.predict(X)
        depCoef = DependenceCoef(np.ma.array(out), np.ma.array(Y), expand=True)
        self.Kappa = depCoef.kappa(mode=None)
        self.pseudoR = depCoef.correctness(percent=False)
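The scores stored here appear to be standard ones: kappa(mode=None) plays the role of Cohen's kappa, and correctness(percent=False) the plain fraction of correct predictions. If scikit-learn is available, the same two quantities can be cross-checked independently; the snippet below is only an illustration with made-up labels, not part of the original code.

import numpy as np
from sklearn.metrics import cohen_kappa_score, accuracy_score

y_true = np.array([0, 1, 2, 2, 0, 1])
y_pred = np.array([0, 1, 1, 2, 0, 1])

print(cohen_kappa_score(y_true, y_pred))   # analogue of depCoef.kappa(mode=None)
print(accuracy_score(y_true, y_pred))      # analogue of depCoef.correctness(percent=False)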
Example #3
    def test_cramer(self):
        dc = DependenceCoef(self.X, self.Y)
        # Chi-square statistic computed by hand from the observed table self.T
        # and its expected counts self.T_cramer_expect
        self.T_cramer = np.subtract(self.T, self.T_cramer_expect)
        self.T_cramer = np.square(self.T_cramer)
        self.x2 = np.sum(np.divide(self.T_cramer, self.T_cramer_expect))
        # Cramer's V = sqrt(chi2 / (n * min(r - 1, s - 1)))
        self.cramer = math.sqrt(self.x2 / (self.total * min(self.r - 1, self.s - 1)))
        self.assertEqual(dc.cramer(), self.cramer, 'cramer coeff failed')

        # Identical arrays must give perfect association
        dc = DependenceCoef(self.X, self.X)
        self.assertEqual(dc.cramer(), 1.0, 'cramer coeff failed')
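The test assembles the chi-square statistic by hand and then applies the Cramér's V formula sqrt(chi2 / (n * min(r-1, s-1))). As a standalone illustration of the same formula on a made-up contingency table (independent of the test fixture), SciPy's chi2_contingency can supply the chi-square term:

import numpy as np
from scipy.stats import chi2_contingency

T = np.array([[10, 5], [4, 11]])               # made-up contingency table (r = 2, s = 2)
chi2, p, dof, expected = chi2_contingency(T, correction=False)
n = T.sum()
cramers_v = np.sqrt(chi2 / (n * (min(T.shape) - 1)))   # min(T.shape) - 1 == min(r - 1, s - 1)
print(cramers_v)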
Example #4
    def test_correlation(self):
        dc = DependenceCoef(self.X, self.Y)
        n = len(np.ma.compressed(self.X))
        mean_x = np.ma.mean(self.X)
        mean_y = np.ma.mean(self.Y)
        # Pearson correlation: population covariance over the product of standard deviations
        self.cov = np.ma.sum(np.multiply(np.subtract(self.X, mean_x), np.subtract(self.Y, mean_y))) / n
        self.S_x = np.std(self.X)
        self.S_y = np.std(self.Y)
        self.R = self.cov / (self.S_x * self.S_y)
        self.assertEqual(dc.correlation(), self.R, 'correlation failed')

        # An array correlated with itself must give exactly 1
        dc = DependenceCoef(self.X, self.X)
        self.assertEqual(dc.correlation(), 1.0, 'correlation failed')
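The quantity checked here is the ordinary Pearson correlation: population covariance divided by the product of population standard deviations. On plain (unmasked) arrays the same arithmetic matches numpy's corrcoef, as this small sketch with made-up data shows:

import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
y = np.array([2.0, 1.0, 4.0, 3.0, 6.0])

cov = np.mean((x - x.mean()) * (y - y.mean()))   # population covariance, as in the test
r = cov / (np.std(x) * np.std(y))                # np.std defaults to ddof=0 (population std)
print(r, np.corrcoef(x, y)[0, 1])                # both give the Pearson r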
Example #5
    def test_kappa(self):
        dc = DependenceCoef(self.Y, self.Y1)
        #~ table =  np.array([
            #~ [1, 2, 1],
            #~ [0, 1, 0],
            #~ [2, 0, 1],
        #~ ])
        Pa = 3.0/8
        Pe = 21.0/64
        Pmax = 6.0/8

        answer = (Pa - Pe)/(1 - Pe)
        self.assertEqual(dc.kappa(), answer)

        answer = (Pa - Pe)/(Pmax - Pe)
        self.assertEqual(dc.kappa(mode='loc'), answer)

        answer = (Pmax - Pe)/(1 - Pe)
        self.assertEqual(dc.kappa(mode='histo'), answer)
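The constants Pa, Pe and Pmax follow directly from the commented 3x3 crosstab of the two label arrays (8 samples): Pa is the share of the diagonal (observed agreement), Pe the chance agreement implied by the row and column totals, and Pmax the largest agreement those totals allow. A short check of that arithmetic:

import numpy as np

# Crosstab of Y (rows) vs Y1 (columns) copied from the comment above; n = 8 samples.
T = np.array([
    [1, 2, 1],
    [0, 1, 0],
    [2, 0, 1],
], dtype=float)
n = T.sum()

Pa = np.trace(T) / n                                           # observed agreement: (1+1+1)/8
Pe = np.sum(T.sum(axis=1) * T.sum(axis=0)) / n**2              # chance agreement: 21/64
Pmax = np.sum(np.minimum(T.sum(axis=1), T.sum(axis=0))) / n    # best agreement the marginals allow: 6/8

print(Pa, Pe, Pmax)                 # 0.375 0.328125 0.75
print((Pa - Pe) / (1 - Pe))         # kappa()
print((Pa - Pe) / (Pmax - Pe))      # kappa(mode='loc')
print((Pmax - Pe) / (1 - Pe))       # kappa(mode='histo')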
Example #6
    def test_jiu(self):
        dc = DependenceCoef(self.X, self.Y)
        self.assertAlmostEqual(dc.jiu(), 0.385101639127, 9, 'joint coeff failed')
        dc = DependenceCoef(self.X, self.X)
        self.assertEqual(dc.jiu(), 1.0, 'joint coeff failed')
Example #7
    def test_correctness(self):
        dc = DependenceCoef(self.X, self.X)
        self.assertEqual(dc.correctness(), 100.0, 'correctness % failed')