Example #1
    def testMoreSVD(self):
        pm = SVDMapper()
        # train SVD
        pm.train(self.largefeat)

        # mixing matrix cannot be square
        self.failUnlessEqual(pm.proj.shape, (40, 10))

        # only first singular value significant
        self.failUnless(pm.sv[:1] > 10)
        self.failUnless((pm.sv[1:] < 10).all())

        # now project data into SVD space
        p = pm.forward(self.largefeat.samples)

        # only variance of first component significant
        var = p.var(axis=0)

        # test that only one component has variance
        self.failUnless(var[:1] > 1.0)
        self.failUnless((var[1:] < 0.0001).all())

        # check that the mapped data can be fully recovered by 'reverse()'
        rp = pm.reverse(p)
        self.failUnlessEqual(rp.shape, self.largefeat.samples.shape)
        self.failUnless((N.round(rp) == self.largefeat.samples).all())

        self.failUnlessEqual(pm.getInSize(), 40)
        self.failUnlessEqual(pm.getOutSize(), 10)

        # copy mapper
        pm2 = deepcopy(pm)

        # now remove all but the first 2 components from the mapper
        pm2.selectOut([0, 1])

        # sanity check
        self.failUnlessEqual(pm2.getInSize(), 40)
        self.failUnlessEqual(pm2.getOutSize(), 2)

        # but original mapper must be left intact
        self.failUnlessEqual(pm.getInSize(), 40)
        self.failUnlessEqual(pm.getOutSize(), 10)

        # data should still be fully recoverable by 'reverse()'
        rp2 = pm2.reverse(p[:, [0, 1]])
        self.failUnlessEqual(rp2.shape, self.largefeat.samples.shape)
        self.failUnless(N.abs(rp2 - self.largefeat.samples).sum() < 0.0001)

        # now make new random data and do forward->reverse check
        data = N.random.normal(size=(98, 40))
        data_f = pm.forward(data)

        self.failUnlessEqual(data_f.shape, (98, 10))

        data_r = pm.reverse(data_f)
        self.failUnlessEqual(data_r.shape, (98, 40))
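
What the test above exercises can be reproduced with plain NumPy. Below is a minimal sketch of the projection/reconstruction round trip; the fixture shape (10 samples x 40 features) and its rank-1 structure are assumptions inferred from the (40, 10) projection shape and the variance checks, and `proj` merely stands in for `pm.proj` rather than reproducing SVDMapper internals.

    # minimal sketch, not the PyMVPA implementation; shapes are assumed
    import numpy as np

    rng = np.random.RandomState(0)
    signal = rng.normal(size=(10, 1)) * 10       # one strong latent component
    mixing = rng.normal(size=(1, 40))            # mixed into 40 features
    X = signal @ mixing                          # (10, 40), rank 1

    U, s, Vt = np.linalg.svd(X, full_matrices=False)
    proj = Vt.T                                  # (40, 10), analogous to pm.proj

    p = X @ proj                                 # forward: (10, 10)
    var = p.var(axis=0)
    assert var[0] > 1.0 and (var[1:] < 1e-4).all()

    X_rec = p @ proj.T                           # reverse: exact for training data
    assert np.abs(X_rec - X).sum() < 1e-4

    # keeping only the first two components (what selectOut([0, 1]) does above)
    # still recovers the data, because only one component carries variance
    proj2 = proj[:, :2]
    assert np.abs((X @ proj2) @ proj2.T - X).sum() < 1e-4

Because the 10 training samples span at most 10 dimensions, the economy-size SVD retains exactly 10 right singular vectors, which is why the projection is (40, 10) rather than square.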
Example #2
    def test_simple_svd(self):
        pm = SVDMapper()
        # train SVD
        pm.train(self.ndlin)

        self.failUnlessEqual(pm.proj.shape, (20, 20))

        # now project data into SVD space
        p = pm.forward(self.ndlin)

        # only first singular value significant
        self.failUnless(pm.sv[:1] > 1.0)
        self.failUnless((pm.sv[1:] < 0.0001).all())

        # only variance of first component significant
        var = p.var(axis=0)

        # test that only one component has variance
        self.failUnless(var[:1] > 1.0)
        self.failUnless((var[1:] < 0.0001).all())

        # check that the mapped data can be fully recovered by 'reverse()'
        pr = pm.reverse(p)

        self.failUnlessEqual(pr.shape, (40, 20))
        self.failUnless(np.abs(pm.reverse(p) - self.ndlin).sum() < 0.0001)
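
Here, with more samples (40) than features (20), the projection is a square orthogonal matrix, so 'reverse()' is an exact inverse for any 20-dimensional data, not only the training set. A minimal sketch under the same assumptions as before (rank-1 fixture inferred from the assertions, illustrative names):

    # minimal sketch of the square case; fixture shape is an assumption
    import numpy as np

    rng = np.random.RandomState(2)
    X = rng.normal(size=(40, 1)) @ rng.normal(size=(1, 20))  # (40, 20), rank 1

    _, s, Vt = np.linalg.svd(X, full_matrices=False)
    proj = Vt.T                                  # square (20, 20), orthogonal

    assert s[0] > 1.0 and (s[1:] < 1e-4).all()   # only first singular value matters

    # square orthogonal projection: proj @ proj.T == I, so the round trip
    # is exact for any 20-dimensional data, not just the training samples
    p = X @ proj
    assert np.abs(p @ proj.T - X).sum() < 1e-4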
Example #3
    def test_more_svd(self):
        pm = SVDMapper()
        # train SVD
        pm.train(self.largefeat)

        # mixing matrix cannot be square
        self.failUnlessEqual(pm.proj.shape, (40, 10))

        # only first singular value significant
        self.failUnless(pm.sv[:1] > 10)
        self.failUnless((pm.sv[1:] < 10).all())

        # now project data into SVD space
        p = pm.forward(self.largefeat)

        # only variance of first component significant
        var = p.var(axis=0)

        # test that only one component has variance
        self.failUnless(var[:1] > 1.0)
        self.failUnless((var[1:] < 0.0001).all())

        # check that the mapped data can be fully recovered by 'reverse()'
        rp = pm.reverse(p)
        self.failUnlessEqual(rp.shape, self.largefeat.shape)
        self.failUnless((np.round(rp) == self.largefeat).all())

        # copy mapper
        pm2 = deepcopy(pm)

        # now make new random data and do forward->reverse check
        data = np.random.normal(size=(98, 40))
        data_f = pm.forward(data)

        self.failUnlessEqual(data_f.shape, (98, 10))

        data_r = pm.reverse(data_f)
        self.failUnlessEqual(data_r.shape, (98, 40))
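
For the freshly generated random data the test only asserts shapes, not values: a (98, 40) matrix is projected onto the 10 retained components and mapped back, but since such data does not lie in the training set's 10-dimensional row space, 'reverse()' returns its projection onto that space rather than an exact copy. A self-contained sketch of this shape contract, again assuming a 10-sample by 40-feature training set (names illustrative):

    # minimal sketch of forward/reverse on unseen data; shapes are assumed
    import numpy as np

    rng = np.random.RandomState(1)
    train = rng.normal(size=(10, 40))
    _, _, Vt = np.linalg.svd(train, full_matrices=False)
    proj = Vt.T                                  # (40, 10)

    new = rng.normal(size=(98, 40))
    new_f = new @ proj                           # forward: (98, 10)
    new_r = new_f @ proj.T                       # reverse: (98, 40)
    assert new_f.shape == (98, 10) and new_r.shape == (98, 40)
    # unlike the training data, `new` does not lie in the 10-dimensional row
    # space spanned by proj, so new_r is only its projection, not an exact copy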