Example #1
    def test_sfa2_gradient2(self):
        def _alt_sfa2_grad(self, x):
            """Reference grad method based on quadratic forms."""
            # note that the H and f arrays are cached in the node and remain even
            # after the extension has been deactivated
            if not hasattr(self, "__gradient_Hs"):
                quad_forms = [
                    self.get_quadratic_form(i) for i in range(self.output_dim)
                ]
                # vstack needs a sequence (newer numpy rejects generators)
                self.__gradient_Hs = numx.vstack(
                    [quad_form.H[numx.newaxis] for quad_form in quad_forms])
                self.__gradient_fs = numx.vstack(
                    [quad_form.f[numx.newaxis] for quad_form in quad_forms])
            grad = (numx.dot(x, self.__gradient_Hs) + numx.repeat(
                self.__gradient_fs[numx.newaxis, :, :], len(x), axis=0))
            return grad

        sfa2_node = bimdp.nodes.SFA2BiNode(output_dim=3)
        x = numx_rand.random((300, 6))
        sfa2_node.train(x)
        sfa2_node.stop_training()
        x = numx_rand.random((2, 6))
        mdp.activate_extension("gradient")
        try:
            result1 = sfa2_node.execute(x, {"method": "gradient"})
            grad1 = result1[1]["grad"]
            grad2 = _alt_sfa2_grad(sfa2_node, x)
            assert numx.amax(abs(grad1 - grad2)) < 1E-9
        finally:
            mdp.deactivate_extension("gradient")
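All of the gradient tests in this collection follow the same pattern: with MDP's "gradient" extension active, executing a trained node with {"method": "gradient"} returns a (y, msg) pair, and msg["grad"] holds one Jacobian per sample with shape (n_samples, output_dim, input_dim). A minimal standalone sketch of that pattern (numx_rand is MDP's numpy.random alias):

import mdp
import bimdp
from mdp import numx_rand

node = bimdp.nodes.SFABiNode(output_dim=3)
node.train(numx_rand.random((300, 6)))
node.stop_training()
mdp.activate_extension("gradient")
try:
    x = numx_rand.random((5, 6))
    y, msg = node.execute(x, {"method": "gradient"})
    # one Jacobian per input sample
    assert msg["grad"].shape == (5, 3, 6)
finally:
    mdp.deactivate_extension("gradient")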
Example #2
    def test_sfa2_gradient(self):
        sfa2_node1 = bimdp.nodes.SFA2BiNode(output_dim=5)
        sfa2_node2 = bimdp.nodes.SFA2BiNode(output_dim=3)
        flow = sfa2_node1 + sfa2_node2
        x = numx_rand.random((300, 6))
        flow.train(x)
        x = numx_rand.random((2, 6))
        mdp.activate_extension("gradient")
        try:
            flow.execute(x, {"method": "gradient"})
        finally:
            mdp.deactivate_extension("gradient")
Example #3
def test_mult_diag():
    dim = 20
    d = numx_rand.random(size=(dim,))
    dd = numx.diag(d)
    mtx = numx_rand.random(size=(dim, dim))

    res1 = utils.mult(dd, mtx)
    res2 = utils.mult_diag(d, mtx, left=True)
    assert_array_almost_equal(res1, res2, 10)
    res1 = utils.mult(mtx, dd)
    res2 = utils.mult_diag(d, mtx, left=False)
    assert_array_almost_equal(res1, res2, 10)
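utils.mult_diag multiplies by a diagonal matrix without materializing it; the test above checks it against utils.mult with an explicit numx.diag(d). For reference, a broadcasting sketch of the same two products in plain numpy terms (assuming the test module's imports):

row_scaled = d[:, numx.newaxis] * mtx  # mult(diag(d), mtx): scales the rows
col_scaled = mtx * d                   # mult(mtx, diag(d)): scales the columns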
Example #4
    def test_sfa_gradient(self):
        """Test gradient for a combination of SFA nodes."""
        sfa_node1 = bimdp.nodes.SFABiNode(output_dim=8)
        sfa_node2 = bimdp.nodes.SFABiNode(output_dim=7)
        sfa_node3 = bimdp.nodes.SFABiNode(output_dim=5)
        flow = sfa_node1 + sfa_node2 + sfa_node3
        x = numx_rand.random((300, 10))
        flow.train(x)
        x = numx_rand.random((2, 10))
        mdp.activate_extension("gradient")
        try:
            flow.execute(x, {"method": "gradient"})
        finally:
            mdp.deactivate_extension("gradient")
Example #5
def test_casting():
    x = numx_rand.random((5, 3)).astype('d')
    y = 3*x
    assert_type_equal(y.dtype, x.dtype)
    x = numx_rand.random((5, 3)).astype('f')
    y = 3.*x
    assert_type_equal(y.dtype, x.dtype)
    x = (10*numx_rand.random((5, 3))).astype('i')
    y = 3.*x
    assert_type_equal(y.dtype, 'd')
    # 3L is a Python 2 long literal: multiplying by a long must not upcast
    y = 3L*x
    assert_type_equal(y.dtype, 'i')
    x = numx_rand.random((5, 3)).astype('f')
    y = 3L*x
    assert_type_equal(y.dtype, 'f')
Example #6
    def test_clonebilayer_gradient(self):
        """Test gradient for a simple layer."""
        layer = bimdp.hinet.CloneBiLayer(
            bimdp.nodes.SFA2BiNode(input_dim=5, output_dim=2),
            n_nodes=3)
        x = numx_rand.random((100, 15))
        layer.train(x)
        layer.stop_training()
        mdp.activate_extension("gradient")
        try:
            x = numx_rand.random((7, 15))
            result = layer._gradient(x)
            grad = result[1]["grad"]
            assert grad.shape == (7, 6, 15)
        finally:
            mdp.deactivate_extension("gradient")
Example #7
def test_QuadraticForm_extrema():
    # TODO: add some real test
    # check H with negligible linear term
    noise = 1e-8
    tol = 1e-6
    x = numx_rand.random((10,))
    H = numx.outer(x, x) + numx.eye(10)*0.1
    f = noise*numx_rand.random((10,))
    q = utils.QuadraticForm(H, f)
    xmax, xmin = q.get_extrema(utils.norm2(x), tol=tol)
    assert_array_almost_equal(x, xmax, 5)
    # check I + linear term
    H = numx.eye(10, dtype='d')
    f = x
    q = utils.QuadraticForm(H, f=f)
    xmax, xmin = q.get_extrema(utils.norm2(x), tol=tol)
    assert_array_almost_equal(f, xmax, 5)
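A QuadraticForm(H, f) stands for q(x) = 1/2 x'Hx + f'x plus an optional constant (the same convention the reference gradient in Example #1 relies on), and get_extrema(norm, tol) searches for the maximizer and minimizer of q on the sphere ||x|| = norm; that is why xmax should align with x in the first check and with f in the second. A hedged brute-force sanity check that could be appended to the test above, reusing its H, f, xmax and tol:

def q(v):
    return 0.5 * numx.dot(v, numx.dot(H, v)) + numx.dot(f, v)

r = utils.norm2(xmax)
for _ in range(100):
    v = numx_rand.random((10,)) - 0.5
    v *= r / utils.norm2(v)  # project onto the sphere of radius r
    assert q(v) <= q(xmax) + tol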
Example #8
def test_introspection():
    bogus = BogusNode()
    arrays, string = utils.dig_node(bogus)
    assert len(arrays.keys()) == 4, 'Not all arrays were caught'
    assert sorted(arrays.keys()) == ['x', 'y.x',
                                     'z.x', 'z.z.x'], 'Wrong names'
    sizes = [x[0] for x in arrays.values()]
    assert sorted(sizes) == [numx_rand.random((2,2)).itemsize*4]*4, \
           'Wrong sizes'
    sfa = nodes.SFANode()
    sfa.train(numx_rand.random((1000, 10)))
    a_sfa, string = utils.dig_node(sfa)
    keys = ['_cov_mtx._avg', '_cov_mtx._cov_mtx',
            '_dcov_mtx._avg', '_dcov_mtx._cov_mtx']
    assert sorted(a_sfa.keys()) == keys, 'Wrong arrays in SFANode'
    sfa.stop_training()
    a_sfa, string = utils.dig_node(sfa)
    keys = ['_bias', 'avg', 'd', 'davg', 'sf']
    assert sorted(a_sfa.keys()) == keys, 'Wrong arrays in SFANode'
Example #9
    def test_quadexpan_gradient2(self):
        """Test gradient with multiple data points."""
        node = mdp.nodes.QuadraticExpansionNode()
        x = numx_rand.random((3, 5))
        node.execute(x)
        mdp.activate_extension("gradient")
        try:
            result = node._gradient(x)
            gradient = result[1]["grad"]
            assert gradient.shape == (3, 20, 5)
        finally:
            mdp.deactivate_extension("gradient")
Example #10
    def test_layer_gradient(self):
        """Test gradient for a simple layer."""
        node1 = mdp.nodes.SFA2Node(input_dim=4, output_dim=3)
        node2 = mdp.nodes.SFANode(input_dim=6, output_dim=2)
        layer = mdp.hinet.Layer([node1, node2])
        x = numx_rand.random((100, 10))
        layer.train(x)
        layer.stop_training()
        mdp.activate_extension("gradient")
        try:
            x = numx_rand.random((7, 10))
            result = layer._gradient(x)
            grad = result[1]["grad"]
            # get reference result
            grad1 = node1._gradient(x[:, :node1.input_dim])[1]["grad"]
            grad2 = node2._gradient(x[:, node1.input_dim:])[1]["grad"]
            ref_grad = numx.zeros((7, 5, 10))
            ref_grad[:, :node1.output_dim, :node1.input_dim] = grad1
            ref_grad[:, node1.output_dim:, node1.input_dim:] = grad2
            assert numx.all(grad == ref_grad)
        finally:
            mdp.deactivate_extension("gradient")
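Because a Layer feeds disjoint input slices into its nodes, each per-sample Jacobian is block-diagonal, which is exactly what the reference construction above spells out. An equivalent check, sketched with scipy's block_diag (an extra dependency the test itself does not use), could run inside the same try block:

from scipy.linalg import block_diag
for n in range(len(x)):
    assert numx.all(grad[n] == block_diag(grad1[n], grad2[n]))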
Example #11
def test_symeig_fake_LAPACK_bug():
    # Regression test: when the input matrix is almost, but not exactly,
    # the identity matrix, the LAPACK dgeev routine returns a matrix of
    # eigenvectors that is not orthogonal. This bug was present when we
    # used numx_linalg.eig instead of numx_linalg.eigh.
    # Note: this is a LAPACK bug.
    y = numx_rand.random((4, 4))*1E-16
    y = (y + y.T)/2
    for i in xrange(4):
        y[i, i] = 1
    val, vec = utils._symeig._symeig_fake(y)
    assert_almost_equal(abs(numx_linalg.det(vec)), 1., 12)
Example #12
    def test_switchboard_gradient1(self):
        """Test that gradient is correct for a tiny switchboard."""
        sboard = mdp.hinet.Switchboard(input_dim=4, connections=[2, 0])
        x = numx_rand.random((2, 4))
        mdp.activate_extension("gradient")
        try:
            result = sboard._gradient(x)
            grad = result[1]["grad"]
            ref_grad = numx.array(
                [[[0, 0, 1, 0], [1, 0, 0, 0]], [[0, 0, 1, 0], [1, 0, 0, 0]]],
                dtype=grad.dtype)
            assert numx.all(grad == ref_grad)
        finally:
            mdp.deactivate_extension("gradient")
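A Switchboard only routes its inputs (copying or permuting them), so its Jacobian is the same 0/1 selection matrix for every sample: row i carries a single 1 in column connections[i]. A sketch building the expected gradient above directly from the connection list:

connections = [2, 0]
jac = numx.zeros((len(connections), 4))       # (output_dim, input_dim)
jac[numx.arange(len(connections)), connections] = 1
ref_grad = numx.repeat(jac[numx.newaxis], 2, axis=0)  # one copy per sample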
Example #13
    def test_network_gradient(self):
        """Test gradient for a small SFA network."""
        sfa_node = bimdp.nodes.SFABiNode(input_dim=4 * 4, output_dim=5)
        switchboard = bimdp.hinet.Rectangular2dBiSwitchboard(
            in_channels_xy=8, field_channels_xy=4, field_spacing_xy=2)
        flownode = bimdp.hinet.BiFlowNode(bimdp.BiFlow([sfa_node]))
        sfa_layer = bimdp.hinet.CloneBiLayer(flownode,
                                             switchboard.output_channels)
        flow = bimdp.BiFlow([switchboard, sfa_layer])
        train_gen = [numx_rand.random((10, switchboard.input_dim))
                     for _ in range(3)]
        flow.train([None, train_gen])
        # now we can test the gradient
        mdp.activate_extension("gradient")
        try:
            x = numx_rand.random((3, switchboard.input_dim))
            result = flow(x, {"method": "gradient"})
            grad = result[1]["grad"]
            assert grad.shape == (3, sfa_layer.output_dim,
                                  switchboard.input_dim)
        finally:
            mdp.deactivate_extension("gradient")
Example #14
    def test_gradient_product(self):
        """Test that the product of gradients is calculated correctly."""
        sfa_node1 = bimdp.nodes.SFABiNode(output_dim=5)
        sfa_node2 = bimdp.nodes.SFABiNode(output_dim=3)
        flow = sfa_node1 + sfa_node2
        x = numx_rand.random((300, 10))
        flow.train(x)
        mdp.activate_extension("gradient")
        try:
            x1 = numx_rand.random((2, 10))
            x2, msg = sfa_node1.execute(x1, {"method": "gradient"})
            grad1 = msg["grad"]
            _, msg = sfa_node2.execute(x2, {"method": "gradient"})
            grad2 = msg["grad"]
            grad12 = flow.execute(x1, {"method": "gradient"})[1]["grad"]
            # use a different way to calculate the product of the gradients;
            # this method is too memory-intensive for large data
            ref_grad = numx.sum(grad2[:, :, numx.newaxis, :] *
                                numx.transpose(grad1[:, numx.newaxis, :, :],
                                               (0, 1, 3, 2)),
                                axis=3)
            assert numx.amax(abs(ref_grad - grad12)) < 1E-9
        finally:
            mdp.deactivate_extension("gradient")
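The reference computation above is just the per-sample matrix product of the two Jacobians, i.e. the chain rule grad12[n] = grad2[n] · grad1[n]. A one-liner that should reproduce ref_grad with less index juggling (numx.einsum is plain numpy.einsum):

ref_grad = numx.einsum('noj,nji->noi', grad2, grad1)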
Example #15
def test_QuadraticForm_non_symmetric_raises():
    """Test the detection of a non-symmetric H."""
    H = numx_rand.random((10, 10))
    py.test.raises(mdp.utils.QuadraticFormException,
                   utils.QuadraticForm, H)
Example #16
    def __init__(self):
        self.x = numx_rand.random((2, 2))