def testExtensionInheritance():
    """Test inheritance of extension nodes."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _testtest(self):
            pass
    class TestSFANode(TestExtensionNode, mdp.nodes.SFANode):
        def _testtest(self):
            return 42
        _testtest_attr = 1337
    class TestSFA2Node(TestSFANode, mdp.nodes.SFA2Node):
        def _testtest(self):
            if sys.version_info[0] < 3:
                return TestSFANode._testtest.__func__(self)
            else:
                return TestSFANode._testtest(self)
    sfa2_node = mdp.nodes.SFA2Node()
    mdp.activate_extension("__test")
    assert sfa2_node._testtest() == 42
    assert sfa2_node._testtest_attr == 1337
def testContextManager1():
    """Test that the context manager activates extensions."""
    class Test1ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test1"
        def _testtest(self):
            pass
    class Test2ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test2"
        def _testtest(self):
            pass
    assert mdp.get_active_extensions() == []
    with mdp.extension('__test1'):
        assert mdp.get_active_extensions() == ['__test1']
    assert mdp.get_active_extensions() == []
    # with multiple extensions
    with mdp.extension(['__test1', '__test2']):
        active = mdp.get_active_extensions()
        assert '__test1' in active
        assert '__test2' in active
    assert mdp.get_active_extensions() == []
    mdp.activate_extension("__test1")
    # Test that only newly activated extensions are deactivated on exit.
    with mdp.extension(['__test1', '__test2']):
        active = mdp.get_active_extensions()
        assert '__test1' in active
        assert '__test2' in active
    assert mdp.get_active_extensions() == ["__test1"]
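# The gradient tests further below scope the extension with try/finally; the
# context manager exercised above is an equivalent, more compact pattern.
# A minimal sketch (the node choice and data shapes are illustrative):
import mdp
import numpy as np

node = mdp.nodes.QuadraticExpansionNode()
x = np.random.random((5, 3))
node.execute(x)  # fixes the input/output dimensions

with mdp.extension("gradient"):
    # "gradient" is active only inside this block
    result = node._gradient(x)
    grad = result[1]["grad"]  # one Jacobian per data point
# on exit the extension is deactivated again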
def test_sfa2_gradient2(self):
    def _alt_sfa2_grad(self, x):
        """Reference grad method based on quadratic forms."""
        # note that the H and f arrays are cached in the node and remain
        # even after the extension has been deactivated
        if not hasattr(self, "__gradient_Hs"):
            quad_forms = [self.get_quadratic_form(i)
                          for i in range(self.output_dim)]
            self.__gradient_Hs = numx.vstack([quad_form.H[numx.newaxis]
                                              for quad_form in quad_forms])
            self.__gradient_fs = numx.vstack([quad_form.f[numx.newaxis]
                                              for quad_form in quad_forms])
        grad = (numx.dot(x, self.__gradient_Hs) +
                numx.repeat(self.__gradient_fs[numx.newaxis, :, :],
                            len(x), axis=0))
        return grad
    sfa2_node = bimdp.nodes.SFA2BiNode(output_dim=3)
    x = numx_rand.random((300, 6))
    sfa2_node.train(x)
    sfa2_node.stop_training()
    x = numx_rand.random((2, 6))
    mdp.activate_extension("gradient")
    try:
        result1 = sfa2_node.execute(x, {"method": "gradient"})
        grad1 = result1[1]["grad"]
        grad2 = _alt_sfa2_grad(sfa2_node, x)
        assert numx.amax(abs(grad1 - grad2)) < 1E-9
    finally:
        mdp.deactivate_extension("gradient")
def testExtensionInheritanceInjection():
    """Test the injection of inherited methods."""
    class TestNode(object):
        def _test1(self):
            return 0
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _test1(self):
            return 1
        def _test2(self):
            return 2
        def _test3(self):
            return 3
    class TestNodeExt(TestExtensionNode, TestNode):
        def _test2(self):
            return "2b"
    @mdp.extension_method("__test", TestNode)
    def _test4(self):
        return 4
    test_node = TestNode()
    mdp.activate_extension("__test")
    assert test_node._test1() == 1
    assert test_node._test2() == "2b"
    assert test_node._test3() == 3
    assert test_node._test4() == 4
    mdp.deactivate_extension("__test")
    assert test_node._test1() == 0
    assert not hasattr(test_node, "_test2")
    assert not hasattr(test_node, "_test3")
    assert not hasattr(test_node, "_test4")
def test_quadexpan_gradient2(self):
    """Test gradient with multiple data points."""
    node = mdp.nodes.QuadraticExpansionNode()
    x = numx_rand.random((3, 5))
    node.execute(x)
    mdp.activate_extension("gradient")
    try:
        result = node._gradient(x)
        gradient = result[1]["grad"]
        assert gradient.shape == (3, 20, 5)
    finally:
        mdp.deactivate_extension("gradient")
def test_sfa2_gradient(self):
    sfa2_node1 = bimdp.nodes.SFA2BiNode(output_dim=5)
    sfa2_node2 = bimdp.nodes.SFA2BiNode(output_dim=3)
    flow = sfa2_node1 + sfa2_node2
    x = numx_rand.random((300, 6))
    flow.train(x)
    x = numx_rand.random((2, 6))
    mdp.activate_extension("gradient")
    try:
        flow.execute(x, {"method": "gradient"})
    finally:
        mdp.deactivate_extension("gradient")
def testExtensionInheritanceTwoExtensions():
    """Test non_extension injection for multiple extensions."""
    class Test1ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test1"
        def _execute(self):
            return 1
    class Test2ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test2"
    class Test3ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test3"
        def _execute(self):
            return "3a"
    class TestNode1(mdp.Node):
        pass
    class TestNode2(TestNode1):
        pass
    class ExtendedTest1Node2(Test1ExtensionNode, TestNode2):
        pass
    class ExtendedTest2Node1(Test2ExtensionNode, TestNode1):
        def _execute(self):
            return 2
    class ExtendedTest3Node1(Test3ExtensionNode, TestNode1):
        def _execute(self):
            return "3b"
    test_node = TestNode2()
    mdp.activate_extension('__test2')
    assert test_node._execute() == 2
    mdp.deactivate_extension('__test2')
    # in this order TestNode2 should get _execute from __test1;
    # the later addition by __test1 to TestNode1 doesn't matter
    mdp.activate_extensions(['__test1', '__test2'])
    assert test_node._execute() == 1
    mdp.deactivate_extensions(['__test2', '__test1'])
    # now activate in inverse order:
    # TestNode2 already gets _execute from __test2, but that is still
    # overridden by __test1, since that is how it is registered in _extensions
    mdp.activate_extensions(['__test2', '__test1'])
    assert test_node._execute() == 1
    mdp.deactivate_extensions(['__test2', '__test1'])
    ## now the same with extension 3
    mdp.activate_extension('__test3')
    assert test_node._execute() == "3b"
    mdp.deactivate_extension('__test3')
    # __test3 does not override, since the _execute slot for Node2
    # was first filled by __test1
    mdp.activate_extensions(['__test3', '__test1'])
    assert test_node._execute() == 1
    mdp.deactivate_extensions(['__test3', '__test1'])
def test_switchboard_gradient1(self):
    """Test that gradient is correct for a tiny switchboard."""
    sboard = mdp.hinet.Switchboard(input_dim=4, connections=[2, 0])
    x = numx_rand.random((2, 4))
    mdp.activate_extension("gradient")
    try:
        result = sboard._gradient(x)
        grad = result[1]["grad"]
        ref_grad = numx.array([[[0, 0, 1, 0],
                                [1, 0, 0, 0]],
                               [[0, 0, 1, 0],
                                [1, 0, 0, 0]]], dtype=grad.dtype)
        assert numx.all(grad == ref_grad)
    finally:
        mdp.deactivate_extension("gradient")
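# The reference Jacobian above follows from how a switchboard routes data:
# output column j is a copy of input column connections[j], so each Jacobian
# row is one-hot. A quick standalone sanity check (values are illustrative):
import mdp
import numpy as np

sboard = mdp.hinet.Switchboard(input_dim=4, connections=[2, 0])
x = np.array([[10., 20., 30., 40.]])
print(sboard.execute(x))  # [[30. 10.]] -- column 2, then column 0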
def test_sfa_gradient(self):
    """Test gradient for combination of SFA nodes."""
    sfa_node1 = bimdp.nodes.SFABiNode(output_dim=8)
    sfa_node2 = bimdp.nodes.SFABiNode(output_dim=7)
    sfa_node3 = bimdp.nodes.SFABiNode(output_dim=5)
    flow = sfa_node1 + sfa_node2 + sfa_node3
    x = numx_rand.random((300, 10))
    flow.train(x)
    x = numx_rand.random((2, 10))
    mdp.activate_extension("gradient")
    try:
        flow.execute(x, {"method": "gradient"})
    finally:
        mdp.deactivate_extension("gradient")
def testExtensionSetupTeardown():
    """Test defining setup and teardown functions."""
    setup_calls = []
    teardown_calls = []
    @mdp.extension_setup("__test")
    def dummy_setup():
        setup_calls.append(True)
    @mdp.extension_teardown("__test")
    def dummy_teardown():
        teardown_calls.append(True)
    mdp.activate_extension("__test")
    assert len(setup_calls) == 1
    mdp.deactivate_extension("__test")
    assert len(teardown_calls) == 1
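# The same hooks fire when an extension is entered and left through the
# context manager. A minimal sketch (the extension name "__demo" and the
# printed messages are made up for this example):
import mdp

class DemoExtensionNode(mdp.ExtensionNode):
    extension_name = "__demo"

@mdp.extension_setup("__demo")
def _demo_setup():
    print("setup: runs on activation")

@mdp.extension_teardown("__demo")
def _demo_teardown():
    print("teardown: runs on deactivation")

with mdp.extension("__demo"):
    pass  # the setup message is printed on entry
# the teardown message is printed on exit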
def testExtensionInheritance2():
    """Test inheritance of extension nodes, using super."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _testtest(self):
            pass
    class TestSFANode(TestExtensionNode, mdp.nodes.SFANode):
        def _testtest(self):
            return 42
    class TestSFA2Node(mdp.nodes.SFA2Node, TestSFANode):
        def _testtest(self):
            return super(mdp.nodes.SFA2Node, self)._testtest()
    sfa2_node = mdp.nodes.SFA2Node()
    mdp.activate_extension("__test")
    assert sfa2_node._testtest() == 42
def test_clonebilayer_gradient(self):
    """Test gradient for a simple layer."""
    layer = bimdp.hinet.CloneBiLayer(
        bimdp.nodes.SFA2BiNode(input_dim=5, output_dim=2),
        n_nodes=3)
    x = numx_rand.random((100, 15))
    layer.train(x)
    layer.stop_training()
    mdp.activate_extension("gradient")
    try:
        x = numx_rand.random((7, 15))
        result = layer._gradient(x)
        grad = result[1]["grad"]
        assert grad.shape == (7, 6, 15)
    finally:
        mdp.deactivate_extension("gradient")
def testDecoratorExtension():
    """Test extension decorator with a single new extension."""
    @mdp.extension_method("__test", mdp.nodes.SFANode, "_testtest")
    def _sfa_testtest(self):
        return 42
    @mdp.extension_method("__test", mdp.nodes.SFA2Node)
    def _testtest(self):
        return 42 + _sfa_testtest(self)
    sfa_node = mdp.nodes.SFANode()
    sfa2_node = mdp.nodes.SFA2Node()
    mdp.activate_extension("__test")
    assert sfa_node._testtest() == 42
    assert sfa2_node._testtest() == 84
    mdp.deactivate_extension("__test")
    assert not hasattr(mdp.nodes.SFANode, "_testtest")
    assert not hasattr(mdp.nodes.SFA2Node, "_testtest")
def testSimpleExtension():
    """Test for a single new extension."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _testtest(self):
            pass
        _testtest_attr = 1337
    class TestSFANode(TestExtensionNode, mdp.nodes.SFANode):
        def _testtest(self):
            return 42
        _testtest_attr = 1338
    sfa_node = mdp.nodes.SFANode()
    mdp.activate_extension("__test")
    assert sfa_node._testtest() == 42
    assert sfa_node._testtest_attr == 1338
    mdp.deactivate_extension("__test")
    assert not hasattr(mdp.nodes.SFANode, "_testtest")
def testDecoratorInheritance():
    """Test inheritance with decorators for a single new extension."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _testtest(self):
            pass
    @mdp.extension_method("__test", mdp.nodes.SFANode, "_testtest")
    def _sfa_testtest(self):
        return 42
    @mdp.extension_method("__test", mdp.nodes.SFA2Node)
    def _testtest(self):
        return 42 + super(mdp.nodes.SFA2Node, self)._testtest()
    sfa_node = mdp.nodes.SFANode()
    sfa2_node = mdp.nodes.SFA2Node()
    mdp.activate_extension("__test")
    assert sfa_node._testtest() == 42
    assert sfa2_node._testtest() == 84
def testExtensionInheritanceInjectionNonExtension():
    """Test non_extension method injection."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _execute(self):
            return 0
    class TestNode(mdp.Node):
        # no _execute method
        pass
    class ExtendedTestNode(TestExtensionNode, TestNode):
        pass
    test_node = TestNode()
    mdp.activate_extension('__test')
    assert hasattr(test_node, "_non_extension__execute")
    mdp.deactivate_extension('__test')
    assert not hasattr(test_node, "_non_extension__execute")
    assert not hasattr(test_node, "_extension_for__execute")
    # test that the non-native _execute has been completely removed
    assert "_execute" not in test_node.__class__.__dict__
def test_stop_message_attribute(self):
    """Test that the stop_result attribute is present in forked node."""
    stop_result = ({"test": "blabla"}, "node123")
    x = n.random.random([100, 10])
    node = SFABiNode(stop_result=stop_result)
    try:
        mdp.activate_extension("parallel")
        node2 = node.fork()
        node2.train(x)
        forked_result = node2.stop_training()
        assert forked_result == (None,) + stop_result
        # same with derived sfa2 node
        node = SFA2BiNode(stop_result=stop_result)
        mdp.activate_extension("parallel")
        node2 = node.fork()
        node2.train(x)
        forked_result = node2.stop_training()
        assert forked_result == (None,) + stop_result
    finally:
        mdp.deactivate_extension("parallel")
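# fork() is one half of the parallel extension's protocol: forked copies
# train on separate data chunks and are merged back with join(). A
# hand-driven sketch of what a scheduler normally does automatically
# (the chunking and node parameters are illustrative):
import mdp
import numpy as np

node = mdp.nodes.SFANode(output_dim=2)
chunks = [np.random.random((100, 5)) for _ in range(3)]
with mdp.extension("parallel"):
    for chunk in chunks:
        forked = node.fork()   # independent copy for this chunk
        forked.train(chunk)
        node.join(forked)      # merge the partial training results
node.stop_training()
y = node.execute(np.random.random((10, 5)))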
def testExtensionInheritanceInjectionNonExtension2():
    """Test non_extension method injection."""
    class TestExtensionNode(mdp.ExtensionNode):
        extension_name = "__test"
        def _execute(self):
            return 0
    class TestNode(mdp.Node):
        def _execute(self):
            return 1
    class ExtendedTestNode(TestExtensionNode, TestNode):
        pass
    test_node = TestNode()
    mdp.activate_extension('__test')
    # test that non-extended attribute has been added as well
    assert hasattr(test_node, "_non_extension__execute")
    mdp.deactivate_extension('__test')
    assert not hasattr(test_node, "_non_extension__execute")
    assert not hasattr(test_node, "_extension_for__execute")
    # test that the native _execute has been preserved
    assert "_execute" in test_node.__class__.__dict__
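# The "_non_extension_" shadow attribute checked above is what lets an
# extension method fall back on the original implementation. A minimal
# sketch (the "__doubling" extension is made up for this example):
import mdp
import numpy as np

class DoublingExtensionNode(mdp.ExtensionNode, mdp.nodes.IdentityNode):
    extension_name = "__doubling"
    def _execute(self, x):
        # delegate to the original IdentityNode._execute, then post-process
        return 2 * self._non_extension__execute(x)

node = mdp.nodes.IdentityNode()
with mdp.extension("__doubling"):
    y = node.execute(np.random.random((4, 3)))  # returns 2 * x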
def testMultipleExtensions():
    """Test behavior of multiple extensions."""
    class Test1ExtensionNode(mdp.ExtensionNode, mdp.Node):
        extension_name = "__test1"
        def _testtest1(self):
            pass
    class Test2ExtensionNode(mdp.ExtensionNode, mdp.Node):
        extension_name = "__test2"
        def _testtest2(self):
            pass
    mdp.activate_extension("__test1")
    node = mdp.Node()
    node._testtest1()
    mdp.activate_extension("__test2")
    node._testtest2()
    mdp.deactivate_extension("__test1")
    assert not hasattr(mdp.nodes.SFANode, "_testtest1")
    mdp.activate_extension("__test1")
    node._testtest1()
    mdp.deactivate_extensions(["__test1", "__test2"])
    assert not hasattr(mdp.nodes.SFANode, "_testtest1")
    assert not hasattr(mdp.nodes.SFANode, "_testtest2")
def test_layer_gradient(self):
    """Test gradient for a simple layer."""
    node1 = mdp.nodes.SFA2Node(input_dim=4, output_dim=3)
    node2 = mdp.nodes.SFANode(input_dim=6, output_dim=2)
    layer = mdp.hinet.Layer([node1, node2])
    x = numx_rand.random((100, 10))
    layer.train(x)
    layer.stop_training()
    mdp.activate_extension("gradient")
    try:
        x = numx_rand.random((7, 10))
        result = layer._gradient(x)
        grad = result[1]["grad"]
        # get reference result
        grad1 = node1._gradient(x[:, :node1.input_dim])[1]["grad"]
        grad2 = node2._gradient(x[:, node1.input_dim:])[1]["grad"]
        ref_grad = numx.zeros((7, 5, 10))
        ref_grad[:, :node1.output_dim, :node1.input_dim] = grad1
        ref_grad[:, node1.output_dim:, node1.input_dim:] = grad2
        assert numx.all(grad == ref_grad)
    finally:
        mdp.deactivate_extension("gradient")
def test_quadexpan_gradient1(self):
    """Test validity of gradient for QuadraticExpansionBiNode."""
    node = mdp.nodes.QuadraticExpansionNode()
    x = numx.array([[1, 3, 4]])
    node.execute(x)
    mdp.activate_extension("gradient")
    try:
        result = node._gradient(x)
        grad = result[1]["grad"]
        reference = numx.array([[[1, 0, 0],    # x1
                                 [0, 1, 0],    # x2
                                 [0, 0, 1],    # x3
                                 [2, 0, 0],    # x1x1
                                 [3, 1, 0],    # x1x2
                                 [4, 0, 1],    # x1x3
                                 [0, 6, 0],    # x2x2
                                 [0, 4, 3],    # x2x3
                                 [0, 0, 8]]])  # x3x3
        assert numx.all(grad == reference)
    finally:
        mdp.deactivate_extension("gradient")
def test_network_gradient(self):
    """Test gradient for a small SFA network."""
    sfa_node = bimdp.nodes.SFABiNode(input_dim=4*4, output_dim=5)
    switchboard = bimdp.hinet.Rectangular2dBiSwitchboard(
        in_channels_xy=8,
        field_channels_xy=4,
        field_spacing_xy=2)
    flownode = bimdp.hinet.BiFlowNode(bimdp.BiFlow([sfa_node]))
    sfa_layer = bimdp.hinet.CloneBiLayer(flownode,
                                         switchboard.output_channels)
    flow = bimdp.BiFlow([switchboard, sfa_layer])
    train_gen = [numx_rand.random((10, switchboard.input_dim))
                 for _ in range(3)]
    flow.train([None, train_gen])
    # now we can test the gradient
    mdp.activate_extension("gradient")
    try:
        x = numx_rand.random((3, switchboard.input_dim))
        result = flow(x, {"method": "gradient"})
        grad = result[1]["grad"]
        assert grad.shape == (3, sfa_layer.output_dim,
                              switchboard.input_dim)
    finally:
        mdp.deactivate_extension("gradient")
def testContextDecorator():
    """Test the with_extension function decorator."""
    class Test1ExtensionNode(mdp.ExtensionNode):
        extension_name = "__test1"
        def _testtest(self):
            pass
    @mdp.with_extension("__test1")
    def test():
        return mdp.get_active_extensions()
    # check that the extension is activated
    assert mdp.get_active_extensions() == []
    active = test()
    assert active == ["__test1"]
    assert mdp.get_active_extensions() == []
    # check that it is only deactivated if it was activated there
    mdp.activate_extension("__test1")
    active = test()
    assert active == ["__test1"]
    assert mdp.get_active_extensions() == ["__test1"]
def test_gradient_product(self):
    """Test that the product of gradients is calculated correctly."""
    sfa_node1 = bimdp.nodes.SFABiNode(output_dim=5)
    sfa_node2 = bimdp.nodes.SFABiNode(output_dim=3)
    flow = sfa_node1 + sfa_node2
    x = numx_rand.random((300, 10))
    flow.train(x)
    mdp.activate_extension("gradient")
    try:
        x1 = numx_rand.random((2, 10))
        x2, msg = sfa_node1.execute(x1, {"method": "gradient"})
        grad1 = msg["grad"]
        _, msg = sfa_node2.execute(x2, {"method": "gradient"})
        grad2 = msg["grad"]
        grad12 = flow.execute(x1, {"method": "gradient"})[1]["grad"]
        # use a different way to calculate the product of the gradients;
        # this method is too memory-intensive for large data
        ref_grad = numx.sum(grad2[:, :, numx.newaxis, :] *
                            numx.transpose(grad1[:, numx.newaxis, :, :],
                                           (0, 1, 3, 2)),
                            axis=3)
        assert numx.amax(abs(ref_grad - grad12)) < 1E-9
    finally:
        mdp.deactivate_extension("gradient")
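# The reference computation above is the chain rule: the flow's Jacobians
# are the batched matrix product of the per-node Jacobians. A standalone
# numpy check of that identity (shapes match the test above):
import numpy as np

rng = np.random.default_rng(0)
grad1 = rng.random((2, 5, 10))  # Jacobians of the first node
grad2 = rng.random((2, 3, 5))   # Jacobians of the second node
ref = np.sum(grad2[:, :, np.newaxis, :] *
             np.transpose(grad1[:, np.newaxis, :, :], (0, 1, 3, 2)),
             axis=3)
assert np.allclose(ref, np.matmul(grad2, grad1))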
readoutnode = Oger.nodes.RidgeRegressionNode()
flow = mdp.Flow([reservoir, readoutnode])
# Create the gridsearch parameter dictionaries
resParams, readoutParams = OptDicts.getDicts(optDict)
gridsearch_parameters = {reservoir: resParams,
                         readoutnode: readoutParams}
# Create the optimizer
opt = Oger.evaluation.Optimizer(gridsearch_parameters, evaluationFunction)
#===========================================================================
# The following two lines enable parallel training with two worker
# processes; remove them to fall back to single-process training.
#===========================================================================
opt.scheduler = mdp.parallel.ProcessScheduler(n_processes=2, verbose=False)
mdp.activate_extension("parallel")
# Start the grid search using n-fold cross-validation
opt.grid_search(data, flow, n_folds=nFolds,
                cross_validate_function=Oger.evaluation.n_fold_random)
# Plot minimum errors
Evaluation.plotMinErrors(opt.errors, opt.parameters, opt.parameter_ranges, pp)
# Plot the error space along three axes:
i = 0
axisOne = -1
axisTwo = -1
axisThree = -1
# Generated by codesnippet sphinx extension on 2020-12-16
import mdp
import numpy as np
from timeit import Timer

np.random.seed(0)
x = np.random.rand(3000, 1000)
pca_node = mdp.nodes.PCANode()
pca_node.train(x)
pca_node.stop_training()
timer = Timer("pca_node.execute(x)", "from __main__ import pca_node, x")

mdp.caching.set_cachedir("/tmp/my_cache")
mdp.activate_extension("cache_execute")
print(timer.repeat(1, 1)[0], 'sec')
# Expected:
## 1.188946008682251 sec
print(timer.repeat(1, 1)[0], 'sec')
# Expected:
## 0.112375974655 sec
mdp.deactivate_extension("cache_execute")
print(timer.repeat(1, 1)[0], 'sec')
# Expected:
## 0.801102161407 sec
mdp.caching.activate_caching(
    cachedir='/tmp/my_cache',
    cache_classes=[mdp.nodes.SFANode, mdp.nodes.FDANode],
    cache_instances=[pca_node])
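# Caching can also be scoped instead of toggled globally. A minimal sketch,
# assuming mdp.caching's cache context manager (the counterpart of
# activate_caching/deactivate_caching; the cache directory is illustrative):
import mdp
import numpy as np

x = np.random.rand(1000, 100)
pca_node = mdp.nodes.PCANode()
pca_node.train(x)
pca_node.stop_training()

with mdp.caching.cache(cachedir="/tmp/my_cache", cache_instances=[pca_node]):
    y1 = pca_node.execute(x)  # computed and written to the cache
    y2 = pca_node.execute(x)  # served from the cache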