def setUp(self):
    """Build a small 4-3-10 sigmoid network with random inputs and one-hot targets."""
    # Two hidden layers excluding the output layer: 4 -> 3 and 3 -> 10 units.
    self.layersExOutputLy = (NnLayer(sigmoid, 4, 1, 3), NnLayer(sigmoid, 3, 1, 10))
    self.nn = FeedforwardNeuNet(self.layersExOutputLy, 0, 0.05, 1)
    # 7 random training examples with 4 features each.
    self.inputs = rand(7, 4)
    # One-hot encode 7 random class labels in [0, 10): fancy-indexing the
    # identity matrix with the label vector picks the corresponding rows,
    # exactly as the original per-label comprehension did.
    labels = randint(0, 10, 7)
    self.targets = identity(10)[labels]
class TestSparse_CostFuncGrad(TestCase):
    """Verify the analytic sparse cost-function gradient against a numerical one."""

    def setUp(self):
        """Build a small 4-3-10 sigmoid network with random inputs and one-hot targets."""
        self.layersExOutputLy = (NnLayer(sigmoid, 4, 1, 3), NnLayer(sigmoid, 3, 1, 10))
        self.nn = FeedforwardNeuNet(self.layersExOutputLy, 0, 0.05, 1)
        self.inputs = rand(7, 4)
        identityArr = identity(10)
        y = randint(0, 10, 7)  # 7 random class labels in [0, 10)
        self.targets = array([identityArr[t] for t in y])  # one-hot target rows

    def test_sparse_CostFuncGradMultiOutputNoWeitDecayNoSparse(self):
        """Gradient-check the cost function at 3 random weight settings.

        weightDecayParam must be set to 0 in order to check the analytic
        partial derivatives against numerical ones obtained by approx_fprime().
        """
        for _ in xrange(3):
            weights1, weights2 = rand(3, 5), rand(10, 4)
            self.nn.layersExOutputLy[0].updateForwardWeight(weights1)
            self.nn.layersExOutputLy[1].updateForwardWeight(weights2)
            self.nn.forwardPropogateAllInput(self.inputs)
            # BUG FIX: check_grad() RETURNS the 2-norm of the difference between
            # the analytic and numerical gradients; the original call discarded
            # that value, so this test contained no assertion and could never
            # fail. Capture it and require the discrepancy to be tiny.
            err = check_grad(sparse_CostFunc, sparse_CostFuncGrad,
                             append(weights1, weights2),
                             self.inputs, self.targets, 0, 0.01, 0, self.nn)
            # Tolerance chosen for float64 central-ish differencing; loosen if
            # the cost surface proves noisier than expected.
            self.assertLess(err, 1e-4)
def setUp(self):
    """Load reference weights/inputs/outputs from testDataSet/ and build the net."""
    # Normalise __file__ to forward slashes; the '\t' -> '/t' replacement undoes
    # the tab that a literal backslash-t inside a Windows path would produce.
    normalizedPath = __file__.replace('\t', '/t').replace('\\', '/')
    projectRootPath = '/'.join(normalizedPath.split('/')[:-2]) + '/testDataSet/'
    # 400 -> 25 -> 10 sigmoid network; install the pre-trained weights per layer.
    layersExOutputLy = (NnLayer(sigmoid, 400, 1, 25), NnLayer(sigmoid, 25, 1, 10))
    for layer, matName in zip(layersExOutputLy, ('Theta1', 'Theta2')):
        layer.updateForwardWeight(loadmat(projectRootPath + matName + '.mat')[matName])
    self.inputs = loadmat(projectRootPath + 'X.mat')['X']
    self.outputs = loadmat(projectRootPath + 'forwardPropOutputs.mat')['actualOutput']
    self.nn = FeedforwardNeuNet(layersExOutputLy, 1, 0.05, 1)
class TestNeuNet(TestCase):
    """Forward-propagation tests against pre-computed reference outputs."""

    def setUp(self):
        """Load reference weights/inputs/outputs from testDataSet/ and build the net."""
        # Normalise __file__ to forward slashes; the '\t' -> '/t' replacement
        # undoes the tab a literal backslash-t in a Windows path would produce.
        normalizedPath = __file__.replace('\t', '/t').replace('\\', '/')
        projectRootPath = '/'.join(normalizedPath.split('/')[:-2]) + '/testDataSet/'
        # 400 -> 25 -> 10 sigmoid network with pre-trained weights per layer.
        layersExOutputLy = (NnLayer(sigmoid, 400, 1, 25), NnLayer(sigmoid, 25, 1, 10))
        for layer, matName in zip(layersExOutputLy, ('Theta1', 'Theta2')):
            layer.updateForwardWeight(loadmat(projectRootPath + matName + '.mat')[matName])
        self.inputs = loadmat(projectRootPath + 'X.mat')['X']
        self.outputs = loadmat(projectRootPath + 'forwardPropOutputs.mat')['actualOutput']
        self.nn = FeedforwardNeuNet(layersExOutputLy, 1, 0.05, 1)

    def test_forwardPropogateOneInput(self):
        """One example in; network output should match the first reference row."""
        self.nn.forwardPropogateOneInput(self.inputs[0])
        # NOTE(review): self.nn itself is compared as an array — presumably
        # FeedforwardNeuNet exposes its output via the array protocol; confirm
        # against the class definition.
        assert_array_almost_equal(self.nn, self.outputs[0])

    def test_forwardPropogateAllInput(self):
        """All examples in; returned matrix should match the full reference."""
        result = self.nn.forwardPropogateAllInput(self.inputs)
        assert_array_almost_equal(result, self.outputs)