def test_single_randomWeights_hiddenWeights(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    self.assertEqual(_neurons, len(_feed.hiddenOneWeights))

def test_single_randomWeights_weightsMatrix(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    self.assertEqual(_neurons * len(self.input.T),
                     len(_feed.inputWeights) * len(_feed.inputWeights.T))

def test_single_outputSum_deltaOutputSum(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    _feed.feedForward(_mindSingle.inputM, _mindSingle.outputM)
    _feed.feedBackwards(0.1)
    self.assertEqual(_feed.deltaOutputSum.shape, self.output.shape)

def test_single_inputLayerBackward_deltaInputChange(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    _feed.feedForward(_mindSingle.inputM, _mindSingle.outputM)
    _feed.feedBackwards(0.1)
    self.assertEqual(_feed.deltaInputChange.shape, _feed.inputWeights.shape)

def test_single_error_oneLower(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    _feed.feedForward(_mindSingle.inputM, _mindSingle.outputM)
    _feed.feedBackwards(0.1)
    _mindSingle.train(100)
    _errors = MindyErrors(_mindSingle)
    self.assertLess(_errors.modelError(), 1)

def test_single_train_hiddenWeights(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    _feed.feedForward(_mindSingle.inputM, _mindSingle.outputM)
    _feed.feedBackwards(0.1)
    _mindSingle.train(10)
    self.assertEqual(_feed.hiddenOneWeights.shape, _feed.deltaHiddenChange.shape)

def test_single_pushForward_predictedOutput(self):
    self.assign_io_data()
    _neurons = 10
    _mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
    _feed = Feeder(_mindSingle)
    _feed.randomWeights()
    _feed.feedForward(_mindSingle.inputM, _mindSingle.outputM)
    self.assertEqual(_feed.predictedOutput.shape, self.output.shape)
class TestNumericalGradient(unittest.TestCase):

    def assign_io_data(self):
        self.input = np.array([
            [0, 0, 0],
            [1, 1, 1],
            [1, 0, 1],
            [0, 0, 1],
            [1, 1, 1]
        ])
        self.output = np.array([
            [0],
            [1],
            [1],
            [0],
            [1]
        ])

    def test_computeNumericalGradient(self):
        self.assign_io_data()
        _neurons = 10
        self.mindSingle = Mindy(self.input, self.output, _neurons, 0.1)
        self.computeNumericalGradient(1e-4, self.mindSingle.inputM, self.mindSingle.outputM)
        _numgrad = self.numgrad
        _grad = self.computeGradients()
        # Visual test: check that the two gradients are almost equal.
        print(_numgrad)
        print(_grad)

    def computeNumericalGradient(self, _epsilon, X, y):
        """Set up the parameters and perform numerical gradient checking."""
        self.Feeder = Feeder(self.mindSingle)
        self.Feeder.randomWeights()
        _paramsInitial = np.hstack((np.array(self.Feeder.inputWeights).ravel(),
                                    np.array(self.Feeder.hiddenOneWeights).ravel()))
        self.numgrad = np.zeros(_paramsInitial.shape)
        _perturb = np.zeros(_paramsInitial.shape)
        for p in range(len(_paramsInitial)):
            _perturb[p] = _epsilon
            self.resetParams(_paramsInitial + _perturb)
            self.Feeder.feedForward(X, y)
            _loss2 = costFunction(self.Feeder)
            self.resetParams(_paramsInitial - _perturb)
            self.Feeder.feedForward(X, y)
            _loss1 = costFunction(self.Feeder)
            # Compute the numerical gradient via the central difference.
            self.numgrad[p] = (_loss2 - _loss1) / (2 * _epsilon)
            # Return the value we changed back to zero.
            _perturb[p] = 0
        self.resetParams(_paramsInitial)

    def computeGradients(self):
        self.Feeder.feedBackwards(1, _numgrad=True)
        return np.hstack((self.Feeder.deltaInputChange.ravel(),
                          self.Feeder.deltaHiddenChange.ravel()))

    def resetParams(self, params):
        self.Feeder.inputWeights = np.matrix(params[:-self.mindSingle.neurons]).reshape(
            len(self.mindSingle.inputM.T), self.mindSingle.neurons)
        self.Feeder.hiddenOneWeights = np.matrix(params[-self.mindSingle.neurons:][:, np.newaxis])
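# The check above relies on the central-difference approximation
#     dJ/dtheta_p ~= (J(theta + eps * e_p) - J(theta - eps * e_p)) / (2 * eps)
# applied to each parameter p in turn. Below is a minimal, self-contained sketch
# of the same idea for an arbitrary cost function; `cost`, `theta`, and
# `numerical_gradient` are placeholders for illustration, not part of this project.

import numpy as np


def numerical_gradient(cost, theta, eps=1e-4):
    """Approximate the gradient of `cost` at `theta` by central differences."""
    grad = np.zeros(theta.shape)
    perturb = np.zeros(theta.shape)
    for p in range(theta.size):
        perturb[p] = eps
        grad[p] = (cost(theta + perturb) - cost(theta - perturb)) / (2 * eps)
        perturb[p] = 0  # reset before moving on to the next parameter
    return grad

# Example: for cost(theta) = sum(theta**2) the exact gradient is 2 * theta,
# so numerical_gradient(lambda t: np.sum(t**2), theta) should match it closely.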
import traceback

import numpy as np

# Feeder, OVR, and Predictor are the project's own helper classes; their import
# paths are not shown in this excerpt.


class Mindy():
    """Class for a simple neural network. Still under construction.
    NB: only one hidden layer."""

    def __init__(self, inputData, outputData, neurons, learningRate, **kwargs):
        """Initializer: takes the input and output data, constructs numpy arrays,
        and accepts the number of initial neurons."""
        try:
            self.inputM = inputData
            self.outputM = outputData
            self.learningRate = learningRate
        except Exception as err:
            traceback.print_exc()
        try:
            self.neurons = int(neurons)
        except Exception as err:
            traceback.print_exc()
        self.yCats = 0  # Index value -> means one vector for y
        self.ovr = False
        try:
            for key, value in kwargs.items():
                if key == 'multinomial' and value == 'ovr':
                    self.ovr = True
                    self.OVR = OVR(self)
                    self.OVR.oneVsRestCategorial(np.array(self.outputM))
        except Exception as err:
            traceback.print_exc()

    def train(self, _iterations):
        """Thinker for the mind."""
        try:
            if self.ovr:
                self.yCats = self.OVR.yCats
                _i = 0
                while _i < self.yCats:
                    # Collect the weights into 3-dimensional matrices, where the
                    # third dimension corresponds to the number of categories.
                    self.Feeder = Feeder(self)
                    self.Feeder.randomWeights()
                    self.outputM = self.OVR.outputOvr[:, _i]
                    self.trainingIterations(_iterations)
                    if _i == 0:
                        self.weightsOvr = np.array(self.Feeder.inputWeights)
                        self.hiddenOneWeightsOvr = np.array(self.Feeder.hiddenOneWeights)
                    elif _i > 0:
                        try:
                            # np.stack only adds a new dimension, so it fails once the
                            # accumulated array is already 3-D; np.concatenate handles that case.
                            self.weightsOvr = np.stack((self.weightsOvr, np.array(self.Feeder.inputWeights)), axis=0)
                            self.hiddenOneWeightsOvr = np.stack((self.hiddenOneWeightsOvr, np.array(self.Feeder.hiddenOneWeights)), axis=0)
                        except ValueError:
                            self.weightsOvr = np.concatenate((self.weightsOvr, np.array(self.Feeder.inputWeights)[np.newaxis, :]), axis=0)
                            self.hiddenOneWeightsOvr = np.concatenate((self.hiddenOneWeightsOvr, np.array(self.Feeder.hiddenOneWeights)[np.newaxis, :]), axis=0)
                    _i += 1
            if not self.ovr:
                self.Feeder = Feeder(self)
                self.Feeder.randomWeights()
                self.trainingIterations(_iterations)
        except Exception as err:
            traceback.print_exc()
        self.yhat = np.mean(self.Feeder.predictedOutput, axis=0)
        self.residual = np.mean(self.Feeder.residual, axis=0)

    def trainingIterations(self, _iterations):
        """Run the training iterations."""
        _i = 0
        while _i < _iterations:
            try:
                self.Feeder.feedForward(self.inputM, self.outputM)
                self.Feeder.feedBackwards(self.learningRate)
            except Exception as err:
                traceback.print_exc()
                break
            _i += 1

    def predict(self, _input):
        """Use the trained weights to predict the output."""
        if self.ovr:
            _i = 0
            while _i < self.yCats:
                try:
                    _predict = Predictor(self)
                    if _i == 0:
                        _hiddenToOutput = _predict.calcOutput(_input, self.weightsOvr[_i, :, :],
                                                              self.hiddenOneWeightsOvr[_i, :, :])
                    elif _i > 0:
                        _hiddenToOutput = np.hstack((_hiddenToOutput,
                                                     _predict.calcOutput(_input, self.weightsOvr[_i, :, :],
                                                                         self.hiddenOneWeightsOvr[_i, :, :])))
                    _i += 1
                except Exception as err:
                    traceback.print_exc()
                    raise ValueError('Error: %s - input for prediction does not match the number '
                                     'of input variables in the network' % (err))
            # Choose the highest value if it is larger than 0.5, otherwise choose 0 as the prediction value.
            _predVal = _predict.chooseOvrPrediction(_hiddenToOutput)
        elif not self.ovr:
            _predict = Predictor(self)
            _hiddenToOutput = _predict.calcOutput(_input, self.Feeder.inputWeights, self.Feeder.hiddenOneWeights)
            _predVal = _predict.chooseBinPrediction(_hiddenToOutput)
        return _predVal
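# A minimal usage sketch, mirroring the fixtures used in the tests above. It
# assumes Mindy is importable from the package; the data, neuron count,
# learning rate, and iteration count are taken from the test cases, and the
# input shape expected by predict() is an assumption, not prescribed by the class.

import numpy as np

X = np.array([[0, 0, 0],
              [1, 1, 1],
              [1, 0, 1],
              [0, 0, 1],
              [1, 1, 1]])
y = np.array([[0], [1], [1], [0], [1]])

mind = Mindy(X, y, neurons=10, learningRate=0.1)
mind.train(100)                              # 100 feed-forward / feed-backward passes
print(mind.predict(np.array([[1, 0, 1]])))   # prediction for one new observation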