def test(self, fImgs, fLbls, fName):
    '''Neural network testing after training.

    INPUT  : Images set, labels set (None for autoencoders),
             name for save.
    OUTPUT : Nothing'''

    if PREPROCESSING:
        fImgs, _key = ld.normalization(fName, fImgs)

    print "Testing the neural networks..."

    _out  = self.propagation(fImgs.T)
    _cost = self.error(_out[-1], fImgs.T) / len(fImgs)

    print "Cost {0}\n".format(_cost)

    # Save output in order to have a testset for next layers
    if fName is not None:
        self.save_output(fName, "test", fImgs)

    # Displaying the results
    if PREPROCESSING:
        _decrypt = np.dot(_out[-1], _key.T)
        dy.display(fName, "out", fImgs, _decrypt.T)
    else:
        dy.display(fName, "out", fImgs, _out[-1].T)

    # Approximated vision of first hidden layer neurons
    dy.display(fName, "neurons", self.neurons_vision())
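# A minimal usage sketch for test() (illustrative only): `_net` stands for
# an already-trained instance of this class, and the loader call below is
# an assumption about the ld module, not a confirmed API.
#
#   _imgs = ld.load_datasets(PATH, "test")   # hypothetical loader call
#   _net.test(_imgs, None, "ae")             # labels are None for autoencoders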
def train(self, fImgs, fLbls, fIterations, fName):
    '''Training algorithm. Can evolve according to your needs.

    INPUT  : Images set, labels set (None for autoencoders),
             number of iterations before stopping, name for save
    OUTPUT : Nothing'''

    if PREPROCESSING:
        fImgs, _key = ld.normalization(fName, fImgs)

    print "Training...\n"

    _gcost = []
    _gtime = []
    _done  = fIterations

    for i in xrange(fIterations):

        _gtime.append(tm.time())
        _gcost.append(0)

        for j in xrange(self.mCycle):

            _trn, _tst = self.cross_validation(j, fImgs)

            for k in xrange(len(_trn) / self.mBatchSize):

                if DEBUG:
                    print "Learning rates :", self.mEpsilon
                    print "Momentums :", self.mMomentum

                # Inputs and labels batch
                _in = self.build_batch(k, _trn)

                # Activation propagation
                _out = self.propagation(_in, DROPOUT)

                # Local error for each layer
                _err = self.layer_error(_out, _in, SPARSITY)

                # Gradient for stochastic gradient descent
                _wGrad, _bGrad = self.gradient(_err, _out)

                # Gradient checking
                if GRAD_CHECK:
                    print "Gradient checking ..."
                    self.gradient_checking(_in, _in, _wGrad, _bGrad)

                # Adapt learning rate
                if (i > 0 or j > 0 or k > 0) and ANGLE_DRIVEN:
                    self.angle_driven_approach(_wGrad)

                # Weight variations
                self.variations(_wGrad)

                # Update weights and biases
                self.update(_bGrad)

                # Adapt learning rate
                if AVG_GRADIENT:
                    self.average_gradient_approach(_wGrad)

            # Evaluate the network
            _cost = self.evaluate(_tst)
            _gcost[i] += _cost

            if DEBUG:
                print "Cost :", _cost

        # Iteration information
        _gtime[i] = tm.time() - _gtime[i]
        print "Iteration {0} in {1}s".format(i, _gtime[i])

        # Global cost for one cycle
        _gcost[i] /= self.mCycle
        print "Cost of iteration : {0}".format(_gcost[i])

        # Parameters
        print "Epsilon {0} Momentum {1}\n".format(self.mEpsilon, self.mMomentum)

        # Stop condition
        if i > 0 and abs(_gcost[i - 1] - _gcost[i]) < 0.001:
            _done = i + 1
            break
        elif self.mStop:
            _done = i + 1
            break

    dy.plot(xrange(_done), _gcost, fName, "_cost.png")
    dy.plot(xrange(_done), _gtime, fName, "_time.png")

    if fName is not None:
        self.save_output(fName, "train", fImgs)
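# A minimal end-to-end sketch (illustrative only): the constructor name and
# its arguments are assumptions; only the train()/test() signatures come
# from this file.
#
#   _net = AutoEncoders([784, 196, 784])     # hypothetical constructor
#   _net.train(_imgs, None, 100, "ae")       # stop after at most 100 iterations
#   _net.test(_timgs, None, "ae")            # reuse the same save name so the
#                                            # next stacked layer finds the output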