Example #1
    def test_RMSprop(self):
        # x, y, cost, b, data, lable and acceptThreshold are assumed to be
        # module-level test fixtures defined elsewhere in the test file.
        updater = opt.RMSprop()
        updates = updater(cost, [b])
        func = theano.function(inputs=[x, y],
                               outputs=cost,
                               updates=updates,
                               allow_input_downcast=True)
        # run 500 training steps on the prepared data/label pairs
        for i in range(500):
            func(data[i], lable[i])
        # after training, the cost at (1, 1) should drop below the threshold
        self.assertTrue(func(1, 1) < acceptThreshold)
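
The test above references x, y, cost, b, data, lable and acceptThreshold without defining them; they are presumably module-level fixtures of the test file. A minimal sketch of what such a setup could look like, assuming a one-parameter linear model trained to fit y = x (all names and values below are illustrative, not taken from the source):

    import numpy as np
    import theano
    import theano.tensor as T

    # hypothetical fixtures; the real test module defines its own
    x = T.scalar('x')
    y = T.scalar('y')
    b = theano.shared(np.float32(0.0), name='b')    # the single trainable parameter
    cost = (x * b - y) ** 2                         # squared error of the model x * b
    data = np.random.rand(500).astype('float32')    # inputs
    lable = data                                    # targets equal inputs, so b should learn ~1
    acceptThreshold = 0.01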
Example #2
    def reset(self):
        """
        For sequential layerout network, use append().
        
        To add more layers, the first layer is set with setInput().
        Network can do this, because it remember which layer to append to 
        by using member variable currentLayer.
        """
        self.batchsize = 128

        self.currentLayer = None
        self.inputLayers = []
        #layers = []

        self.debug = False

        self.X = T.tensor4()   # symbolic network input (4-D tensor)
        self.Y = T.matrix()    # symbolic target output (matrix)
        self.inputSizeChecker = None
        self.outputSizeChecker = None

        self.params = []
        self.costFunc = cost.CrossEntropy
        self.gradientOpt = opt.RMSprop()
        self.regulator = reg.Regulator()
        #self.regulator = None
        self.learner = None
        self.predicter = None

        self.cost = None  # will hold the symbolic cost expression (Theano tensor)

        self.nonlinear = None

        self.layerCounter = 0

        self.modelSavePath = './expdata'
        self.modelSavePrefix = 'saved_model_'
        self.modelSaveTimeTemplate = '%Y-%m-%d_%H-%M-%S'
        self.latestLinkName = 'LAST'
        self.modelSaveInterval = 20
        self.modelSaveCounter = 0
        self.lastSaveAbsolutePath = None
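
The reset() docstring above describes the intended workflow for a sequential-layout network: the first layer is set with setInput(), further layers are added with append(), and currentLayer tracks the attachment point. A minimal usage sketch of that workflow, assuming a Network class exposing these methods; the layer class names and constructor arguments are hypothetical and only illustrate the call order:

    net = Network()
    net.reset()                                        # restore defaults: batchsize, CrossEntropy cost, RMSprop, ...
    net.setInput(SomeInputLayer((128, 3, 32, 32)))     # first layer; becomes currentLayer
    net.append(SomeHiddenLayer(500))                   # appended after currentLayer
    net.append(SomeOutputLayer(10))                    # each append() advances currentLayer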