def __init__(self, module, dataset=None, init_std=0.01, noise_std=0.1, learning_rate=0.3):
    """Set up the trainer for *module*, enforcing tanh hidden layers.

    Only input/output modules and bias units are exempt from the check;
    any other hidden ``NeuronLayer`` must be a ``TanhLayer``.

    Raises:
        ValueError: if a hidden neuron layer is not tanh.
    """
    # Inspect only hidden modules — input and output layers are exempt.
    hidden_layers = (m for m in module.modules
                     if m not in module.inmodules and m not in module.outmodules)
    for layer in hidden_layers:
        if isinstance(layer, NeuronLayer) and not isinstance(layer, (BiasUnit, TanhLayer)):
            raise ValueError("Only tanh hidden layers are supported.")
    self.setData(dataset)
    self.init_std = init_std      # std-dev used for weight initialization
    self.noise_std = noise_std    # std-dev of injected training noise
    self.lrate = learning_rate
    self._crbms = []              # per-layer RBMs, filled in later
    self._modules = []
    Trainer.__init__(self, module)
def __init__(self, module, dataset=None, learningrate=0.01, lrdecay=1.0,
             momentum=0., verbose=False, batchlearning=False, weightdecay=0.):
    """Create a trainer for *module*, configuring its gradient descender.

    Args:
        module: the network whose ``params`` are trained.
        dataset: optional training dataset, handed to ``setData``.
        learningrate: step size (descender's ``alpha``).
        lrdecay: multiplicative learning-rate decay per step.
        momentum: momentum term for the descender.
        verbose: print progress during training.
        batchlearning: accumulate gradients over the whole batch.
        weightdecay: L2 penalty coefficient.
    """
    Trainer.__init__(self, module)
    self.setData(dataset)
    self.verbose = verbose
    self.batchlearning = batchlearning
    self.weightdecay = weightdecay
    self.epoch = 0
    self.totalepochs = 0
    # Build and configure the gradient descender that drives the updates.
    descender = GradientDescent()
    descender.alpha = learningrate
    descender.momentum = momentum
    descender.alphadecay = lrdecay
    descender.init(module.params)
    self.descent = descender
def __init__(self, module, dataset=None):
    """Attach *module* to the base Trainer and keep *dataset* on ``self.ds``.

    A fresh ``PGMLearner`` is created for later structure/parameter learning.
    """
    Trainer.__init__(self, module)
    # NOTE(review): the dataset is stored directly rather than via
    # setData() as the sibling trainers do — confirm this is intentional.
    self.ds = dataset
    self.learner = PGMLearner()