def __init__(self, layers, name, lr, momentum=0.0, n_samples=1, **kwargs):
    """Build the dreaming stack on top of the given layers.

    Creates a trainable 'solution' tensor shaped like the first layer's
    input (with the batch dimension replaced by `n_samples`), then wires
    up the mixin chain.  The initialization order of the parent classes
    is significant and must not be changed.
    """
    Notifier.__init__(self)
    self.momentum = momentum

    # Solution shape = first layer's input shape, but with the batch
    # dimension set to the requested number of samples.
    shape = list(layers[0].input_shape)
    shape[0] = n_samples
    self.sol_shape = shape

    # Start from uniform noise in [0, 1); `fx` is the project float dtype.
    initial = np.random.uniform(0, 1, size=self.sol_shape).astype(fx)
    self.solution = theano.shared(initial, name="solution")
    self.reset_solution()

    NNStackLess.__init__(self, layers, layers[0].input, name, **kwargs)
    DeepDreamer.__init__(self, self.solution, lr, **kwargs)
    NNFeatures.__init__(self)
    ParamsBinder.__init__(self, layers)
    NotifierForwarder.__init__(self, layers)
    SerializeStack.__init__(self)
    Monitor.__init__(self)

    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, **kwargs):
    """Set up a convolutional RBM layer from keyword configuration.

    Requires ``kwargs['convolutional'] == True``; injects the layer's
    symbolic input variables and then runs the mixin initialization
    chain, whose order is significant and must not be changed.
    Raises AssertionError if the configuration is not convolutional,
    KeyError if 'convolutional' or 'name' is missing.
    """
    # Explicit check instead of an `assert` statement so the validation
    # is not stripped away when running under `python -O`.  Raising
    # AssertionError keeps the exception type callers already expect.
    if kwargs["convolutional"] != True:
        raise AssertionError("Set 'convolutional = True' in configuration.")

    kwargs['variables'] = create_in_conv(kwargs['name'])

    Notifier.__init__(self)
    UnitsCRBMSigmoid.__init__(self)
    CRBM.__init__(self, **kwargs)
    CostPCD.__init__(self, **kwargs)
    SparsityLeeConv.__init__(self, **kwargs)
    MaxNormRegular.__init__(self, **kwargs)
    WeightRegular.__init__(self, **kwargs)
    ActivationCrop.__init__(self, **kwargs)
    Approximator.__init__(self, **kwargs)
    Monitor.__init__(self)
    SerializeLayer.__init__(self)

    self.notify(Notifier.MAKE_FINISHED)
    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, **kwargs):
    """Set up a convolutional sigmoid CNN layer with batch norm.

    Requires ``kwargs['convolutional'] == True``; injects the layer's
    symbolic input/target variables and then runs the mixin
    initialization chain, whose order is significant and must not be
    changed.  Raises AssertionError if the configuration is not
    convolutional, KeyError if 'convolutional' or 'name' is missing.
    """
    # Explicit check instead of an `assert` statement so the validation
    # is not stripped away when running under `python -O`.  Raising
    # AssertionError keeps the exception type callers already expect.
    if kwargs["convolutional"] != True:
        raise AssertionError("Set 'convolutional = True' in configuration.")

    kwargs['variables'] = create_in_trg_conv(kwargs['name'])

    Notifier.__init__(self)
    Plotter.__init__(self)
    UnitsCNNSigmoid.__init__(self, **kwargs)
    UnitsDropOut.__init__(self, **kwargs)
    CNN_BN.__init__(self, **kwargs)
    CostCrossEntropy.__init__(self, **kwargs)
    WeightRegular.__init__(self, **kwargs)
    MaxNormRegular.__init__(self, **kwargs)
    SparsityLeeConv.__init__(self, **kwargs)
    SerializeLayer.__init__(self)
    Monitor.__init__(self)

    self.notify(Notifier.MAKE_FINISHED)
    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, **kwargs):
    """Build an LSTM layer with a sigmoid output activation.

    Injects the layer's symbolic input/target variables, then runs the
    mixin initialization chain; the order of the parent `__init__`
    calls is significant and must not be changed.  Weight-decay
    regularization is applied to all LSTM weight matrices.
    """
    kwargs['variables'] = create_in_trg(kwargs['name'])

    Notifier.__init__(self)
    UnitsNNTanh.__init__(self)
    LSTM.__init__(self, act_fun_out=lambda x: T.nnet.sigmoid(x), **kwargs)
    CostLogLikelihoodBinomial.__init__(self, **kwargs)
    SparsityLee.__init__(self, **kwargs)

    # All LSTM weight matrices (gate, cell and output weights) are
    # subject to weight regularization; biases are excluded.
    regularized = [self.Wxc, self.Wxi, self.Wxf, self.Why, self.Whmo,
                   self.Who, self.Whc, self.Wxo, self.Whi, self.Whf]
    WeightRegular.__init__(self, wl_targets=regularized, **kwargs)

    SerializeLayer.__init__(self)
    Monitor.__init__(self)
    Plotter.__init__(self)

    self.notify(Notifier.MAKE_FINISHED)
    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, **kwargs):
    """Build a gated RNN layer with a sigmoid output activation.

    Injects the layer's symbolic input/target variables, then runs the
    mixin initialization chain; the order of the parent `__init__`
    calls is significant and must not be changed.  Weight-decay
    regularization is applied to all recurrent weight matrices.
    """
    kwargs['variables'] = create_in_trg(kwargs['name'])

    Notifier.__init__(self)
    UnitsNNTanh.__init__(self)
    RNN_Gated.__init__(self, act_fun_out=lambda x: T.nnet.sigmoid(x),
                       **kwargs)
    CostCrossEntropy.__init__(self, **kwargs)
    SparsityLee.__init__(self, **kwargs)

    # Input-to-hidden, hidden-to-hidden and gate weight matrices are
    # subject to weight regularization; biases are excluded.
    regularized = [self.Wxh, self.Wxr, self.Wxu,
                   self.Whh, self.Why, self.Whr, self.Whu]
    WeightRegular.__init__(self, wl_targets=regularized, **kwargs)

    SerializeLayer.__init__(self)
    Monitor.__init__(self)
    Plotter.__init__(self)

    self.notify(Notifier.MAKE_FINISHED)
    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, **kwargs):
    """Set up a convolutional ReLU CNN layer with batch norm.

    Requires ``kwargs['convolutional'] == True`` and a 'downsample_out'
    entry; injects the layer's symbolic input/target variables and then
    runs the mixin initialization chain, whose order is significant and
    must not be changed.  Raises AssertionError if the configuration is
    not convolutional, ValueError if 'downsample_out' is missing,
    KeyError if 'convolutional' or 'name' is missing.
    """
    # Explicit check instead of an `assert` statement so the validation
    # is not stripped away when running under `python -O`.  Raising
    # AssertionError keeps the exception type callers already expect.
    if kwargs["convolutional"] != True:
        raise AssertionError("Set 'convolutional = True' in configuration.")

    kwargs['variables'] = create_in_trg_conv(kwargs['name'])

    Notifier.__init__(self)
    Plotter.__init__(self)
    try:
        UnitsCNNReLU.__init__(self, downsample_out=kwargs['downsample_out'])
    except KeyError:
        raise ValueError("Entry 'downsample_out' in kwargs needed.")
    UnitsDropOut.__init__(self, **kwargs)
    CNN_BN.__init__(self, **kwargs)
    # NOTE(review): unlike the sibling sigmoid layer, **kwargs is not
    # forwarded to CostCrossEntropy here -- confirm this is intentional.
    CostCrossEntropy.__init__(self)
    WeightRegular.__init__(self, **kwargs)
    SparsityLeeConv.__init__(self, **kwargs)
    MaxNormRegular.__init__(self, **kwargs)
    SerializeLayer.__init__(self)
    Monitor.__init__(self)

    self.notify(Notifier.MAKE_FINISHED)
    self.notify(Notifier.COMPILE_FUNCTIONS)
    self.notify(Notifier.REGISTER_PLOTTING)
def __init__(self, cost, params, variables, data, batch_size, lr,
             momentum=0., grad_clip=None, nan_protection=True,
             notifier=None):
    """Set up a gradient-descent trainer for the given cost.

    Allocates shared gradient accumulators (one per parameter), stores
    the learning rate and momentum as shared scalars, validates that
    each symbolic variable has a matching data set, registers data
    callbacks on the notifier, and compiles the training function.

    :param cost: symbolic cost expression to minimize
    :param params: list of shared parameter variables to optimize
    :param variables: dict of symbolic input variables (or None)
    :param data: dict of data sets keyed like `variables` (or None)
    :param batch_size: mini-batch size used by the update function
    :param lr: initial learning rate
    :param momentum: initial momentum coefficient
    :param grad_clip: gradient clipping threshold, or None to disable
    :param nan_protection: whether updates guard against NaN gradients
    :param notifier: Notifier to reuse; a fresh one is created if None
    """
    self.params = params
    LOGGER.debug("Optimizing parameter(s): {0}".format(params))
    param_count = np.sum(np.asarray([np.prod(p.get_value().shape)
                                     for p in params]))
    LOGGER.debug("Number parameter(s): {0}".format(param_count))
    LOGGER.debug("Given variable(s): {0}".format(variables))
    if data is not None:
        LOGGER.debug("Given data: {0}".format(
            [[k, shape(data[k])] for k in data.keys()]))

    # One zero-initialized shared accumulator per parameter, matching
    # each parameter's dtype, used to store the current gradients.
    self.gparams = [theano.shared(x.get_value()
                                  * np.cast[x.get_value().dtype](0.0),
                                  name="%s_grad" % x.name)
                    for x in self.params]

    self.cost = cost
    if variables is None:
        self.variables = {}
    else:
        self.variables = variables
    self.data = data
    self.lr = theano.shared(np.cast[fx](lr))
    self.mom = theano.shared(np.cast[fx](momentum))
    self.grad_clip = grad_clip
    # Log through the module logger instead of a bare `print` statement,
    # consistent with the other diagnostics in this constructor.
    LOGGER.debug("grad_clip: {0}".format(grad_clip))
    self.nan_protection = nan_protection
    self.batch_size = batch_size

    if data is not None:
        assert len(data) == len(self.variables), \
            "You assigned {0} symbolic variable(s), but you provide {1} data set(s): \nVariables: {2}\nDatasets: {3}" \
            .format(len(self.variables), len(data), self.variables,
                    self.data.keys())

    if notifier is None:
        notifier = Notifier()
    notifier.callback_del(Notifier.GET_DATA)
    # Iterate self.variables (not the raw argument): when `variables`
    # is None it was normalized to {} above, and iterating the raw
    # argument would raise AttributeError on None.
    for key in self.variables.keys():
        notifier.callback_add(partial(self.get_data_callback, key=key),
                              Notifier.GET_DATA)
    self.notifier = notifier

    self.train_model = self.init_updates()