def on_epoch_begin(self, epoch, logs={}):
    # Re-optimize the KDE bandwidth (log-variance) at the start of each epoch.
    vals = self.nlayerinput(self.entropy_train_data)
    dists = self.get_dists(vals)
    # Add a huge constant to the diagonal so each point is excluded from
    # its own density estimate (leave-one-out).
    dists += 10e20 * np.eye(dists.shape[0])
    r = scipy.optimize.minimize(self.obj,
                                K.get_value(self.kdelayer.logvar).flat[0],
                                jac=self.jac,
                                args=(dists,))
    best_val = r.x.flat[0]
    K.set_value(self.kdelayer.logvar, best_val)
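# Illustrative sketch (an assumption, not repository code): one plausible form of
# the objective minimized above is the negative leave-one-out log-likelihood of a
# Gaussian KDE over the hidden activations, as a function of the log bandwidth.
# The names pairwise_sq_dists and loo_kde_nll are hypothetical stand-ins for
# self.get_dists and self.obj.
import numpy as np
import scipy.optimize
from scipy.special import logsumexp

def pairwise_sq_dists(x):
    # Squared Euclidean distances between all rows of x, shape (n, n).
    sq_norms = np.sum(x ** 2, axis=1)
    return sq_norms[:, None] + sq_norms[None, :] - 2.0 * x.dot(x.T)

def loo_kde_nll(logvar, dists, dims):
    # Negative leave-one-out log-likelihood of a Gaussian KDE with variance
    # exp(logvar); dists is assumed to already carry a huge constant on its
    # diagonal, so each point is effectively excluded from its own estimate.
    var = np.exp(logvar)
    n = dists.shape[0]
    log_probs = (logsumexp(-dists / (2.0 * var), axis=1)
                 - np.log(n - 1)
                 - 0.5 * dims * np.log(2.0 * np.pi * var))
    return -np.mean(log_probs)

# Example usage, mirroring the minimize() call above:
#   dists = pairwise_sq_dists(vals) + 10e20 * np.eye(len(vals))
#   r = scipy.optimize.minimize(loo_kde_nll, x0=0.0, args=(dists, vals.shape[1]))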
def on_epoch_begin(self, epoch, logs={}):
    r = scipy.optimize.minimize(self.obj, K.get_value(self.noiselayer.logvar), jac=self.jac)
    best_val = r.x[0]
    cval = K.get_value(self.noiselayer.logvar)
    max_var = 1.0 + cval
    if best_val > max_var:
        # Don't raise it too fast, so that gradient information is preserved.
        best_val = max_var
    K.set_value(self.noiselayer.logvar, best_val)
def build(self, input_shape):
    super(GaussianNoise2, self).build(input_shape)
    K.set_value(self.logvar, self.init_logvar)
    #K.set_value(self.alpha, self.init_alpha)
    if self.is_trainable:
        self.trainable_weights = [self.logvar, ]
    else:
        self.trainable_weights = []
    if self.mi_regularizer:
        self.add_loss(self.mi_regularizer())
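# Illustrative sketch (an assumption, not repository code): a minimal layer in the
# same style as GaussianNoise2, showing how a scalar log-variance weight is
# typically used in call() to add zero-mean Gaussian noise with variance
# exp(logvar) during training. The class name and default value are hypothetical.
from keras import backend as K
from keras.layers import Layer  # or keras.engine.topology.Layer in older Keras

class SimpleGaussianNoiseLayer(Layer):
    def __init__(self, init_logvar=-10.0, **kwargs):
        super(SimpleGaussianNoiseLayer, self).__init__(**kwargs)
        self.init_logvar = init_logvar
        self.logvar = K.variable(0.0)

    def build(self, input_shape):
        super(SimpleGaussianNoiseLayer, self).build(input_shape)
        K.set_value(self.logvar, self.init_logvar)
        self.trainable_weights = [self.logvar, ]

    def call(self, x, training=None):
        def noised():
            # Noise std is exp(0.5 * logvar), so logvar parameterizes the variance.
            return x + K.exp(0.5 * self.logvar) * K.random_normal(shape=K.shape(x))
        # Add noise only in the training phase; pass inputs through at test time.
        return K.in_train_phase(noised, x, training=training)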
def jac(logvar):
    # Gradient of the training loss w.r.t. the noise log-variance, evaluated
    # at `logvar` by temporarily overwriting the layer's variable.
    v = K.get_value(self.noiselayer.logvar)
    K.set_value(self.noiselayer.logvar, logvar.flat[0])
    r = np.atleast_2d(np.array(jacfunc([self.traindata.X, self.traindata.Y, sampleweights, 1])))[0]
    K.set_value(self.noiselayer.logvar, v)
    return r
def obj(logvar):
    # Training loss as a function of the noise log-variance, evaluated by
    # temporarily overwriting the layer's variable and then restoring it.
    v = K.get_value(self.noiselayer.logvar)
    K.set_value(self.noiselayer.logvar, logvar.flat[0])
    r = lossfunc([self.traindata.X, self.traindata.Y, sampleweights, 1])[0]
    K.set_value(self.noiselayer.logvar, v)
    return r
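# Illustrative sketch (an assumption, not repository code): lossfunc and jacfunc
# used by jac()/obj() above can be compiled as Keras backend functions mapping
# (inputs, targets, sample weights, learning-phase flag) to the model's total
# loss and to its gradient w.r.t. the noise log-variance. The helper name
# make_loss_and_jac_functions is hypothetical.
def make_loss_and_jac_functions(model, noiselayer):
    # Input order matches the calls above: X, Y, sample weights, learning phase.
    inputs = model.inputs + model.targets + model.sample_weights + [K.learning_phase()]
    lossfunc = K.function(inputs, [model.total_loss])
    jacfunc = K.function(inputs, K.gradients(model.total_loss, [noiselayer.logvar]))
    return lossfunc, jacfunc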
def build(self, input_shape):
    super(KDEParamLayer, self).build(input_shape)
    K.set_value(self.logvar, self.init_logvar)
    # The KDE bandwidth is not trained by gradient descent; it is set
    # externally (e.g., by the epoch-begin callback above).
    self.trainable_weights = []