def on_epoch_begin(self, epoch, logs=None):
    '''Set the optimizer's learning rate from the schedule at the start of each epoch.

    # Arguments
        epoch: integer, index of the current epoch.
        logs: dict, currently unused (kept for callback API compatibility).
            Default changed from a mutable `{}` to `None` to avoid the
            shared-mutable-default pitfall; behavior is unchanged since
            `logs` is never read.

    # Raises
        ValueError: if the optimizer has no `lr` attribute, or if the
            `schedule` function does not return a float.
    '''
    # Explicit raises instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if not hasattr(self.model.optimizer, 'lr'):
        raise ValueError('Optimizer must have a "lr" attribute.')
    lr = self.schedule(epoch)
    # isinstance instead of `type(lr) == float`: also accepts float
    # subclasses (e.g. numpy float64), which behave identically here.
    if not isinstance(lr, float):
        raise ValueError('The output of the "schedule" function should be float.')
    K.set_value(self.model.optimizer.lr, lr)
def set_weights(self, weights):
    '''Set the weights of the unit.

    # Arguments
        weights: a list of numpy arrays. The number of arrays and their
            shapes must match the weights of the unit (i.e. it should
            match the output of `get_weights`).

    # Raises
        ValueError: if the number of provided arrays, or any array's
            shape, does not match the unit's weights. (`ValueError` is a
            subclass of `Exception`, so existing `except Exception`
            callers are unaffected.)
    '''
    # Explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if len(self.params) != len(weights):
        raise ValueError('Provided weight array does not match unit weights ('
                         + str(len(self.params)) + ' unit params vs. '
                         + str(len(weights)) + ' provided weights)')
    for p, w in zip(self.params, weights):
        # Fetch the current value once instead of twice (the original
        # called K.get_value(p) again to build the error message).
        param_shape = K.get_value(p).shape
        if param_shape != w.shape:
            raise ValueError('Weight shape %s not compatible with weight shape %s.'
                             % (param_shape, w.shape))
        K.set_value(p, w)
def set_weights(self, weights):
    '''Set the weights of the unit.

    # Arguments
        weights: a list of numpy arrays. The number of arrays and their
            shapes must match the weights of the unit (i.e. it should
            match the output of `get_weights`).

    # Raises
        ValueError: if the number of provided arrays, or any array's
            shape, does not match the unit's weights. (`ValueError` is a
            subclass of `Exception`, so existing `except Exception`
            callers are unaffected.)
    '''
    # Explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if len(self.params) != len(weights):
        raise ValueError('Provided weight array does not match unit weights ('
                         + str(len(self.params)) + ' unit params vs. '
                         + str(len(weights)) + ' provided weights)')
    for p, w in zip(self.params, weights):
        # Fetch the current value once instead of twice (the original
        # called K.get_value(p) again to build the error message).
        param_shape = K.get_value(p).shape
        if param_shape != w.shape:
            raise ValueError('Weight shape %s not compatible with weight shape %s.'
                             % (param_shape, w.shape))
        K.set_value(p, w)
def on_epoch_begin(self, epoch, logs=None):
    '''Set the optimizer's learning rate from the schedule at the start of each epoch.

    # Arguments
        epoch: integer, index of the current epoch.
        logs: dict, currently unused (kept for callback API compatibility).
            Default changed from a mutable `{}` to `None` to avoid the
            shared-mutable-default pitfall; behavior is unchanged since
            `logs` is never read.

    # Raises
        ValueError: if the optimizer has no `lr` attribute, or if the
            `schedule` function does not return a float.
    '''
    # Explicit raises instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if not hasattr(self.model.optimizer, 'lr'):
        raise ValueError('Optimizer must have a "lr" attribute.')
    lr = self.schedule(epoch)
    # isinstance instead of `type(lr) == float`: also accepts float
    # subclasses (e.g. numpy float64), which behave identically here.
    if not isinstance(lr, float):
        raise ValueError('The output of the "schedule" function should be float.')
    K.set_value(self.model.optimizer.lr, lr)
def set_state(self, value_list):
    '''Set the state of the unit's update variables.

    # Arguments
        value_list: list of values, one per entry of `self.updates`;
            must have the same length as `self.updates`.

    # Raises
        ValueError: if `value_list` length does not match the number of
            updates. (The original used a bare `assert`, which is
            stripped under `python -O` and carries no message.)
    '''
    if len(self.updates) != len(value_list):
        raise ValueError('Provided value list does not match unit updates ('
                         + str(len(self.updates)) + ' updates vs. '
                         + str(len(value_list)) + ' provided values)')
    for u, v in zip(self.updates, value_list):
        # u[0] is presumably the state variable of an (variable, new_value)
        # update pair — TODO confirm against where self.updates is built.
        K.set_value(u[0], v)