def dump_model_parameters(model):
    # Print the current values of all of the model's parameters.

    assert model
    import keras.backend as K

    print(K.batch_get_value(model.weights))
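
A minimal usage sketch (the tiny Sequential model below is an assumption for illustration, not part of the original example):

from keras.models import Sequential
from keras.layers import Dense

# Hypothetical two-layer model whose parameters we want to inspect
model = Sequential([Dense(4, input_shape=(3,)), Dense(1)])

# Prints a list of NumPy arrays, one per weight variable (kernels and biases)
dump_model_parameters(model)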
Example #2
def weights_match(model1, model2):
    """Determine whether the weights of two Keras models match.

    Args:
        model1: First model
        model2: Second model

    Returns:
        result: True if every weight array matches

    """
    import numpy as np
    from keras import backend

    # Fetch all weight values in a single pass per model
    weights_m1 = backend.batch_get_value(model1.weights)
    weights_m2 = backend.batch_get_value(model2.weights)

    # Models with different numbers of weight arrays cannot match
    if len(weights_m1) != len(weights_m2):
        return False

    # Evaluate element-wise equivalence of every weight array
    return all(np.array_equal(w, ow) for w, ow in zip(weights_m1, weights_m2))
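
A quick way to exercise weights_match is to compare a model against a fresh clone before and after copying weights across. A hedged sketch, assuming keras.models.clone_model (which rebuilds the topology with newly initialized weights):

from keras.models import Sequential, clone_model
from keras.layers import Dense

model_a = Sequential([Dense(2, input_shape=(3,))])
model_b = clone_model(model_a)          # same topology, fresh random weights

print(weights_match(model_a, model_b))  # almost certainly False
model_b.set_weights(model_a.get_weights())
print(weights_match(model_a, model_b))  # True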
Example #3
	def _get_weight_names_and_values_from_symbolic(self, symbolic_weights):
		import keras.backend as K				# pylint: disable=import-error
		# Fetch all weight values in a single backend call
		weight_values = K.batch_get_value(symbolic_weights)
		# Use each variable's name when present, else a positional fallback
		weight_names = [
			str(w.name) if hasattr(w, 'name') and w.name else 'param_{}'.format(i)
			for i, w in enumerate(symbolic_weights)
		]
		return weight_names, weight_values
Example #4
 def XXXget_layer_weights(self):
     """
     Collect the weight values of every layer of the current model.
     """
     result = {}
     for jlayer, layer in enumerate(self.model.layers):
         # Gather the layer's trainable and non-trainable variables
         symbolic_weights = layer.trainable_weights + layer.non_trainable_weights
         # Fetch all of this layer's values in a single backend call
         weight_values = K.batch_get_value(symbolic_weights)
         result[jlayer] = list(weight_values)
     return result
Example #5
 def apply_ema_weights(self):
     """Back up the original model weights, then apply the averaged (EMA) weights to the model.
     """
     self.old_weights = K.batch_get_value(self.model.weights)
     ema_weights = K.batch_get_value(self.ema_weights)
     K.batch_set_value(zip(self.model.weights, ema_weights))
Example #6
 def initialize(self):
     """Initialize ema_weights to match the original model's initialization.
     """
     self.old_weights = K.batch_get_value(self.model.weights)
     K.batch_set_value(zip(self.ema_weights, self.old_weights))
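
The two methods above only make sense inside a helper that holds both the model and a parallel list of EMA shadow variables. A minimal sketch of such a container, where the class name, constructor, and reset_old_weights counterpart are all assumptions; initialize and apply_ema_weights from above would drop in unchanged:

import keras.backend as K

class EMAHelper:
    def __init__(self, model):
        self.model = model
        # One shadow variable per model weight, matching shapes
        self.ema_weights = [K.zeros(K.int_shape(w)) for w in model.weights]
        self.old_weights = None

    # initialize() and apply_ema_weights() as defined above

    def reset_old_weights(self):
        # Restore the weights backed up by apply_ema_weights (assumed counterpart)
        K.batch_set_value(zip(self.model.weights, self.old_weights))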
Example #7
 def get_weights(self):
     # Collect the weight variables of every cell that is a Keras Layer,
     # then fetch all of their values in a single backend call.
     weights = []
     for cell in self.cells:
         if isinstance(cell, Layer):
             weights += cell.weights
     return K.batch_get_value(weights)
Example #8
 def get_weights(self):
     # ref: https://github.com/keras-team/keras/blob/c10d24959b0ad615a21e671b180a1b2466d77a2b/keras/engine/base_layer.py#L21
     params = self.weights
     weights = K.batch_get_value(params)
     # Scale the first weight array by the current tree_weight before returning
     return weights[0] * K.get_value(self.tree_weight), weights[1]
Example #9
import warnings

import toolz as t  # assumed import: t.valmap below matches toolz.valmap
from keras import backend as K
from keras import optimizers
from keras import __version__ as keras_version
from keras.models import model_from_config
from keras.legacy import models as legacy_models


def reduce_model(model):
    """Returns a dict representing the state of the model.

    The dict contains:
        - the model's configuration (topology)
        - the model's weights
        - the model's optimizer's state (if any)

    Thus the model can be reinstantiated from
    the returned dict, without any of the code
    used for model definition or training.

    # Arguments
        model: Keras model instance.
    """
    def get_config(obj):
        # if obj is a serializable Keras class instance
        # e.g. optimizer, layer
        if hasattr(obj, 'get_config'):
            return {
                'class_name': obj.__class__.__name__,
                'config': obj.get_config()
            }
        return obj

    attrs = {}
    attrs['keras_version'] = str(keras_version).encode('utf8')
    attrs['backend'] = K.backend().encode('utf8')
    round_tripped_model = model_from_config(get_config(model))
    attrs['model_config'] = get_config(round_tripped_model)

    model_weights = {}
    attrs['model_weights'] = model_weights
    if legacy_models.needs_legacy_support(model):
        model_layers = legacy_models.legacy_sequential_layers(model)
    else:
        model_layers = model.layers

    model_weights['layer_names'] = [
        layer.name.encode('utf8') for layer in model_layers
    ]
    model_weights['backend'] = K.backend().encode('utf8')
    model_weights['keras_version'] = str(keras_version).encode('utf8')

    for layer in model_layers:
        g = {}
        model_weights[layer.name] = g
        symbolic_weights = layer.weights
        weight_values = K.batch_get_value(symbolic_weights)
        weight_names = []
        g['weight_names'] = weight_names
        for i, (w, val) in enumerate(zip(symbolic_weights, weight_values)):
            if hasattr(w, 'name') and w.name:
                name = str(w.name)
            else:
                name = 'param_' + str(i)
            weight_names.append(name)
            g[name] = val

    if hasattr(model, 'optimizer'):
        if isinstance(model.optimizer, optimizers.TFOptimizer):
            warnings.warn(
                'TensorFlow optimizers do not '
                'make it possible to access '
                'optimizer attributes or optimizer state '
                'after instantiation. '
                'As a result, we cannot save the optimizer '
                'as part of the model save file. '
                'You will have to compile your model again after loading it. '
                'Prefer using a Keras optimizer instead '
                '(see keras.io/optimizers).')
        else:
            attrs['training_config'] = t.valmap(
                get_config, {
                    'optimizer_config': model.optimizer,
                    'loss': model.loss,
                    'metrics': model.metrics,
                    'sample_weight_mode': model.sample_weight_mode,
                    'loss_weights': model.loss_weights,
                })

            # Save optimizer weights.
            symbolic_weights = getattr(model.optimizer, 'weights')
            if symbolic_weights:
                optimizer_weights = {}
                attrs['optimizer_weights'] = optimizer_weights
                weight_values = K.batch_get_value(symbolic_weights)
                for i, (w, val) in enumerate(
                        zip(symbolic_weights, weight_values)):
                    # Theano's default name for unnamed variables is
                    # '/variable', so treat that name as missing
                    if K.backend() == 'theano':
                        if hasattr(w, 'name') and w.name != "/variable":
                            name = str(w.name)
                        else:
                            name = 'param_' + str(i)
                    else:
                        if hasattr(w, 'name') and w.name:
                            name = str(w.name)
                        else:
                            name = 'param_' + str(i)
                    optimizer_weights[name] = val
    return attrs
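
Since reduce_model returns only plain Python containers and NumPy arrays, the natural follow-up is serialization without HDF5. A minimal sketch, assuming pickle as the container format and a compiled Keras model named model:

import pickle

# Capture config, weights, and (if present) optimizer state as a plain dict
attrs = reduce_model(model)

# The dict holds only builtin types and NumPy arrays, so it pickles cleanly
with open('model_state.pkl', 'wb') as f:
    pickle.dump(attrs, f)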