Example 1
from keras.optimizers import optimizer_from_config


def clone_optimizer(optimizer):
    # Requires Keras 1.0.7 or newer, since get_config had breaking
    # changes in earlier versions.
    params = dict(optimizer.get_config())
    config = {
        'class_name': optimizer.__class__.__name__,
        'config': params,
    }
    clone = optimizer_from_config(config)
    return clone
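For context, a minimal sketch of the config round-trip this relies on, assuming a Keras 1.x-style keras.optimizers module (the SGD hyperparameter names shown are illustrative):

from keras.optimizers import SGD, optimizer_from_config

# get_config() describes the optimizer's hyperparameters as a plain dict,
# e.g. {'lr': 0.01, 'momentum': 0.9, 'decay': 0.0, 'nesterov': False}.
sgd = SGD(lr=0.01, momentum=0.9)
config = {'class_name': sgd.__class__.__name__, 'config': sgd.get_config()}

# optimizer_from_config builds a fresh optimizer from that description:
# same hyperparameters, none of the accumulated internal state.
clone = optimizer_from_config(config)
assert clone.__class__ is sgd.__class__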
Example 2
from keras.optimizers import get, optimizer_from_config


def clone_optimizer(optimizer):
    # Plain strings such as "adam" are resolved via keras.optimizers.get.
    if isinstance(optimizer, str):
        return get(optimizer)
    params = dict(optimizer.get_config())
    config = {
        "class_name": optimizer.__class__.__name__,
        "config": params
    }
    clone = optimizer_from_config(config)
    return clone
Example 3
def clone_optimizer(optimizer):
    from keras.optimizers import get, optimizer_from_config
    if isinstance(optimizer, str):
        return get(optimizer)
    params = dict(optimizer.get_config())
    config = {
        'class_name': optimizer.__class__.__name__,
        'config': params,
    }
    clone = optimizer_from_config(config)
    return clone
Example 4
from keras import optimizers


def clone_optimizer(optimizer):
    if isinstance(optimizer, str):
        return optimizers.get(optimizer)
    # Requires Keras 1.0.7 or newer, since get_config had breaking
    # changes in earlier versions.
    params = dict(optimizer.get_config())
    config = {
        'class_name': optimizer.__class__.__name__,
        'config': params,
    }
    if hasattr(optimizers, 'optimizer_from_config'):
        # COMPATIBILITY: Keras < 2.0
        clone = optimizers.optimizer_from_config(config)
    else:
        # Keras >= 2.0 renamed this helper to optimizers.deserialize.
        clone = optimizers.deserialize(config)
    return clone
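A hedged usage sketch for the variant above; it assumes `optimizers` is the imported keras.optimizers module and that Adam is available under either Keras version:

from keras.optimizers import Adam

original = Adam(lr=1e-3)
copy = clone_optimizer(original)

# The clone is a distinct object with identical hyperparameters.
assert copy is not original
assert copy.get_config() == original.get_config()

# Strings pass straight through to keras.optimizers.get.
rmsprop = clone_optimizer('rmsprop')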
Example 5
import warnings

import numpy as np

from keras import backend as K
from keras.models import Sequential, model_from_config
from keras.optimizers import optimizer_from_config


def load_model(data):
    # instantiate model
    model_config = data['model_config']
    if model_config is None:
        raise ValueError('No model found in config file.')

    model = model_from_config(model_config)
    if hasattr(model, 'flattened_layers'):
        # Support for legacy Sequential/Merge behavior.
        flattened_layers = model.flattened_layers
    else:
        flattened_layers = model.layers

    filtered_layers = []
    for layer in flattened_layers:
        weights = layer.weights
        if weights:
            filtered_layers.append(layer)

    flattened_layers = filtered_layers

    layer_names = data['layer_names']
    filtered_layer_names = []
    for name in layer_names:
        weight_dict = data['model_weights'][name]
        weight_names = weight_dict['weight_names']
        if len(weight_names):
            filtered_layer_names.append(name)
    layer_names = filtered_layer_names
    if len(layer_names) != len(flattened_layers):
        raise ValueError('You are trying to load a weight file '
                         'containing ' + str(len(layer_names)) +
                         ' layers into a model with ' +
                         str(len(flattened_layers)) + ' layers.')

    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        weight_dict = data['model_weights'][name]
        weight_names = weight_dict['weight_names']
        weight_values = weight_dict['weight_values']
        layer = flattened_layers[k]
        symbolic_weights = layer.weights
        if len(weight_values) != len(symbolic_weights):
            raise ValueError('Layer #' + str(k) + ' (named "' + layer.name +
                             '" in the current model) was found to '
                             'correspond to layer ' + name +
                             ' in the save file. '
                             'However the new layer ' + layer.name +
                             ' expects ' + str(len(symbolic_weights)) +
                             ' weights, but the saved weights have ' +
                             str(len(weight_values)) + ' elements.')
        if layer.__class__.__name__ == 'Convolution1D':
            # This is for backwards compatibility with
            # the old Conv1D weights format.
            w = weight_values[0]
            shape = w.shape
            if shape[:2] != (layer.filter_length,
                             1) or shape[3] != layer.nb_filter:
                # Legacy shape:
                # (self.nb_filter, input_dim, self.filter_length, 1)
                assert shape[0] == layer.nb_filter and shape[2:] == (
                    layer.filter_length, 1)
                w = np.transpose(w, (2, 3, 1, 0))
                weight_values[0] = w
        weight_value_tuples += zip(symbolic_weights, weight_values)
    K.batch_set_value(weight_value_tuples)

    # instantiate optimizer
    training_config = data.get('training_config')
    if training_config is None:
        warnings.warn('No training configuration found in save file: '
                      'the model was *not* compiled. Compile it manually.')
        return model
    optimizer_config = training_config['optimizer_config']
    optimizer = optimizer_from_config(optimizer_config)

    # recover loss functions and metrics
    loss = training_config['loss']
    metrics = training_config['metrics']
    sample_weight_mode = training_config['sample_weight_mode']
    loss_weights = training_config['loss_weights']

    # compile model
    model.compile(optimizer=optimizer,
                  loss=loss,
                  metrics=metrics,
                  loss_weights=loss_weights,
                  sample_weight_mode=sample_weight_mode)

    # set optimizer weights
    if 'optimizer_weights' in data:
        # build train function (to get weight updates)
        if isinstance(model, Sequential):
            model.model._make_train_function()
        else:
            model._make_train_function()
        optimizer_weights_dict = data['optimizer_weights']
        optimizer_weight_values = optimizer_weights_dict['weight_values']
        model.optimizer.set_weights(optimizer_weight_values)
    return model
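For reference, a sketch of the mapping this load_model expects, inferred from the lookups in the function body; every name, shape, and config key below is an illustrative assumption, not a documented format:

import numpy as np

# Hypothetical input, mirroring the HDF5 layout written by model.save()
# but with the config entries already parsed into dicts.
data = {
    'model_config': {
        'class_name': 'Sequential',
        'config': [{'class_name': 'Dense',
                    'config': {'name': 'dense_1', 'output_dim': 1,
                               'input_dim': 4}}],
    },
    'layer_names': ['dense_1'],
    'model_weights': {
        'dense_1': {
            'weight_names': ['dense_1_W', 'dense_1_b'],
            'weight_values': [np.zeros((4, 1)), np.zeros((1,))],
        },
    },
    # 'training_config' and 'optimizer_weights' are optional; without
    # 'training_config' the model is returned uncompiled.
}
model = load_model(data)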