Code example #1
File: test_topology.py Project: Kartik97/keras
def test_load_layers():
    # np/K/saving are module-level imports in the original test file,
    # inlined here so the snippet is self-contained.
    import numpy as np
    from keras import backend as K
    from keras.engine import saving
    from keras.layers import ConvLSTM2D, TimeDistributed, Bidirectional, Conv2D, Input
    from keras.models import Model

    if K.backend() == 'tensorflow' or K.backend() == 'cntk':
        inputs = Input(shape=(10, 20, 20, 1))
    else:
        inputs = Input(shape=(10, 1, 20, 20))
    td_conv = TimeDistributed(Conv2D(15, (5, 5)))(inputs)
    bi_convlstm2d = Bidirectional(ConvLSTM2D(10, (3, 3)), merge_mode='concat')(td_conv)
    model = Model(inputs=inputs, outputs=bi_convlstm2d)

    weight_value_tuples = []

    # TimeDistributed Conv2D layer
    # use 'channels_first' data format to check that the function is being called correctly for Conv2D
    # old: (filters, stack_size, kernel_rows, kernel_cols)
    # new: (kernel_rows, kernel_cols, stack_size, filters)
    weight_tensor_td_conv_old = list()
    weight_tensor_td_conv_old.append(np.zeros((15, 1, 5, 5)))
    weight_tensor_td_conv_old.append(np.zeros((15,)))
    td_conv_layer = model.layers[1]
    td_conv_layer.layer.data_format = 'channels_first'
    weight_tensor_td_conv_new = saving.preprocess_weights_for_loading(
        td_conv_layer,
        weight_tensor_td_conv_old,
        original_keras_version='1')
    symbolic_weights = td_conv_layer.weights
    assert (len(symbolic_weights) == len(weight_tensor_td_conv_new))
    weight_value_tuples += zip(symbolic_weights, weight_tensor_td_conv_new)

    # Bidirectional ConvLSTM2D layer
    # the old ConvLSTM2D took a list of 12 weight tensors per direction;
    # preprocessing returns 3 concatenated, larger tensors per direction.
    weight_tensor_bi_convlstm_old = []
    for j in range(2):  # bidirectional
        for i in range(4):
            weight_tensor_bi_convlstm_old.append(np.zeros((3, 3, 15, 10)))  # kernel
            weight_tensor_bi_convlstm_old.append(np.zeros((3, 3, 10, 10)))  # recurrent kernel
            weight_tensor_bi_convlstm_old.append(np.zeros((10,)))  # bias

    bi_convlstm_layer = model.layers[2]
    weight_tensor_bi_convlstm_new = saving.preprocess_weights_for_loading(
        bi_convlstm_layer,
        weight_tensor_bi_convlstm_old,
        original_keras_version='1')

    symbolic_weights = bi_convlstm_layer.weights
    assert (len(symbolic_weights) == len(weight_tensor_bi_convlstm_new))
    weight_value_tuples += zip(symbolic_weights, weight_tensor_bi_convlstm_new)

    K.batch_set_value(weight_value_tuples)

    assert np.all(K.eval(model.layers[1].weights[0]) == weight_tensor_td_conv_new[0])
    assert np.all(K.eval(model.layers[1].weights[1]) == weight_tensor_td_conv_new[1])
    assert np.all(K.eval(model.layers[2].weights[0]) == weight_tensor_bi_convlstm_new[0])
    assert np.all(K.eval(model.layers[2].weights[1]) == weight_tensor_bi_convlstm_new[1])
    assert np.all(K.eval(model.layers[2].weights[2]) == weight_tensor_bi_convlstm_new[2])
    assert np.all(K.eval(model.layers[2].weights[3]) == weight_tensor_bi_convlstm_new[3])
    assert np.all(K.eval(model.layers[2].weights[4]) == weight_tensor_bi_convlstm_new[4])
    assert np.all(K.eval(model.layers[2].weights[5]) == weight_tensor_bi_convlstm_new[5])
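
The TimeDistributed/Conv2D half of this test exercises the kernel-layout conversion applied to weights saved by Keras 1. A minimal sketch of that conversion, assuming the old/new layouts documented in the comments above (not part of the test itself):

import numpy as np

kernel_old = np.zeros((15, 1, 5, 5))      # Keras 1: (filters, stack_size, rows, cols)
kernel_new = np.transpose(kernel_old, (2, 3, 1, 0))
assert kernel_new.shape == (5, 5, 1, 15)  # Keras 2: (rows, cols, stack_size, filters)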
Code example #2
File: test_topology.py Project: Kartik97/keras
def test_preprocess_weights_for_loading_for_model(layer):
    model = Sequential([layer])
    weights1 = model.get_weights()
    weights2 = saving.preprocess_weights_for_loading(
        model, convert_weights(layer, weights1),
        original_keras_version='1')
    assert all([np.allclose(x, y, 1e-5)
                for (x, y) in zip(weights1, weights2)])
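
`convert_weights` is a helper defined elsewhere in the test file (which also supplies the np/Sequential/saving imports); roughly, it re-splits the fused Keras 2 RNN tensors back into the per-gate layout Keras 1 used. A sketch of the LSTM-family case, assuming four gates (illustrative, not the exact helper):

import numpy as np

def convert_weights(layer, weights):
    # Split each fused tensor into per-gate chunks and interleave them,
    # mimicking the gate-major ordering of Keras 1.
    if layer.__class__.__name__ in ('LSTM', 'ConvLSTM2D'):
        per_gate = [np.split(w, 4, axis=-1) for w in weights]
        return sum(map(list, zip(*per_gate)), [])
    return weights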
Code example #3
File: test_topology.py Project: Kartik97/keras
def test_preprocess_weights_for_loading(layer):
    # A model is needed to initialize weights.
    _ = Sequential([layer])
    weights1 = layer.get_weights()
    weights2 = saving.preprocess_weights_for_loading(
        layer, convert_weights(layer, weights1),
        original_keras_version='1')
    assert all([np.allclose(x, y, 1e-5)
                for (x, y) in zip(weights1, weights2)])
Code example #4
File: test_topology.py Project: Kartik97/keras
def test_preprocess_weights_for_loading_rnn_should_be_idempotent(layer_class, layer_args):
    """
    Loading weights from an RNN class to itself should not convert the weights.
    """
    # the layer can be instantiated only on supported backends
    layer = layer_class(**layer_args)
    # A model is needed to initialize weights.
    _ = Sequential([layer])
    weights1 = layer.get_weights()
    weights2 = saving.preprocess_weights_for_loading(layer, weights1)
    assert all([np.allclose(x, y, 1e-5) for (x, y) in zip(weights1, weights2)])
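
These tests are normally driven by pytest parametrization; a plausible direct invocation (the layer class and arguments are illustrative):

from keras.layers import LSTM

test_preprocess_weights_for_loading_rnn_should_be_idempotent(
    LSTM, {'units': 4, 'input_shape': (3, 5)})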
Code example #5
# import assumed from Keras 2.2 (the original snippet shows only the function)
from keras.engine.saving import preprocess_weights_for_loading


def gpu_weights_to_cpu_weights(model, cudnn_weights):
    i = 0
    weights2 = []
    for layer in model._layers:
        weight_len = len(layer.weights)
        if weight_len > 0:
            cudnn_weights_layer = cudnn_weights[i:i + weight_len]
            i += weight_len
            weights2_layer = preprocess_weights_for_loading(
                layer, cudnn_weights_layer)
            for j in range(len(weights2_layer)):
                weights2.append(weights2_layer[j])

    return weights2
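
A plausible use of this converter: copy weights trained in a CuDNN-based model into an architecturally matching CPU model (`gpu_model` and `cpu_model` are illustrative):

cudnn_weights = gpu_model.get_weights()
cpu_weights = gpu_weights_to_cpu_weights(cpu_model, cudnn_weights)
cpu_model.set_weights(cpu_weights)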
Code example #6
import pytest
from keras.engine.saving import preprocess_weights_for_loading  # import assumed

def assert_not_compatible(src, dest, message):
    with pytest.raises(ValueError) as ex:
        preprocess_weights_for_loading(
            dest,
            initialize_weights(src).get_weights())
    # `ex.value.message` exists only on Python 2; use str() for portability.
    assert message in str(ex.value)
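
`initialize_weights` comes from the same test file; a minimal stand-in consistent with how it is used here (a sketch, not the exact helper):

from keras.models import Sequential

def initialize_weights(layer):
    _ = Sequential([layer])  # building a model initializes the layer's weights
    return layer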
Code example #7
# Load the optimized model's config (if the file exists), for quick re-building.
if os.path.isfile(opt_h5_filename):
    opt_model_config = load_model(opt_h5_filename).get_config()
    SWAP_CUDNN_LAYER = True
else:
    opt_model_config = None
    SWAP_CUDNN_LAYER = False

# This block restores the model from the config before freezing it.
# If SWAP_CUDNN_LAYER is set, the config describes an optimized model (without
# CuDNN training components), so the weights have to be adjusted accordingly.
if SWAP_CUDNN_LAYER:
    output_model = Sequential.from_config(opt_model_config)
    print_weights(original_weights, 'CudnnLSTM_weights')
    weights_fixed = preprocess_weights_for_loading(output_model,
                                                   original_weights)
    print_weights(weights_fixed, 'FIXED: LSTM_weights')
    output_model.set_weights(weights_fixed)
    # Smoke test: run a prediction on zeros to verify the swapped weights work.
    result = output_model.predict(np.zeros(shape=[1, 2000, 2]))
else:
    # If we don't need to adjust any variables, then we roll with the original config/weights.
    output_model = Sequential.from_config(original_config)
    output_model.set_weights(original_weights)

# Re-build a model where the learning phase is now hard-coded to 0.

temp_dir = "graph"
checkpoint_prefix = os.path.join(temp_dir, "saved_checkpoint")
checkpoint_state_name = "checkpoint_state"
input_graph_name = "untrained_input_graph.pb"
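
`print_weights` is not shown in this snippet; a minimal stand-in matching how it is called (the name and output format are assumptions):

def print_weights(weights, title):
    print(title)
    for w in weights:
        print('  shape={}'.format(w.shape))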
Code example #8
File: utills.py Project: lonelybeansprouts/transfer
# imports assumed from Keras 2.2 (the original snippet shows only the function)
import warnings

import numpy as np
from keras import backend as K
from keras.engine.saving import (load_attributes_from_hdf5_group,
                                 preprocess_weights_for_loading)


def load_weights_from_hdf5_group_by_name(f,
                                         layers,
                                         skip_mismatch=False,
                                         reshape=False):
    """Implements name-based weight loading.

    (instead of topological weight loading).

    Layers that have no matching name are skipped.

    # Arguments
        f: A pointer to a HDF5 group.
        layers: A list of target layers.
        skip_mismatch: Boolean, whether to skip loading of layers
            where there is a mismatch in the number of weights,
            or a mismatch in the shape of the weights.
        reshape: Reshape weights to fit the layer when the correct number
            of values are present but the shape does not match.

    # Raises
        ValueError: in case of mismatch between provided layers
            and weights file and skip_mismatch=False.
    """
    if 'keras_version' in f.attrs:
        original_keras_version = f.attrs['keras_version'].decode('utf8')
    else:
        original_keras_version = '1'
    if 'backend' in f.attrs:
        original_backend = f.attrs['backend'].decode('utf8')
    else:
        original_backend = None

    # New file format.
    layer_names = load_attributes_from_hdf5_group(f, 'layer_names')

    # Reverse index of layer name to list of layers with name.
    index = {}

    for layer in layers:
        if layer.name:
            index.setdefault(layer.name, []).append(layer)

    # Debug output: layer names in the file vs. in the model.
    print(layer_names)
    print(index.keys())

    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        print(name)
        g = f[name]
        weight_names = load_attributes_from_hdf5_group(g, 'weight_names')
        weight_values = [
            np.asarray(g[weight_name]) for weight_name in weight_names
        ]

        for layer in index.get(name, []):
            symbolic_weights = layer.weights

            symbolic_weights_names = [w.name for w in symbolic_weights]

            weight_values = preprocess_weights_for_loading(
                layer,
                weight_values,
                original_keras_version,
                original_backend,
                reshape=reshape)
            # NOTE: the stock length-mismatch check (warn or raise when
            # len(weight_values) != len(symbolic_weights)) is deliberately
            # disabled in this fork; weights are matched by name below.
            # Set values.
            # Strip the scope prefix so weight names can be compared directly.
            weight_names = [n.split('/')[-1] for n in weight_names]
            symbolic_weights_names = [
                n.split('/')[-1] for n in symbolic_weights_names
            ]
            # Debug output: stored vs. symbolic weight names.
            print(weight_names)
            print(symbolic_weights_names)

            for i in range(len(weight_values)):

                if weight_names[i] in symbolic_weights_names:
                    ii = symbolic_weights_names.index(weight_names[i])

                    if K.int_shape(
                            symbolic_weights[ii]) != weight_values[i].shape:
                        if skip_mismatch:
                            warnings.warn(
                                'Skipping loading of weights for layer {}'.
                                format(layer.name) +
                                ' due to mismatch in shape' +
                                ' ({} vs {}).'.format(
                                    symbolic_weights[ii].shape,
                                    weight_values[i].shape))
                            continue
                        else:
                            raise ValueError(
                                'Layer #' + str(k) + ' (named "' + layer.name +
                                '"), weight ' + str(symbolic_weights[ii]) +
                                ' has shape {}'.format(
                                    K.int_shape(symbolic_weights[ii])) +
                                ', but the saved weight has shape ' +
                                str(weight_values[i].shape) + '.')
                    else:
                        weight_value_tuples.append(
                            (symbolic_weights[ii], weight_values[i]))

    K.batch_set_value(weight_value_tuples)
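
A plausible invocation, mirroring how Keras drives its own name-based loader (`model` and the file path are illustrative):

import h5py

with h5py.File('pretrained_weights.h5', mode='r') as f:
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        f = f['model_weights']  # whole-model files nest the weights group
    load_weights_from_hdf5_group_by_name(f, model.layers, skip_mismatch=True)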
Code example #9
File: saving.py Project: thanhnguyentang/pib
# imports assumed from Keras 2.2 (the original snippet shows only the function)
import json
import warnings

from keras import backend as K
from keras import optimizers
from keras.models import model_from_config
from keras.engine.saving import preprocess_weights_for_loading


def _deserialize_model(h5dict, custom_objects=None, compile=True, **kwargs):
    """De-serializes a model serialized via _serialize_model

    # Arguments
        h5dict: `keras.utils.io_utils.H5Dict` instance.
        custom_objects: Optional dictionary mapping names
            (strings) to custom classes or functions to be
            considered during deserialization.
        compile: Boolean, whether to compile the model
            after loading.

    # Returns
        A Keras model instance. If an optimizer was found
        as part of the saved model, the model is already
        compiled. Otherwise, the model is uncompiled and
        a warning will be displayed. When `compile` is set
        to False, the compilation is omitted without any
        warning.
    """
    if not custom_objects:
        custom_objects = {}

    def convert_custom_objects(obj):
        """Handles custom object lookup.

        # Arguments
            obj: object, dict, or list.

        # Returns
            The same structure, where occurrences
                of a custom object name have been replaced
                with the custom object.
        """
        if isinstance(obj, list):
            deserialized = []
            for value in obj:
                deserialized.append(convert_custom_objects(value))
            return deserialized
        if isinstance(obj, dict):
            deserialized = {}
            for key, value in obj.items():
                deserialized[key] = convert_custom_objects(value)
            return deserialized
        if obj in custom_objects:
            return custom_objects[obj]
        return obj

    model_config = h5dict['model_config']
    if model_config is None:
        raise ValueError('No model found in config.')
    model_config = json.loads(model_config.decode('utf-8'))
    model_weights_group = h5dict['model_weights']
    if 'config_modifier' in kwargs:
        model_config = kwargs['config_modifier'](model_config)
        layer_names = []
        for layer in model_config['config']['layers']:
            layer_names.append(layer['name'])
    else:
        layer_names = model_weights_group['layer_names']

    model = model_from_config(model_config, custom_objects=custom_objects)
    if 'keras_version' in model_weights_group:
        original_keras_version = model_weights_group['keras_version'].decode(
            'utf8')
    else:
        original_keras_version = '1'
    if 'backend' in model_weights_group:
        original_backend = model_weights_group['backend'].decode('utf8')
    else:
        original_backend = None

    layers = model.layers

    filtered_layers = []
    for layer in layers:
        weights = layer.weights
        if weights:
            filtered_layers.append(layer)

    filtered_layer_names = []
    for name in layer_names:
        layer_weights = model_weights_group[name]
        weight_names = layer_weights['weight_names']
        if weight_names:
            filtered_layer_names.append(name)

    layer_names = filtered_layer_names
    if len(layer_names) != len(filtered_layers):
        raise ValueError(
            'You are trying to load a weight file'
            ' containing {} layers into a model with {} layers'.format(
                len(layer_names), len(filtered_layers)))

    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        layer_weights = model_weights_group[name]
        weight_names = layer_weights['weight_names']
        weight_values = [
            layer_weights[weight_name] for weight_name in weight_names
        ]
        layer = filtered_layers[k]
        symbolic_weights = layer.weights
        weight_values = preprocess_weights_for_loading(layer,
                                                       weight_values,
                                                       original_keras_version,
                                                       original_backend,
                                                       reshape=False)
        if len(weight_values) != len(symbolic_weights):
            raise ValueError('Layer #' + str(k) + ' (named "' + layer.name +
                             '" in the current model) was found to '
                             'correspond to layer ' + name +
                             ' in the save file. '
                             'However the new layer ' + layer.name +
                             ' expects ' + str(len(symbolic_weights)) +
                             ' weights, but the saved weights have ' +
                             str(len(weight_values)) + ' elements.')
        weight_value_tuples += zip(symbolic_weights, weight_values)
    K.batch_set_value(weight_value_tuples)

    if compile:
        training_config = h5dict.get('training_config')
        if training_config is None:
            warnings.warn('No training configuration found in save file: '
                          'the model was *not* compiled. '
                          'Compile it manually.')
            return model
        training_config = json.loads(training_config.decode('utf-8'))
        optimizer_config = training_config['optimizer_config']
        optimizer = optimizers.deserialize(optimizer_config,
                                           custom_objects=custom_objects)

        # Recover loss functions and metrics (kwargs may override the
        # values stored in the training config).
        try:
            loss = kwargs['loss']
        except KeyError:
            loss = convert_custom_objects(training_config['loss'])
        try:
            metrics = kwargs['metrics']
        except KeyError:
            metrics = convert_custom_objects(training_config['metrics'])
        try:
            sample_weight_mode = kwargs['sample_weight_mode']
        except KeyError:
            sample_weight_mode = training_config['sample_weight_mode']
        try:
            loss_weights = kwargs['loss_weights']
        except KeyError:
            loss_weights = training_config['loss_weights']
        try:
            target_tensors = kwargs['target_tensors']
        except KeyError:
            target_tensors = None

        # Compile model.
        model.compile(
            optimizer=optimizer,
            loss=loss,
            metrics=metrics,
            loss_weights=loss_weights,
            # (thanhnt): pass `target_tensors` when compiling a restored model.
            target_tensors=target_tensors,
            sample_weight_mode=sample_weight_mode)

        # Set optimizer weights.
        if 'optimizer_weights' in h5dict:
            # Build train function (to get weight updates).
            model._make_train_function()
            optimizer_weights_group = h5dict['optimizer_weights']
            optimizer_weight_names = [
                n.decode('utf8')
                for n in optimizer_weights_group['weight_names']
            ]
            optimizer_weight_values = [
                optimizer_weights_group[n] for n in optimizer_weight_names
            ]
            try:
                model.optimizer.set_weights(optimizer_weight_values)
            except ValueError:
                warnings.warn('Error in loading the saved optimizer '
                              'state. As a result, your model is '
                              'starting with a freshly initialized '
                              'optimizer.')
    return model
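
A plausible way to exercise this fork's extra `config_modifier` hook, assuming Keras 2.2's `H5Dict` utility (the modifier shown is illustrative):

from keras.utils.io_utils import H5Dict

def freeze_all(config):
    # e.g. mark every layer non-trainable before the model is rebuilt
    for layer in config['config']['layers']:
        layer['config']['trainable'] = False
    return config

h5dict = H5Dict('model.h5', mode='r')
try:
    model = _deserialize_model(h5dict, compile=False,
                               config_modifier=freeze_all)
finally:
    h5dict.close()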
Code example #10
File: saving.py Project: thanhnguyentang/pib
# imports assumed from Keras 2.2 (the original snippet shows only the function)
import warnings

import numpy as np
from keras import backend as K
from keras.engine.saving import (load_attributes_from_hdf5_group,
                                 preprocess_weights_for_loading)


def load_weights_from_hdf5_group_by_name(f,
                                         layers,
                                         skip_mismatch=False,
                                         reshape=False,
                                         consider_weight_name_match=False):
    """Implements name-based weight loading.

    (instead of topological weight loading).

    Layers that have no matching name are skipped.

    # Arguments
        f: A pointer to a HDF5 group.
        layers: A list of target layers.
        skip_mismatch: Boolean, whether to skip loading of layers
            where there is a mismatch in the number of weights,
            or a mismatch in the shape of the weights.
        reshape: Reshape weights to fit the layer when the correct number
            of values are present but the shape does not match.
        consider_weight_name_match: Boolean, whether to still load weights
            when the number of weights does not match; in that case every
            weight whose name and shape match is loaded. Only applicable
            when `skip_mismatch=False`.

    # Raises
        ValueError: in case of mismatch between provided layers
            and weights file and skip_mismatch=False.
    """
    if 'keras_version' in f.attrs:
        original_keras_version = f.attrs['keras_version'].decode('utf8')
    else:
        original_keras_version = '1'
    if 'backend' in f.attrs:
        original_backend = f.attrs['backend'].decode('utf8')
    else:
        original_backend = None

    # New file format.
    layer_names = load_attributes_from_hdf5_group(f, 'layer_names')

    # Reverse index of layer name to list of layers with name.
    index = {}
    for layer in layers:
        if layer.name:
            index.setdefault(layer.name, []).append(layer)

    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        g = f[name]
        weight_names = load_attributes_from_hdf5_group(g, 'weight_names')
        weight_values = [
            np.asarray(g[weight_name]) for weight_name in weight_names
        ]

        for layer in index.get(name, []):
            symbolic_weights = layer.weights
            weight_values = preprocess_weights_for_loading(
                layer,
                weight_values,
                original_keras_version,
                original_backend,
                reshape=reshape)
            if len(weight_values) != len(symbolic_weights):
                if skip_mismatch:
                    warnings.warn(
                        'Skipping loading of weights for '
                        'layer {}'.format(layer.name) + ' due to mismatch '
                        'in number of weights ({} vs {}).'.format(
                            len(symbolic_weights), len(weight_values)))
                    continue
                else:
                    # (thanhnt): allow loading when variable names match
                    # (conditioned on the variable shapes matching too).
                    if not consider_weight_name_match:
                        raise ValueError(
                            'Layer #' + str(k) + ' (named "' + layer.name +
                            '") expects ' + str(len(symbolic_weights)) +
                            ' weight(s), but the saved weights have ' +
                            str(len(weight_values)) + ' element(s). '
                            'Consider setting `consider_weight_name_match`'
                            ' to `True` to load weights by name match.')
                    else:
                        warnings.warn(
                            'Mismatch in the number of weights'
                            ' ({} vs {}).'.format(
                                len(symbolic_weights), len(weight_values)) +
                            ' Loading continues for every model variable'
                            ' whose name and shape match a stored variable.')
                        warning_weights = []
                        for i in range(len(symbolic_weights)):
                            symbolic_shape = K.int_shape(symbolic_weights[i])
                            symbolic_name = symbolic_weights[i].name.split(
                                '/')[-1].split(':')[0]
                            # Collect every stored weight whose name and
                            # shape both match this symbolic weight.
                            matched = False
                            for weight_name, weight_value in zip(
                                    weight_names, weight_values):
                                if (weight_name.split('/')[-1].split(':')[0]
                                        == symbolic_name and
                                        weight_value.shape == symbolic_shape):
                                    weight_value_tuples.append(
                                        (symbolic_weights[i], weight_value))
                                    matched = True
                            if not matched:
                                warning_weights.append(
                                    symbolic_weights[i].name)
                        if len(warning_weights) > 0:
                            warnings.warn(
                                'Skipping loading of weights of some'
                                ' variables for layer {}'.format(layer.name) +
                                ' due to mismatch in variable names or'
                                ' variable shapes. The variables are {}.'
                                ' The stored variables are {}.'.format(
                                    warning_weights, weight_names))
            else:
                # Set values.
                for i in range(len(weight_values)):
                    symbolic_shape = K.int_shape(symbolic_weights[i])
                    if symbolic_shape != weight_values[i].shape:
                        if skip_mismatch:
                            warnings.warn('Skipping loading of weights for '
                                          'layer {}'.format(layer.name) +
                                          ' due to '
                                          'mismatch in shape ({} vs {}).'.
                                          format(symbolic_weights[i].shape,
                                                 weight_values[i].shape))
                            continue
                        else:
                            raise ValueError(
                                'Layer #' + str(k) + ' (named "' + layer.name +
                                '"), weight ' + str(symbolic_weights[i]) +
                                ' has shape {}'.format(symbolic_shape) +
                                ', but the saved weight has shape ' +
                                str(weight_values[i].shape) + '.')
                    else:
                        weight_value_tuples.append(
                            (symbolic_weights[i], weight_values[i]))

    K.batch_set_value(weight_value_tuples)
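
A plausible call that loads a partially matching checkpoint via this fork's extra flag (`model` and the file path are illustrative):

import h5py

with h5py.File('pretrained.h5', mode='r') as f:
    load_weights_from_hdf5_group_by_name(
        f, model.layers, consider_weight_name_match=True)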
Code example #11
# imports assumed from Keras 2.2 (the original snippet shows only the function)
import numpy as np
from keras import backend as K
from keras.engine import saving


def load_weights_from_hdf5_group_new(f, layers, reshape=False):
    """Implements topological (order-based) weight loading.

    # Arguments
        f: A pointer to a HDF5 group.
        layers: a list of target layers.
        reshape: Reshape weights to fit the layer when the correct number
            of values are present but the shape does not match.

    # Raises
        ValueError: in case of mismatch between provided layers
            and weights file.
    """
    if 'keras_version' in f.attrs:
        original_keras_version = f.attrs['keras_version'].decode('utf8')
    else:
        original_keras_version = '1'
    if 'backend' in f.attrs:
        original_backend = f.attrs['backend'].decode('utf8')
    else:
        original_backend = None
    # Problem case: m.layers[180].weights == [] (the attentive layer).
    filtered_layers = []  # keep only the layers that actually have weights
    for layer in layers:
        weights = layer.weights
        if weights:
            filtered_layers.append(layer)

    layer_names = saving.load_attributes_from_hdf5_group(f, 'layer_names')
    filtered_layer_names = []
    for name in layer_names:
        g = f[name]
        weight_names = saving.load_attributes_from_hdf5_group(g, 'weight_names')
        if weight_names:
            filtered_layer_names.append(name)
    layer_names = filtered_layer_names
    #print("||||||||||||||||||||||||||||||||||||||")
    #for i in range(100,113):
    #    print(layer_names[i], "===", filtered_layers[i])
    #    print(" ")
    #print(layer_names[107], "===", filtered_layers[107])  # problem at weight 5/22
    #print("------------------------------------")
    #layer = filtered_layers[107]
    #symbolic_weights = layer.weights
    #for nro in range(0,22):
    #    print(nro)
    #    print(symbolic_weights[nro].shape)
    #print("||||||||||||||||||||||||||||||||||||||")
    if len(layer_names) != len(filtered_layers):
        raise ValueError('You are trying to load a weight file '
                         'containing ' + str(len(layer_names)) +
                         ' layers into a model with ' +
                         str(len(filtered_layers)) + ' layers.')

    # We batch weight value assignments in a single backend call
    # which provides a speedup in TensorFlow.
    weight_value_tuples = []
    for k, name in enumerate(layer_names):
        g = f[name]
        weight_names = saving.load_attributes_from_hdf5_group(g, 'weight_names')
        weight_values = [np.asarray(g[weight_name]) for weight_name in weight_names]
        layer = filtered_layers[k]
        symbolic_weights = layer.weights

        weight_values = saving.preprocess_weights_for_loading(layer,
                                                       weight_values,
                                                       original_keras_version,
                                                       original_backend,
                                                       reshape=reshape)
        #if(k == 107):
        #    for nro in range(0,21):
        #        print(weight_values[nro].shape)   
            
        if len(weight_values) != len(symbolic_weights):
            raise ValueError('Layer #' + str(k) +
                             ' (named "' + layer.name +
                             '" in the current model) was found to '
                             'correspond to layer ' + name +
                             ' in the save file. '
                             'However the new layer ' + layer.name +
                             ' expects ' + str(len(symbolic_weights)) +
                             ' weights, but the saved weights have ' +
                             str(len(weight_values)) +
                             ' elements.')
         
        # Hack zone: when a shape still disagrees, assume the saved kernel is
        # stored in fully reversed axis order and flip all four axes.
        for i in range(len(symbolic_weights)):
            if symbolic_weights[i].shape != weight_values[i].shape:
                weight_values[i] = np.moveaxis(weight_values[i],
                                               (0, 1, 2, 3), (3, 2, 1, 0))
        
        weight_value_tuples += zip(symbolic_weights, weight_values)

    #for i in range(len(weight_value_tuples)):
    #    print(i, weight_value_tuples[i][0].shape,"  =  ",weight_value_tuples[i][1].shape)

    K.batch_set_value(weight_value_tuples)
    print("Procedimiento weights_proc.py finalizado")