def load_all_weights(model, filepath, include_optimizer=True):
    """Loads the weights of a model saved via `save_all_weights`.
    If the model has been compiled, optionally loads its optimizer's weights.
    # Arguments
        model: instantiated model with architecture matching the saved model.
            Compile the model beforehand if you want to load optimizer weights.
        filepath: String, path to the saved model.
        include_optimizer: Boolean, whether to also load the optimizer's weights.
    # Returns
        None. The model will have its weights updated.
    # Raises
        ImportError: if h5py is not available.
        ValueError: In case of an invalid savefile.
    """
    if h5py is None:
        raise ImportError('`load_all_weights` requires h5py.')

    with h5py.File(filepath, mode='r') as f:
        # set weights
        saving.load_weights_from_hdf5_group(f['model_weights'], model.layers)
        # Set optimizer weights.
        if (include_optimizer
                and 'optimizer_weights' in f and hasattr(model, 'optimizer')
                and model.optimizer):
            optimizer_weights_group = f['optimizer_weights']
            optimizer_weight_names = [n.decode('utf8') for n in
                                      optimizer_weights_group.attrs['weight_names']]
            optimizer_weight_values = [optimizer_weights_group[n] for n in
                                       optimizer_weight_names]
            model.optimizer.set_weights(optimizer_weight_values)
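A minimal usage sketch for the function above (the model definition, optimizer, and 'checkpoint.h5' path are illustrative assumptions; the file is expected to come from the companion `save_all_weights`):

# Hypothetical usage; compile first so the optimizer weights can be restored.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([Dense(10, activation='softmax', input_shape=(784,))])
model.compile(optimizer='adam', loss='categorical_crossentropy')
load_all_weights(model, 'checkpoint.h5', include_optimizer=True)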
Example #2
    def load_weights(self,
                     filepath,
                     by_name=True,
                     skip_mismatch=False,
                     reshape=False,
                     exclude=None,
                     verbose=False):
        import h5py
        import re
        from keras.engine import saving
        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \
            else keras_model.layers
        if exclude is not None:
            by_name = True
            # Materialize the filtered layers so they can be iterated more than once.
            layers = list(filter(lambda x: not re.match(exclude, x.name), layers))
        if verbose:
            print('[INFO] Loading the following layers:')
            for layer in layers:
                print('Layer:     ', layer.name)
        with h5py.File(filepath, mode='r') as f:
            if 'layer_names' not in f.attrs and 'model_weights' in f:
                f = f['model_weights']
            if by_name:
                saving.load_weights_from_hdf5_group_by_name(
                    f, layers, skip_mismatch=skip_mismatch, reshape=reshape)
            else:
                saving.load_weights_from_hdf5_group(f, layers, reshape=reshape)
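A hedged usage sketch for this variant, where `exclude` is a regular expression matched against layer names; the wrapper instance and weight path are illustrative:

# Illustrative call: `net` stands for an instance of the class defining this method.
net.load_weights('mask_rcnn_coco.h5', by_name=True,
                 exclude=r"(mrcnn\_.*)|(rpn\_.*)", verbose=True)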
    def load_weights(self, model_path, by_name=True, exclude=None):
        '''Modified version of the corresponding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        exclude: list of layer names to exclude
        '''
        import h5py
        from keras.engine import saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(model_path, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        layers = self.model.inner_model.layers if hasattr(self.model, 'inner_model') \
            else self.model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()
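For the method above, `exclude` is a plain list of layer names; a sketch under the assumption that the wrapping class exposes this method and that the named head layers exist in the checkpoint:

# Illustrative call: skip the classifier heads so they can be re-initialised.
model.load_weights('mask_rcnn_coco.h5', by_name=True,
                   exclude=['mrcnn_class_logits', 'mrcnn_bbox_fc',
                            'mrcnn_bbox', 'mrcnn_mask'])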
Example #4
def load_weights(file_path, model, by_name=False, exclude=None):
    import h5py
    try:
        from keras.engine import saving
    except ImportError:
        # Keras before 2.2 used the 'topology' namespace.
        from keras.engine import topology as saving

    if exclude:
        by_name = True

    if h5py is None:
        raise ImportError("requires h5py.")

    f = h5py.File(file_path, mode='r')
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        f = f['model_weights']

    # In multi-GPU training, we wrap the model. Get layers
    # of the inner model because they have the weights.
    keras_model = model
    layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \
        else keras_model.layers


    # Exclude some layers
    if exclude:
        layers = filter(lambda l: l.name not in exclude, layers)

    if by_name:
        saving.load_weights_from_hdf5_group_by_name(f, layers)
    else:
        saving.load_weights_from_hdf5_group(f, layers)
    if hasattr(f, 'close'):
        f.close()
Example #5
    def load_weights(self, filepath):
        try:
            from keras.engine import saving
        except ImportError:
            # Keras before 2.2 used the 'topology' namespace.
            from keras.engine import topology as saving

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(
            keras_model, "inner_model") else keras_model.layers

        try:
            saving.load_weights_from_hdf5_group(f, layers)
            print('Loaded weights from %s' % filepath)
        except Exception:
            print('No weights to load from %s' % filepath)
        finally:
            if hasattr(f, 'close'):
                f.close()
Example #6
    def load_weights(self, filepath, by_name=False, exclude=None):
        import h5py
        from keras.engine import saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')

        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()

        # Update the log directory
        self.set_log_dir(filepath)
Example #7
def load_weights(model,
                 filepath,
                 by_name=False,
                 skip_mismatch=False,
                 reshape=False,
                 consider_weight_name_match=False):
    """Loads all layer weights from a HDF5 save file.

        This method should only be used for testing a model, NOT for resuming training, as the optimizer state is not restored.

        If `by_name` is False (default) weights are loaded
        based on the network's topology, meaning the architecture
        should be the same as when the weights were saved.
        Note that layers that don't have weights are not taken
        into account in the topological ordering, so adding or
        removing layers is fine as long as they don't have weights.

        If `by_name` is True, weights are loaded into layers
        only if they share the same name. This is useful
        for fine-tuning or transfer-learning models where
        some of the layers have changed.

        # Arguments
            filepath: String, path to the weights file to load.
            by_name: Boolean, whether to load weights by layer name
                or by topological order.
            skip_mismatch: Boolean, whether to skip loading of layers
                where there is a mismatch in the number of weights,
                or a mismatch in the shape of the weight
                (only valid when `by_name`=True).
            reshape: Reshape weights to fit the layer when the correct number
                of weight arrays is present but their shape does not match.
            consider_weight_name_match: Boolean, whether to still load layers
                when there is a mismatch in the number of weights, in which
                case only the weights whose name and shape match are loaded.
                Only applicable when `skip_mismatch=False` and `by_name=True`.

        # Raises
            ImportError: If h5py is not available.
        """
    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    with h5py.File(filepath, mode='r') as f:
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']  # if `filepath` obtained by `save_model`
        if by_name:
            load_weights_from_hdf5_group_by_name(
                f,
                model.layers,
                skip_mismatch=skip_mismatch,
                reshape=reshape,
                consider_weight_name_match=consider_weight_name_match)
        else:
            load_weights_from_hdf5_group(f, model.layers, reshape=reshape)
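A usage sketch for this standalone loader (the `model` instance and weights path are assumptions; the flags shown are the ones documented above):

# Fine-tuning case: match layers by name and skip any whose shapes changed.
load_weights(model, 'pretrained_weights.h5',
             by_name=True, skip_mismatch=True, reshape=False)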
Example #8
    def load_weights(self):
        """
        Load and initialise weights
        """

        if not self.model:
            logger.error(
                "No model. You must build the model first with build_model")
            return

        logger.debug("Loading weights from %s", self.weights_path)

        with h5py.File(self.weights_path, mode='r') as f:
            saving.load_weights_from_hdf5_group(f['model_weights'],
                                                self.model.layers)
Example #9
    def load_weights(self, filepath, by_name=False, exclude=None):
        '''
        Modified version of the corresponding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        :param filepath: path of the weights file to load, e.g., 'trained_models/mask_rcnn_coco.h5'
        :param by_name: Boolean, whether to load weights by layer name
        :param exclude: list of layer names to exclude
        :return: None
        '''

        import h5py
        # Conditional import to support versions of Keras before 2.2
        # TODO: remove in about 6 months (end of 2018)
        try:
            from keras.engine import saving
        except ImportError:
            # Keras before 2.2 used the 'topology' namespace.
            from keras.engine import topology as saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()

        # Update the log directory
        self.set_log_dir(filepath)
Example #10
    def load_weights_new(self, filepath, by_name=False,
                     skip_mismatch=False, reshape=False):
        """Loads all layer weights from a HDF5 save file.

        If `by_name` is False (default) weights are loaded
        based on the network's topology, meaning the architecture
        should be the same as when the weights were saved.
        Note that layers that don't have weights are not taken
        into account in the topological ordering, so adding or
        removing layers is fine as long as they don't have weights.

        If `by_name` is True, weights are loaded into layers
        only if they share the same name. This is useful
        for fine-tuning or transfer-learning models where
        some of the layers have changed.

        # Arguments
            filepath: String, path to the weights file to load.
            by_name: Boolean, whether to load weights by name
                or by topological order.
            skip_mismatch: Boolean, whether to skip loading of layers
                where there is a mismatch in the number of weights,
                or a mismatch in the shape of the weight
                (only valid when `by_name`=True).
            reshape: Reshape weights to fit the layer when the correct number
                of weight arrays is present but their shape does not match.


        # Raises
            ImportError: If h5py is not available.
        """

        with h5py.File(filepath, mode='r') as f:
            if 'layer_names' not in f.attrs and 'model_weights' in f:
                f = f['model_weights']
            if by_name:
                saving.load_weights_from_hdf5_group_by_name(
                    f, self.layers, skip_mismatch=skip_mismatch,
                    reshape=reshape)
            else:
                saving.load_weights_from_hdf5_group(
                    f, self.layers, reshape=reshape)
            # The `with` statement closes the file on exit; no explicit close is needed.
Example #11
    def load_weights(self, pth, by_name=True, exclude=None):
        import h5py
        from keras.engine import saving

        if exclude:
            by_name = True

        f = h5py.File(pth, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # print('\n', 'Pretrained layers')
        pretrained_layers = list(
            [n.decode('utf8') for n in f.attrs['layer_names']])
        # layers = self.model.inner_model.layers if hasattr(model, "inner_model") else self.model.layers

        ls = []
        for layer in self.model.layers:
            if hasattr(layer, 'layers'):
                for l in layer.layers:
                    if l.name in pretrained_layers:
                        # print(l.name)
                        ls.append(l)
            else:
                if layer.name in pretrained_layers:
                    # print(layer.name)
                    ls.append(layer)

        if exclude:
            # Materialize the list so it can be iterated again when freezing layers below.
            ls = list(filter(lambda l: l.name not in exclude, ls))

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, ls)
        else:
            saving.load_weights_from_hdf5_group(f, ls)

        for l in ls:
            l.trainable = False

        if hasattr(f, 'close'):
            f.close()

        # print('Trainable layers')
        # self.get_trainable_layers()
        self.set_log_dir(pth)
Example #12
    def load_weights(self, filepath):
        """
        Custom function for loading weights
        """
        import h5py
        from keras.engine import saving

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')

        f = h5py.File(filepath, mode='r')
        layers = self.QNET_model.layers
        saving.load_weights_from_hdf5_group(f, layers)

        if hasattr(f, 'close'):
            f.close()
        #Update the log directory
        self.set_log_dir(filepath)
def restore_model_state(model, checkpoint_path):
    filepath = checkpoint_path + '_model_and_optimizer.h5'
    f = h5py.File(filepath, mode='r')
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    training_config = f.attrs.get('training_config')
    training_config = json.loads(training_config.decode('utf-8'))
    optimizer_config = training_config['optimizer_config']
    optimizer = optimizers.deserialize(optimizer_config)
    f.close()

    # model.compile(optimizer=loaded_model.optimizer,
    #               loss=loaded_model.loss, # the loss function has no state
    #               metrics=loaded_model.metrics,
    #               loss_weights=loaded_model.loss_weights,
    #               sample_weight_mode=loaded_model.sample_weight_mode)
    model.optimizer = optimizer
    other_configs = np.load(checkpoint_path + '_other_logs.npz')
    return other_configs['epoch'][0]
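For context, a sketch of a companion saver that would produce the layout `restore_model_state` expects (the helper name and the use of Keras's `save_model` are assumptions; `save_model` with `include_optimizer=True` writes the 'model_weights' group and 'training_config' attribute read above):

import numpy as np
from keras.models import save_model

def save_model_state(model, checkpoint_path, epoch):
    # Hypothetical counterpart: one HDF5 file with weights + optimizer config,
    # plus an .npz with bookkeeping such as the current epoch.
    save_model(model, checkpoint_path + '_model_and_optimizer.h5',
               include_optimizer=True)
    np.savez(checkpoint_path + '_other_logs.npz', epoch=np.array([epoch]))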
    def load_weights(self, filepath, by_name=False, exclude=None):
        """Modified version of the corresponding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        exclude: list of layer names to exclude
        """
        self._logger.info('Loading {} model weights from {}...\n'.format(
            'TRAINING' if self.env and self.env.purpose == SuiteActionEnum.TRAINING else 'INFERENCE',
            filepath))
        import h5py
        # Conditional import to support versions of Keras before 2.2
        # TODO: remove in about 6 months (end of 2018)
        try:
            from keras.engine import saving
        except ImportError:
            # Keras before 2.2 used the 'topology' namespace.
            from keras.engine import topology as saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.train_model if self.env and self.env.purpose == SuiteActionEnum.TRAINING else self.inference_model

        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()
    def load_weights(self, model_path, by_name=False, exclude=None):
        """
            Modified version of the corresponding Keras function
            with the addition of multi-GPU support and the ability
            to exclude some layers from loading.
        :param model_path:
        :param by_name:
        :param exclude: list of layer names to exclude
        :return:
        """

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')

        model_file = h5py.File(model_path, mode='r')

        if 'layer_names' not in model_file.attrs and 'model_weights' in model_file:
            model_file = model_file['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model

        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") else keras_model.layers
        print("layers: {}".format(layers))

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            # TODO: an error occurs here during testing
            saving.load_weights_from_hdf5_group_by_name(model_file, layers)
        else:
            saving.load_weights_from_hdf5_group(model_file, layers)
        if hasattr(model_file, 'close'):
            model_file.close()
Example #16
def load_weights(keras_model, filepath, by_name=True, exclude=None):
    """Modified version of the corresponding Keras function with
    the addition of multi-GPU support and the ability to exclude
    some layers from loading.
    exclude: list of layer names to exclude
    """
    import h5py
    # Conditional import to support versions of Keras before 2.2
    # TODO: remove in about 6 months (end of 2018)
    try:
        from keras.engine import saving
    except ImportError:
        # Keras before 2.2 used the 'topology' namespace.
        from keras.engine import topology as saving

    if exclude:
        by_name = True

    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    f = h5py.File(filepath, mode='r')
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        f = f['model_weights']

    # In multi-GPU training, we wrap the model. Get layers
    # of the inner model because they have the weights.
    #keras_model = self.keras_model
    layers = keras_model.layers
    #layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
    #   else keras_model.layers

    # Exclude some layers
    if exclude:
        layers = filter(lambda l: l.name not in exclude, layers)

    if by_name:
        saving.load_weights_from_hdf5_group_by_name(f,
                                                    layers,
                                                    skip_mismatch=True)
    else:
        saving.load_weights_from_hdf5_group(f, layers)
    if hasattr(f, 'close'):
        f.close()
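A usage sketch for this module-level variant, which hardwires `skip_mismatch=True` when loading by name (the model, path, and excluded layer name are illustrative):

# Transfer learning: layers that changed shape are simply skipped.
load_weights(keras_model, 'pretrained_backbone.h5', by_name=True,
             exclude=['dense_logits'])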
    def load_weights(self, filepath, by_name=False, exclude=None):
        """Modified version of the correspoding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        exlude: list of layer names to excluce
        """
        import h5py
        # Keras 2.2 use saving
		#try:
        #    from keras.engine import saving
        #except ImportError:
        #   # Keras before 2.2 used the 'topology' namespace.
        #    from keras.engine import topology as saving"""

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()

        # Update the log directory
        self.set_log_dir(filepath)
Example #18
def load_weights(file_path, model, by_name=False, exclude=None):
    import h5py
    from keras.engine import saving

    if exclude:
        by_name = True

    if h5py is None:
        raise ImportError("requires h5py.")
    f = h5py.File(file_path, mode='r')
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        f = f['model_weights']

    layers = model.layers
    if exclude:
        layers = filter(lambda l: l.name not in exclude, layers)

    if by_name:
        saving.load_weights_from_hdf5_group_by_name(f, layers)
    else:
        saving.load_weights_from_hdf5_group(f, layers)
    if hasattr(f, 'close'):
        f.close()
Example #19
    def load_weights(self, filepath, by_name=False, exclude=None):
        """Modified version of the corresponding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        exclude: list of layer names to exclude
        """
        import h5py
        # Conditional import to support versions of Keras before 2.2
        # TODO: remove in about 6 months (end of 2018)
        try:
            from keras.engine import saving
        except ImportError:
            # Keras before 2.2 used the 'topology' namespace.
            from keras.engine import topology as saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
            else keras_model.layers

        # Pre-defined layer regular expressions
        layer_regex = {
            # All layers
            "all":
            ".*",
            "mask":
            r"(conv\_.*)|(mask\_.*)",
            # all layers but the backbone
            "heads":
            r"(rpn\_.*)|(fpn\_.*)|(mask\_.*)",
            # From a specific Resnet stage and up
            "2+":
            r"(conv2.*)|(bn_conv2.*)|(conv3.*)|(bn_conv3.*)|(conv4.*)|(bn_conv4.*)|(conv5.*)|(bn_conv5.*)|(conv\_.*)|(block2.*)|(bn_block2.*)|(res2.*)|(bn2.*)|(block3.*)|(bn_block3.*)|(res3.*)|(bn3.*)|(block4.*)|(bn_block4.*)|(res4.*)|(bn4.*)|(block5.*)|(bn_block5.*)|(res5.*)|(bn5.*)|(dw\_.*)|(rpn\_.*)|(fpn\_.*)|(mask\_.*)",
            "3+":
            r"(conv3.*)|(bn_conv3.*)|(conv4.*)|(bn_conv4.*)|(conv5.*)|(bn_conv5.*)|(conv\_.*)|(block3.*)|(bn_block3.*)|(res3.*)|(bn3.*)|(block4.*)|(bn_block4.*)|(res4.*)|(bn4.*)|(block5.*)|(bn_block5.*)|(res5.*)|(bn5.*)|(dw\_.*)|(rpn\_.*)|(fpn\_.*)|(mask\_.*)",
            "4+":
            r"(conv4.*)|(bn_conv4.*)|(conv5.*)|(bn_conv5.*)|(conv\_.*)|(block4.*)|(bn_block4.*)|(res4.*)|(bn4.*)|(block5.*)|(bn_block5.*)|(res5.*)|(bn5.*)|(dw\_.*)|(rpn\_.*)|(fpn\_.*)|(mask\_.*)",
            "5+":
            r"(conv5.*)|(bn_conv5.*)|(conv\_.*)|(res5.*)|(bn5.*)|(dw\_.*)|(rpn\_.*)|(fpn\_.*)|(mask\_.*)",
        }

        # Exclude some layers
        layers_show = layers
        if exclude:
            layers_show = filter(
                lambda l: not bool(re.fullmatch(layer_regex[exclude], l.name)),
                layers_show)
            layers = filter(
                lambda l: not bool(re.fullmatch(layer_regex[exclude], l.name)),
                layers)

        indent = 0
        print("LAYERS LOADED: ")
        for layer in layers_show:
            # Is the layer a model?
            if layer.__class__.__name__ == 'Model':
                print("In model: ", layer.name)
                indent = indent + 4
                continue

            if not layer.weights:
                continue
            log("{}{:20}   ({})".format(" " * indent, layer.name,
                                        layer.__class__.__name__))

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()

        # Update the log directory
        self.set_log_dir(filepath)
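A hedged usage sketch for this variant, where `exclude` selects one of the pre-defined regex groups rather than a list of layer names (the instance and checkpoint path are illustrative):

# Load the backbone only; layers matching the "heads" pattern are left untouched.
model.load_weights('mask_rcnn_coco.h5', by_name=True, exclude="heads")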
Example #20
    def load_weights(self, filepath, by_name=False, exclude=None):
        """Modified version of the corresponding Keras function with
        the addition of multi-GPU support and the ability to exclude
        some layers from loading.
        exclude: list of layer names to exclude
        """
        import h5py
        try:
            from keras.engine import saving
        except ImportError:
            # Keras before 2.2 used the 'topology' namespace.
            from keras.engine import topology as saving

        if exclude:
            by_name = True

        if h5py is None:
            raise ImportError('`load_weights` requires h5py.')
        f = h5py.File(filepath, mode='r')
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = filter(lambda l: l.name not in exclude, layers)

        if by_name:
            saving.load_weights_from_hdf5_group_by_name(f, layers)
        else:
            saving.load_weights_from_hdf5_group(f, layers)
        if hasattr(f, 'close'):
            f.close()

        # Update the log directory
        self.set_log_dir(filepath)

    def get_imagenet_weights(self):
        """Downloads ImageNet trained weights from Keras.
        Returns path to weights file.
        """
        from keras.utils.data_utils import get_file
        TF_WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/'\
                                 'releases/download/v0.2/'\
                                 'resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5'
        weights_path = get_file('resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5',
                                TF_WEIGHTS_PATH_NO_TOP,
                                cache_subdir='models')
        return weights_path
def load_model_from_gpu(model, filepath):
    f = h5py.File(filepath, mode='r')
    load_weights_from_hdf5_group(f['model_weights'], model.layers)
    f.close()