def load_weights(self, filepath, by_name=False, exclude=None): """Modified version of the correspoding Keras function with the addition of multi-GPU support and the ability to exclude some layers from loading. exlude: list of layer names to excluce """ import h5py from keras.engine import topology if exclude: by_name = True if h5py is None: raise ImportError('`load_weights` requires h5py.') f = h5py.File(filepath, mode='r') if 'layer_names' not in f.attrs and 'model_weights' in f: f = f['model_weights'] # In multi-GPU training, we wrap the model. Get layers # of the inner model because they have the weights. keras_model = self.keras_model layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \ else keras_model.layers # Exclude some layers if exclude: layers = filter(lambda l: l.name not in exclude, layers) if by_name: topology.load_weights_from_hdf5_group_by_name(f, layers) else: topology.load_weights_from_hdf5_group(f, layers) if hasattr(f, 'close'): f.close()
try:
    import h5py
except ImportError:
    h5py = None
from keras.engine import topology


def load_all_weights(model, filepath, include_optimizer=True):
    """Loads the weights of a model saved via `save_all_weights`.
    If model has been compiled, optionally load its optimizer's weights.

    # Arguments
        model: instantiated model with architecture matching the saved model.
            Compile the model beforehand if you want to load optimizer weights.
        filepath: String, path to the saved model.

    # Returns
        None. The model will have its weights updated.

    # Raises
        ImportError: if h5py is not available.
        ValueError: In case of an invalid savefile.
    """
    if h5py is None:
        raise ImportError('`load_all_weights` requires h5py.')
    with h5py.File(filepath, mode='r') as f:
        # Set model weights.
        topology.load_weights_from_hdf5_group(f['model_weights'], model.layers)
        # Set optimizer weights.
        if include_optimizer and 'optimizer_weights' in f and hasattr(
                model, 'optimizer') and model.optimizer:
            optimizer_weights_group = f['optimizer_weights']
            optimizer_weight_names = [
                n.decode('utf8')
                for n in optimizer_weights_group.attrs['weight_names']
            ]
            optimizer_weight_values = [
                optimizer_weights_group[n] for n in optimizer_weight_names
            ]
            model.optimizer.set_weights(optimizer_weight_values)
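A sketch of calling `load_all_weights`, assuming a hypothetical `build_model` factory and a file written by the matching `save_all_weights`:

# `build_model` and the checkpoint path are placeholders.
model = build_model()
# Compile first so the optimizer exists and its weights can be restored.
model.compile(optimizer='adam', loss='mse')
load_all_weights(model, 'checkpoint.h5', include_optimizer=True)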
def load_weight(self, filepath, by_name=False, exclude=None):
    import h5py
    from keras.engine import topology

    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    f = h5py.File(filepath, mode='r')
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        f = f['model_weights']

    # In multi-GPU training, get layers of inner model
    keras_model = self.keras_model
    layers = keras_model.inner_model.layers if hasattr(
        keras_model, "inner_model") else keras_model.layers

    # Exclude some layers
    if exclude:
        by_name = True
        layers = [l for l in layers if l.name not in exclude]

    if by_name:
        topology.load_weights_from_hdf5_group_by_name(f, layers)
    else:
        topology.load_weights_from_hdf5_group(f, layers)
    if hasattr(f, 'close'):
        f.close()

    # Update the log directory
    self.set_log_dir(filepath)
try:
    import h5py
except ImportError:
    h5py = None
from keras.engine import topology


def load_weights(filepath, model):
    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    with h5py.File(filepath, mode='r') as f:
        # Set model weights.
        topology.load_weights_from_hdf5_group(f['model_weights'], model.layers)
    return model
import h5py
from keras.engine import topology
# `load_optimizer` is assumed to be the companion loader defined
# alongside this function.


def load_state(model, outdir, rank):
    fname = "{}/weights_opt_{}.h5".format(outdir, rank)
    # keras.engine.network.py, l. 1124+
    with h5py.File(fname, mode='r') as f:
        # Note: the original re-opened the file inside the `with`
        # block, leaking the first handle; one open is enough.
        weights = f
        if 'layer_names' not in f.attrs and 'model_weights' in f:
            weights = f['model_weights']
        topology.load_weights_from_hdf5_group(weights, model.layers)
        load_optimizer(model, f)
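A sketch of restoring per-rank state in a distributed job, assuming an mpi4py-style communicator; `comm`, `model`, and the output directory are placeholders:

from mpi4py import MPI

comm = MPI.COMM_WORLD
# Each worker reloads the file it wrote: <outdir>/weights_opt_<rank>.h5
load_state(model, outdir='checkpoints', rank=comm.Get_rank())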
def load_weights(self, filepath, by_name=False, exclude=None): """ Modified version of the correspoding Keras function with the addition of multi-GPU support and the ability to exclude some layers from loading. exlude: list of layer names to excluce """ import h5py from keras.engine import topology print('>>> load_weights()') if exclude: by_name = True if h5py is None: raise ImportError('`load_weights` requires h5py.') log(' load_weights: Loading weights from: {}'.format(filepath)) f = h5py.File(filepath, mode='r') if 'layer_names' not in f.attrs and 'model_weights' in f: f = f['model_weights'] # In multi-GPU training, we wrap the model. Get layers # of the inner model because they have the weights. keras_model = self.keras_model layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model")\ else keras_model.layers # Exclude some layers if exclude: layers = filter(lambda l: l.name not in exclude, layers) # print(' layers to load ' ) # print('----------------' ) # for idx,layer in enumerate(layers): # print('>layer {} : name : {:40s} type: {}'.format(idx,layer.name,layer)) if by_name: topology.load_weights_from_hdf5_group_by_name(f, layers) else: topology.load_weights_from_hdf5_group(f, layers) if hasattr(f, 'close'): f.close() log(' load_weights: Log directory set to : {}'.format(filepath)) # Update the log directory self.set_log_dir(filepath) print(' Load weights complete : ', filepath) return (filepath)
def load_weights(self, filepath, by_name=False, exclude=None,
                 verbose=False, verboseverbose=False):
    """Modified version of the corresponding Keras function with
    the addition of multi-GPU support and the ability to exclude
    some layers from loading.

    exclude: list of layer names to exclude
    """
    import h5py
    from keras.engine import topology
    from utils.misc import load_weights_from_hdf5_group_by_name

    if exclude:
        by_name = True

    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    with h5py.File(filepath, mode='r') as f:
        # This function only handles f with 'layer_names'
        # if 'layer_names' not in f.attrs and 'model_weights' in f:
        #     f = f['model_weights']

        # In multi-GPU training, we wrap the model. Get layers
        # of the inner model because they have the weights.
        keras_model = self.keras_model
        layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \
            else keras_model.layers

        # Exclude some layers
        if exclude:
            layers = [l for l in layers if l.name not in exclude]

        if by_name:
            # Use our own loading function in utils.misc.
            load_weights_from_hdf5_group_by_name(
                f, layers, verbose=verbose, verboseverbose=verboseverbose)
        else:
            topology.load_weights_from_hdf5_group(f, layers)

    # Update the log directory
    self.set_log_dir(filepath)
def load_model(filepath, custom_objects=None, compile=True): """Loads an Evolutron model saved via Model.save(). # Arguments filepath: String, path to the saved model. custom_objects: Optional dictionary mapping names (strings) to custom classes or functions to be considered during deserialization. compile: Boolean, whether to compile the model after loading. # Returns An Evolutron model instance. If an optimizer was found as part of the saved model, the model is already compiled. Otherwise, the model is uncompiled and a warning will be displayed. When `compile` is set to False, the compilation is omitted without any warning. # Raises ImportError: if h5py is not available. ValueError: In case of an invalid savefile. """ if h5py is None: raise ImportError('`load_model` requires h5py.') if not custom_objects: custom_objects = {} def convert_custom_objects(obj): """Handles custom object lookup. # Arguments obj: object, dict, or list. # Returns The same structure, where occurences of a custom object name have been replaced with the custom object. """ if isinstance(obj, list): deserialized = [] for value in obj: if value in custom_objects: deserialized.append(custom_objects[value]) else: deserialized.append(value) return deserialized if isinstance(obj, dict): deserialized = {} for key, value in obj.items(): deserialized[key] = [] if isinstance(value, list): for element in value: if element in custom_objects: deserialized[key].append(custom_objects[element]) else: deserialized[key].append(element) elif value in custom_objects: deserialized[key] = custom_objects[value] else: deserialized[key] = value return deserialized if obj in custom_objects: return custom_objects[obj] return obj f = h5py.File(filepath, mode='r') # instantiate model model_config = f.attrs.get('model_config') if model_config is None: raise ValueError('No model found in config file.') model_config = json.loads(model_config.decode('utf-8')) globs = globals() # All layers. globs['Model'] = Model model = deserialize_keras_object(model_config, module_objects=globs, custom_objects=custom_objects, printable_module_name='layer') # set weights topology.load_weights_from_hdf5_group(f['model_weights'], model.layers) # Early return if compilation is not required. if not compile: f.close() return model # instantiate optimizer training_config = f.attrs.get('training_config') if training_config is None: warnings.warn('No training configuration found in save file: ' 'the model was *not* compiled. Compile it manually.') f.close() return model training_config = json.loads(training_config.decode('utf-8')) optimizer_config = training_config['optimizer_config'] optimizer = opt.deserialize(optimizer_config, custom_objects=custom_objects) # Recover loss functions and metrics. loss = convert_custom_objects(training_config['loss']) metrics = convert_custom_objects(training_config['metrics']) sample_weight_mode = training_config['sample_weight_mode'] loss_weights = training_config['loss_weights'] # Compile model. model.compile(optimizer=optimizer, loss=loss, metrics=metrics, loss_weights=loss_weights, sample_weight_mode=sample_weight_mode) # Set optimizer weights. if 'optimizer_weights' in f: # Build train function (to get weight updates). 
model._make_train_function() optimizer_weights_group = f['optimizer_weights'] optimizer_weight_names = [ n.decode('utf8') for n in optimizer_weights_group.attrs['weight_names'] ] optimizer_weight_values = [ optimizer_weights_group[n] for n in optimizer_weight_names ] model.optimizer.set_weights(optimizer_weight_values) f.close() return model
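A usage sketch for this `load_model` variant; the file path and custom loss name are hypothetical:

# A model saved with a custom loss must be loaded with that object
# supplied, otherwise deserialization cannot resolve the name.
model = load_model('evolutron_model.h5',
                   custom_objects={'masked_mse': masked_mse})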
def load_weights(self, filepath, by_name=False, exclude=None, new_folder=False):
    '''Modified version of the corresponding Keras function with
    the addition of multi-GPU support and the ability to exclude
    some layers from loading.

    exclude: list of layer names to exclude
    '''
    import h5py
    from keras.engine import topology

    log(' >>> load_weights() from : {}'.format(filepath))
    if exclude:
        by_name = True

    if h5py is None:
        raise ImportError('`load_weights` requires h5py.')
    f = h5py.File(filepath, mode='r')
    pp.pprint(f.__dict__)
    if 'layer_names' not in f.attrs and 'model_weights' in f:
        print('im here')
        f = f['model_weights']
    else:
        print('im not here')

    # In multi-GPU training, we wrap the model. Get layers
    # of the inner model because they have the weights.
    keras_model = self.keras_model
    layers = keras_model.inner_model.layers if hasattr(keras_model, "inner_model") \
        else keras_model.layers

    print('\n\n')
    print('--------------------------------')
    print(' List of all Layers in Model ')
    print('--------------------------------')
    print('\n\n')
    for idx, layer in enumerate(layers):
        print('>layer {} : name : {:40s} type: {}'.format(
            idx, layer.name, layer))

    # Exclude some layers. A list (not a lazy `filter`) so the layers
    # can be both printed below and still passed to the loader.
    if exclude:
        layers = [l for l in layers if l.name not in exclude]
        print(' --------------------------------------')
        print(' layers to load (not in exclude list) ')
        print(' --------------------------------------')
        for idx, layer in enumerate(layers):
            print(' >layer {} : name : {:40s} type: {}'.format(
                idx, layer.name, layer))
        print('\n\n')

    if by_name:
        topology.load_weights_from_hdf5_group_by_name(f, layers)
    else:
        topology.load_weights_from_hdf5_group(f, layers)
    if hasattr(f, 'close'):
        f.close()

    # Update the log directory
    print(' Weights file loaded: {} '.format(filepath))
    print(' Weights file loaded: {} '.format(filepath), file=sys.__stdout__)
    if self.mode == 'training':
        self.set_log_dir(filepath, new_folder)
    print(" MODEL Load weight file COMPLETE ")
    return filepath
def load_model(filepath, custom_objects=None, compile=True): """Loads a model saved via `save_model`. # Arguments filepath: String, path to the saved model. custom_objects: Optional dictionary mapping names (strings) to custom classes or functions to be considered during deserialization. compile: Boolean, whether to compile the model after loading. # Returns A Keras model instance. If an optimizer was found as part of the saved model, the model is already compiled. Otherwise, the model is uncompiled and a warning will be displayed. When `compile` is set to False, the compilation is omitted without any warning. # Raises ImportError: if h5py is not available. ValueError: In case of an invalid savefile. """ if h5py is None: raise ImportError('`load_model` requires h5py.') if not custom_objects: custom_objects = {} def convert_custom_objects(obj): """Handles custom object lookup. # Arguments obj: object, dict, or list. # Returns The same structure, where occurrences of a custom object name have been replaced with the custom object. """ if isinstance(obj, list): deserialized = [] for value in obj: deserialized.append(convert_custom_objects(value)) return deserialized if isinstance(obj, dict): deserialized = {} for key, value in obj.items(): deserialized[key] = convert_custom_objects(value) return deserialized if obj in custom_objects: return custom_objects[obj] return obj with h5py.File(filepath, mode='r') as f: # instantiate model model_config = f.attrs.get('model_config') if model_config is None: raise ValueError('No model found in config file.') model_config = json.loads(model_config.decode('utf-8')) model = model_from_config(model_config, custom_objects=custom_objects) # set weights topology.load_weights_from_hdf5_group(f['model_weights'], model.layers) # Early return if compilation is not required. if not compile: return model # instantiate optimizer training_config = f.attrs.get('training_config') if training_config is None: warnings.warn('No training configuration found in save file: ' 'the model was *not* compiled. Compile it manually.') return model training_config = json.loads(training_config.decode('utf-8')) optimizer_config = training_config['optimizer_config'] optimizer = optimizers.deserialize(optimizer_config, custom_objects=custom_objects) # Recover loss functions and metrics. loss = convert_custom_objects(training_config['loss']) metrics = convert_custom_objects(training_config['metrics']) sample_weight_mode = training_config['sample_weight_mode'] loss_weights = training_config['loss_weights'] # Compile model. model.compile(optimizer=optimizer, loss=loss, metrics=metrics, loss_weights=loss_weights, sample_weight_mode=sample_weight_mode) # Set optimizer weights. if 'optimizer_weights' in f: # Build train function (to get weight updates). if isinstance(model, Sequential): model.model._make_train_function() else: model._make_train_function() optimizer_weights_group = f['optimizer_weights'] optimizer_weight_names = [ n.decode('utf8') for n in optimizer_weights_group.attrs['weight_names'] ] optimizer_weight_values = [ optimizer_weights_group[n] for n in optimizer_weight_names ] try: model.optimizer.set_weights(optimizer_weight_values) except ValueError: warnings.warn('Error in loading the saved optimizer ' 'state. As a result, your model is ' 'starting with a freshly initialized ' 'optimizer.') metadata = f.attrs.get('metadata') if metadata is None: warnings.warn('No metadata found for model.') else: model.metadata = json.loads(metadata.decode('utf-8')) return model
cropped_height, cropped_width)

# Set batches of training and validation required
train_batches = int(np.ceil(train_count / batch_size))
val_batches = int(np.ceil(val_count / batch_size))

cbcnn_model = vgg_16_cbcnn(input_shape=(cropped_height, cropped_width, 3),
                           no_classes=no_classes,
                           bilinear_output_dim=8192,
                           sum_pool=True,
                           weight_decay_constant=weight_decay_constant,
                           multi_label=False,
                           weights_path=None)

with h5py.File(init_weights_path, mode='r') as f:
    topology.load_weights_from_hdf5_group(f['model_weights'],
                                          cbcnn_model.layers)

# Define metrics to utilize
top3_acc = functools.partial(top_k_categorical_accuracy, k=3)
top5_acc = functools.partial(top_k_categorical_accuracy, k=5)
top3_acc.__name__ = 'top3_acc'
top5_acc.__name__ = 'top5_acc'

# Model training section
sgd = SGD(lr=INITIAL_LR, momentum=0.9)
cbcnn_model.compile(optimizer=sgd,
                    loss='categorical_crossentropy',
                    metrics=['accuracy', top3_acc, top5_acc])
check_pointer = ModelCheckpoint(monitor='val_loss',
                                filepath=DNN_BEST_MODEL,
                                verbose=1,