def load_model(filepath, custom_objects=None, compile=True):  # pylint: disable=redefined-builtin
  """Loads a model saved via `save_model`.

  Arguments:
      filepath: One of the following:
          - String, path to the saved model
          - `h5py.File` object from which to load the model
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.
      compile: Boolean, whether to compile the model
          after loading.

  Returns:
      A Keras model instance. If an optimizer was found
      as part of the saved model, the model is already
      compiled. Otherwise, the model is uncompiled and
      a warning will be displayed. When `compile` is set
      to False, the compilation is omitted without any
      warning.

  Raises:
      ImportError: if h5py is not available.
      ValueError: In case of an invalid savefile.
  """
  if h5py is None:
    raise ImportError('`load_model` requires h5py.')

  if not custom_objects:
    custom_objects = {}

  def convert_custom_objects(obj):
    """Handles custom object lookup.

    Arguments:
        obj: object, dict, or list.

    Returns:
        The same structure, where occurrences
            of a custom object name have been replaced
            with the custom object.
    """
    if isinstance(obj, list):
      deserialized = []
      for value in obj:
        deserialized.append(convert_custom_objects(value))
      return deserialized
    if isinstance(obj, dict):
      deserialized = {}
      for key, value in obj.items():
        deserialized[key] = convert_custom_objects(value)
      return deserialized
    if obj in custom_objects:
      return custom_objects[obj]
    return obj

  # Accept either a path or an already-open h5py.File; only close what we
  # opened ourselves (see the `finally` block).
  opened_new_file = not isinstance(filepath, h5py.File)
  if opened_new_file:
    f = h5py.File(filepath, mode='r')
  else:
    f = filepath

  model = None
  try:
    # instantiate model
    model_config = f.attrs.get('model_config')
    if model_config is None:
      raise ValueError('No model found in config file.')
    model_config = json.loads(model_config.decode('utf-8'))
    model = model_config_lib.model_from_config(model_config,
                                               custom_objects=custom_objects)

    # set weights
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    if compile:
      # instantiate optimizer
      training_config = f.attrs.get('training_config')
      if training_config is None:
        logging.warning('No training configuration found in save file: '
                        'the model was *not* compiled. Compile it manually.')
        return model
      training_config = json.loads(training_config.decode('utf-8'))
      optimizer_config = training_config['optimizer_config']
      optimizer = optimizers.deserialize(
          optimizer_config, custom_objects=custom_objects)

      # Recover loss functions and metrics.
      loss = convert_custom_objects(training_config['loss'])
      metrics = convert_custom_objects(training_config['metrics'])
      weighted_metrics = convert_custom_objects(
          training_config.get('weighted_metrics', None))
      sample_weight_mode = training_config['sample_weight_mode']
      loss_weights = training_config['loss_weights']

      # Compile model.
      model.compile(
          optimizer=optimizer,
          loss=loss,
          metrics=metrics,
          weighted_metrics=weighted_metrics,
          loss_weights=loss_weights,
          sample_weight_mode=sample_weight_mode)

      # Set optimizer weights.
      if 'optimizer_weights' in f:
        # Build train function (to get weight updates).
        # Models that aren't graph networks must wait until they are called
        # with data to _make_train_function() and so can't load optimizer
        # weights.
        if model._is_graph_network:  # pylint: disable=protected-access
          model._make_train_function()
          optimizer_weight_values = load_optimizer_weights_from_hdf5_group(f)
          try:
            model.optimizer.set_weights(optimizer_weight_values)
          except ValueError:
            logging.warning('Error in loading the saved optimizer '
                            'state. As a result, your model is '
                            'starting with a freshly initialized '
                            'optimizer.')
        else:
          # Fixed: missing space between 'is' and 'starting' in the
          # concatenated warning message.
          logging.warning('Sequential models without an `input_shape` '
                          'passed to the first layer cannot reload their '
                          'optimizer state. As a result, your model is '
                          'starting with a freshly initialized optimizer.')
  finally:
    if opened_new_file:
      f.close()
  return model
def load_model_from_hdf5(filepath, custom_objects=None, compile=True):  # pylint: disable=redefined-builtin
  """Loads a model saved via `save_model_to_hdf5`.

  Arguments:
      filepath: One of the following:
          - String, path to the saved model
          - `h5py.File` object from which to load the model
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.
      compile: Boolean, whether to compile the model
          after loading.

  Returns:
      A Keras model instance. If an optimizer was found
      as part of the saved model, the model is already
      compiled. Otherwise, the model is uncompiled and
      a warning will be displayed. When `compile` is set
      to False, the compilation is omitted without any
      warning.

  Raises:
      ImportError: if h5py is not available.
      ValueError: In case of an invalid savefile.
  """
  if h5py is None:
    raise ImportError('`load_model` requires h5py.')

  if not custom_objects:
    custom_objects = {}

  # Accept either a path or an already-open h5py.File; only close what we
  # opened ourselves (see the `finally` block).
  opened_new_file = not isinstance(filepath, h5py.File)
  if opened_new_file:
    f = h5py.File(filepath, mode='r')
  else:
    f = filepath

  model = None
  try:
    # instantiate model
    model_config = f.attrs.get('model_config')
    if model_config is None:
      raise ValueError('No model found in config file.')
    model_config = json.loads(model_config.decode('utf-8'))
    model = model_config_lib.model_from_config(
        model_config, custom_objects=custom_objects)

    # set weights
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    if compile:
      # instantiate optimizer
      training_config = f.attrs.get('training_config')
      if training_config is None:
        logging.warning(
            'No training configuration found in save file: '
            'the model was *not* compiled. Compile it manually.')
        return model
      training_config = json.loads(training_config.decode('utf-8'))

      # Compile model.
      model.compile(**saving_utils.compile_args_from_training_config(
          training_config, custom_objects))

      # Set optimizer weights.
      if 'optimizer_weights' in f:
        # Build train function (to get weight updates).
        # Models that aren't graph networks must wait until they are called
        # with data to _make_train_function() and so can't load optimizer
        # weights.
        if model._is_graph_network:  # pylint: disable=protected-access
          model._make_train_function()
          optimizer_weight_values = load_optimizer_weights_from_hdf5_group(
              f)
          try:
            model.optimizer.set_weights(optimizer_weight_values)
          except ValueError:
            logging.warning('Error in loading the saved optimizer '
                            'state. As a result, your model is '
                            'starting with a freshly initialized '
                            'optimizer.')
        else:
          # Fixed: missing space between 'is' and 'starting' in the
          # concatenated warning message.
          logging.warning(
              'Sequential models without an `input_shape` '
              'passed to the first layer cannot reload their '
              'optimizer state. As a result, your model is '
              'starting with a freshly initialized optimizer.')
  finally:
    if opened_new_file:
      f.close()
  return model
def load_model_from_hdf5(filepath, custom_objects=None, compile=True):  # pylint: disable=redefined-builtin
  """Loads a model saved via `save_model_to_hdf5`.

  Arguments:
      filepath: One of the following:
          - String, path to the saved model
          - `h5py.File` object from which to load the model
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.
      compile: Boolean, whether to compile the model
          after loading.

  Returns:
      A Keras model instance. If an optimizer was found
      as part of the saved model, the model is already
      compiled. Otherwise, the model is uncompiled and
      a warning will be displayed. When `compile` is set
      to False, the compilation is omitted without any
      warning.

  Raises:
      ImportError: if h5py is not available.
      ValueError: In case of an invalid savefile.
  """
  if h5py is None:
    raise ImportError('`load_model` requires h5py.')

  if not custom_objects:
    custom_objects = {}

  # Accept either a path or an already-open h5py.File; only close what we
  # opened ourselves (see the `finally` block).
  opened_new_file = not isinstance(filepath, h5py.File)
  if opened_new_file:
    f = h5py.File(filepath, mode='r')
  else:
    f = filepath

  model = None
  try:
    # instantiate model
    model_config = f.attrs.get('model_config')
    if model_config is None:
      raise ValueError('No model found in config file.')
    model_config = json.loads(model_config.decode('utf-8'))
    model = model_config_lib.model_from_config(model_config,
                                               custom_objects=custom_objects)

    # set weights
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    if compile:
      # instantiate optimizer
      training_config = f.attrs.get('training_config')
      if training_config is None:
        logging.warning('No training configuration found in the save file, so '
                        'the model was *not* compiled. Compile it manually.')
        return model
      training_config = json.loads(training_config.decode('utf-8'))

      # Compile model.
      model.compile(**saving_utils.compile_args_from_training_config(
          training_config, custom_objects))

      # Set optimizer weights.
      if 'optimizer_weights' in f:
        try:
          # Create all optimizer slot variables up front so the saved
          # values can be restored into them below.
          model.optimizer._create_all_weights(model.trainable_variables)  # pylint: disable=protected-access
        except (NotImplementedError, AttributeError):
          # Fixed: the original message contained a literal, unfilled '{}'
          # placeholder; use lazy %-formatting with the optimizer instead.
          logging.warning(
              'Error when creating the weights of optimizer %s, making it '
              'impossible to restore the saved optimizer state. As a result, '
              'your model is starting with a freshly initialized optimizer.',
              model.optimizer)

        optimizer_weight_values = load_optimizer_weights_from_hdf5_group(f)
        try:
          model.optimizer.set_weights(optimizer_weight_values)
        except ValueError:
          logging.warning('Error in loading the saved optimizer '
                          'state. As a result, your model is '
                          'starting with a freshly initialized '
                          'optimizer.')
  finally:
    if opened_new_file:
      f.close()
  return model
def load_model(filepath, custom_objects=None, compile=True):  # pylint: disable=redefined-builtin
  """Loads a model saved via `save_model`.

  Arguments:
      filepath: One of the following:
          - String, path to the saved model
          - `h5py.File` object from which to load the model
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.
      compile: Boolean, whether to compile the model
          after loading.

  Returns:
      A Keras model instance. If an optimizer was found
      as part of the saved model, the model is already
      compiled. Otherwise, the model is uncompiled and
      a warning will be displayed. When `compile` is set
      to False, the compilation is omitted without any
      warning.

  Raises:
      ImportError: if h5py is not available.
      ValueError: In case of an invalid savefile.
  """
  if h5py is None:
    raise ImportError('`load_model` requires h5py.')

  if not custom_objects:
    custom_objects = {}

  def convert_custom_objects(obj):
    """Handles custom object lookup.

    Arguments:
        obj: object, dict, or list.

    Returns:
        The same structure, where occurrences
            of a custom object name have been replaced
            with the custom object.
    """
    if isinstance(obj, list):
      deserialized = []
      for value in obj:
        deserialized.append(convert_custom_objects(value))
      return deserialized
    if isinstance(obj, dict):
      deserialized = {}
      for key, value in obj.items():
        deserialized[key] = convert_custom_objects(value)
      return deserialized
    if obj in custom_objects:
      return custom_objects[obj]
    return obj

  # Accept either a path or an already-open h5py.File; only close what we
  # opened ourselves (see the `finally` block).
  opened_new_file = not isinstance(filepath, h5py.File)
  if opened_new_file:
    f = h5py.File(filepath, mode='r')
  else:
    f = filepath

  model = None
  try:
    # instantiate model
    model_config = f.attrs.get('model_config')
    if model_config is None:
      raise ValueError('No model found in config file.')
    model_config = json.loads(model_config.decode('utf-8'))
    model = model_config_lib.model_from_config(
        model_config, custom_objects=custom_objects)

    # set weights
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    if compile:
      # instantiate optimizer
      training_config = f.attrs.get('training_config')
      if training_config is None:
        logging.warning(
            'No training configuration found in save file: '
            'the model was *not* compiled. Compile it manually.')
        return model
      training_config = json.loads(training_config.decode('utf-8'))
      optimizer_config = training_config['optimizer_config']
      optimizer = optimizers.deserialize(optimizer_config,
                                         custom_objects=custom_objects)

      # Recover loss functions and metrics.
      loss = convert_custom_objects(training_config['loss'])
      metrics = convert_custom_objects(training_config['metrics'])
      weighted_metrics = convert_custom_objects(
          training_config.get('weighted_metrics', None))
      sample_weight_mode = training_config['sample_weight_mode']
      loss_weights = training_config['loss_weights']

      # Compile model.
      model.compile(optimizer=optimizer,
                    loss=loss,
                    metrics=metrics,
                    weighted_metrics=weighted_metrics,
                    loss_weights=loss_weights,
                    sample_weight_mode=sample_weight_mode)

      # Set optimizer weights.
      if 'optimizer_weights' in f:
        # Build train function (to get weight updates).
        # Models that aren't graph networks must wait until they are called
        # with data to _make_train_function() and so can't load optimizer
        # weights.
        if model._is_graph_network:  # pylint: disable=protected-access
          model._make_train_function()
          optimizer_weight_values = load_optimizer_weights_from_hdf5_group(
              f)
          try:
            model.optimizer.set_weights(optimizer_weight_values)
          except ValueError:
            logging.warning('Error in loading the saved optimizer '
                            'state. As a result, your model is '
                            'starting with a freshly initialized '
                            'optimizer.')
        else:
          # Fixed: missing space between 'is' and 'starting' in the
          # concatenated warning message.
          logging.warning(
              'Sequential models without an `input_shape` '
              'passed to the first layer cannot reload their '
              'optimizer state. As a result, your model is '
              'starting with a freshly initialized optimizer.')
  finally:
    if opened_new_file:
      f.close()
  return model
def load(
    self,
    *,
    timestamp: Optional[Timestamp] = None,
    compile_model: bool = False,
    custom_objects: Optional[Mapping[str, Any]] = None,
    input_shape: Optional[Tuple[int, ...]] = None,
) -> tf.keras.Model:
    """
    Load a Tensorflow model from a TileDB array.

    :param timestamp: Range of timestamps to load fragments of the array
        which live in the specified time range.
    :param compile_model: Whether to compile the model after loading or not.
    :param custom_objects: Mapping of names to custom classes or functions to
        be considered during deserialization.
    :param input_shape: The shape that the custom model expects as input
    :return: Tensorflow model.
    """
    # TODO: Change timestamp when issue in core is resolved
    with tiledb.open(self.uri, ctx=self.ctx, timestamp=timestamp) as model_array:
        model_array_results = model_array[:]
        model_config = json.loads(model_array.meta["model_config"])
        model_class = model_config["class_name"]

        if model_class != "Sequential" and model_class != "Functional":
            # Custom subclassed model: rebuild from config inside the custom
            # object scopes, then restore weights layer by layer.
            with generic_utils.SharedObjectLoadingScope():
                with generic_utils.CustomObjectScope(custom_objects or {}):
                    if hasattr(model_config, "decode"):
                        model_config = model_config.decode("utf-8")
                    model = model_config_lib.model_from_config(
                        model_config, custom_objects=custom_objects)
                    if not model.built:
                        model.build(input_shape)

                    # Load weights for layers
                    self._load_custom_subclassed_model(model, model_array)
        else:
            cls = (tf.keras.Sequential
                   if model_class == "Sequential" else tf.keras.Model)
            model = cls.from_config(model_config["config"])
            # NOTE(review): pickle.loads on stored array data — only safe if
            # the TileDB array is a trusted source; pickle can execute
            # arbitrary code on load.
            model_weights = pickle.loads(
                model_array_results["model_weights"].item(0))
            model.set_weights(model_weights)

        if compile_model:
            optimizer_weights = pickle.loads(
                model_array_results["optimizer_weights"].item(0))
            training_config = json.loads(
                model_array.meta["training_config"])

            # Compile model.
            model.compile(**saving_utils.compile_args_from_training_config(
                training_config, custom_objects))
            saving_utils.try_build_compiled_arguments(model)

            # Set optimizer weights.
            if optimizer_weights:
                try:
                    # Create all optimizer slot variables up front so the
                    # saved values can be restored into them below.
                    model.optimizer._create_all_weights(
                        model.trainable_variables)
                except (NotImplementedError, AttributeError):
                    # Fixed: the original message contained a literal,
                    # unfilled '{}' placeholder; use lazy %-formatting with
                    # the optimizer instead.
                    logging.warning(
                        "Error when creating the weights of optimizer %s, "
                        "making it impossible to restore the saved optimizer "
                        "state. As a result, your model is starting with a "
                        "freshly initialized optimizer.",
                        model.optimizer,
                    )

                try:
                    model.optimizer.set_weights(optimizer_weights)
                except ValueError:
                    logging.warning("Error in loading the saved optimizer "
                                    "state. As a result, your model is "
                                    "starting with a freshly initialized "
                                    "optimizer.")

    return model