Example #1
def import_metagraph(model):
    """Imports a trained model metagraph into the current graph."""
    # load_cached is assumed to return the serialized MetaGraphDef bytes
    serialized = load_cached(model + ".metagraph")
    metagraph = tf.MetaGraphDef()
    metagraph.ParseFromString(serialized)
    tf.train.import_meta_graph(metagraph)
    return metagraph.signature_def
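A minimal usage sketch for this helper, assuming a TF 1.x-style tf import and a model whose serialized metagraph has been cached under "my_model.metagraph" (both are illustrative assumptions, not part of the original example):

import tensorflow.compat.v1 as tf

# Hypothetical call: import the cached metagraph into a fresh graph and list its signatures.
with tf.Graph().as_default():
    signature_defs = import_metagraph("my_model")  # "my_model" is a placeholder name
    for signature_name in signature_defs:
        print("found signature:", signature_name)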
Example #2
def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpoint: str = "",
                      model_dir: str = "", saved_model_tags: list = None, meta_graph_file: str = "",
                      user_output_node_names_list: list = []):
    # As a provisional solution, use native TF methods to load a model protobuf
    graph_def = tf_v1.GraphDef()
    if isinstance(graph_file_name, str) and (re.match(r'.*\.(ckpt|meta)$', graph_file_name)):
        print('[ WARNING ] The value for the --input_model command line parameter ends with ".ckpt" or ".meta" '
              'extension.\n'
              'It means that the model is not frozen.\n'
              'To load a non-frozen model into the Model Optimizer, run:'
              '\n\n1. For "*.ckpt" file:'
              '\n- if inference graph is in binary format'
              '\npython3 mo_tf.py --input_model "path/to/inference_graph.pb" --input_checkpoint "path/to/*.ckpt"'
              '\n- if inference graph is in text format'
              '\npython3 mo_tf.py --input_model "path/to/inference_graph.pbtxt" --input_model_is_text '
              '--input_checkpoint "path/to/*.ckpt"'
              '\n\n2. For "*.meta" file:'
              '\npython3 mo_tf.py --input_meta_graph "path/to/*.meta"')
    variables_values = {}
    try:
        if graph_file_name and not meta_graph_file and not checkpoint:
            # frozen graph
            return read_file_to_graph_def(graph_def, graph_file_name, is_binary), variables_values, 'tf'
        if graph_file_name and not meta_graph_file and checkpoint:
            # inference graph and checkpoint
            graph_def = read_file_to_graph_def(graph_def, graph_file_name, is_binary)
            outputs = get_output_node_names_list(graph_def, user_output_node_names_list)
            if os.path.isfile(checkpoint):
                graph_def = freeze_checkpoint(graph_def=graph_def, checkpoint=checkpoint, output_node_names=outputs)
            elif os.path.isdir(checkpoint):
                graph_def, variables_values = freeze_checkpoints(graph_def=graph_def, checkpoint_dir=checkpoint,
                                                                 output_node_names=outputs)
            # we are sure that the checkpoint is an existing file or directory due to the cli_parser configuration
            return graph_def, variables_values, 'tf'
        if not graph_file_name and meta_graph_file:
            meta_graph_file = deducing_metagraph_path(meta_graph_file)
            input_meta_graph_def = read_file_to_graph_def(tf_v1.MetaGraphDef(), meta_graph_file, is_binary)
            # pylint: disable=no-member
            with tf_v1.Session() as sess:
                restorer = tf_v1.train.import_meta_graph(input_meta_graph_def)
                restorer.restore(sess, re.sub(r'\.meta$', '', meta_graph_file))
                outputs = get_output_node_names_list(input_meta_graph_def.graph_def, user_output_node_names_list)
                graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, input_meta_graph_def.graph_def,
                                                                            outputs)
                return graph_def, variables_values, 'tf'
        if model_dir:
            # saved model directory
            try:
                env_setup = get_environment_setup("tf")
                # enable eager execution temporarily while the TensorFlow 2 model is being loaded
                tf_v1.enable_eager_execution()
                # code to extract GraphDef for the TF 2.0 SavedModel format;
                # tf.saved_model.load throws TypeError for the TF 1.x SavedModel format in case TF 1.x is installed
                imported = tf.saved_model.load(model_dir, saved_model_tags)  # pylint: disable=E1120
                # getting a signature by key throws KeyError for the TF 1.x SavedModel format in case TF 2.x is installed
                concrete_func = imported.signatures[tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
                # the aggressive_inlining parameter is needed to freeze the embeddings table of the Keras Embedding
                # operation; a model with an Embedding operation cannot be properly converted to IR without it
                if "tensorflow" in env_setup and env_setup["tensorflow"] >= LooseVersion("2.2.0"):
                    frozen_func = convert_variables_to_constants_v2(concrete_func,
                                                                    lower_control_flow=False,
                                                                    aggressive_inlining=True)  # pylint: disable=E1123
                else:
                    frozen_func = convert_variables_to_constants_v2(concrete_func,
                                                                    lower_control_flow=False)  # pylint: disable=E1123
                graph_def = frozen_func.graph.as_graph_def(add_shapes=True)
                # disable eager execution since the next steps are executed with a graph in non-eager mode
                tf_v1.disable_eager_execution()
                return graph_def, variables_values, 'tf2'
            except (TypeError, KeyError):
                # disable eager execution since a TensorFlow 1 model is being handled
                tf_v1.disable_eager_execution()
                # code to extract GraphDef for the TF 1.0 SavedModel format
                tags = saved_model_tags if saved_model_tags is not None else [tf_v1.saved_model.tag_constants.SERVING]
                with tf_v1.Session() as sess:
                    meta_graph_def = tf_v1.saved_model.loader.load(sess, tags, model_dir)
                    outputs = get_output_node_names_list(meta_graph_def.graph_def, user_output_node_names_list)
                    graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, meta_graph_def.graph_def, outputs)
                    return graph_def, variables_values, 'tf'
            except Exception as e:
                raise FrameworkError('SavedModel format load failure: {}', e) from e
    except Exception as e:
        raise FrameworkError('Cannot load input model: {}', e) from e
    raise Error("Unknown configuration of input model parameters")
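For reference, a hedged sketch of how this loader might be invoked for its first two branches (all file paths are illustrative assumptions; this variant returns a (graph_def, variables_values, framework) triple):

# Hypothetical call sites; the paths below are placeholders.
# 1) Frozen binary graph:
graph_def, variables_values, framework = load_tf_graph_def(
    graph_file_name="path/to/frozen_graph.pb", is_binary=True)

# 2) Text-format inference graph plus a checkpoint file:
graph_def, variables_values, framework = load_tf_graph_def(
    graph_file_name="path/to/inference_graph.pbtxt", is_binary=False,
    checkpoint="path/to/model.ckpt")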
Example #3
def load_tf_graph_def(graph_file_name: str = "",
                      is_binary: bool = True,
                      checkpoint: str = "",
                      model_dir: str = "",
                      saved_model_tags: list = None,
                      meta_graph_file: str = "",
                      user_output_node_names_list: list = []):
    # As a provisional solution, use native TF methods to load a model protobuf
    graph_def = tf_v1.GraphDef()
    if isinstance(graph_file_name, str) and (re.match(r'.*\.(ckpt|meta)$',
                                                      graph_file_name)):
        print(
            '[ WARNING ] The value for the --input_model command line parameter ends with ".ckpt" or ".meta" '
            'extension.\n'
            'It means that the model is not frozen.\n'
            'To load a non-frozen model into the Model Optimizer, run:'
            '\n\n1. For "*.ckpt" file:'
            '\n- if inference graph is in binary format'
            '\npython3 mo_tf.py --input_model "path/to/inference_graph.pb" --input_checkpoint "path/to/*.ckpt"'
            '\n- if inference graph is in text format'
            '\npython3 mo_tf.py --input_model "path/to/inference_graph.pbtxt" --input_model_is_text '
            '--input_checkpoint "path/to/*.ckpt"'
            '\n\n2. For "*.meta" file:'
            '\npython3 mo_tf.py --input_meta_graph "path/to/*.meta"')
    variables_values = {}
    try:
        if graph_file_name and not meta_graph_file and not checkpoint:
            # frozen graph
            return read_file_to_graph_def(graph_def, graph_file_name,
                                          is_binary), variables_values
        if graph_file_name and not meta_graph_file and checkpoint:
            # inference graph and checkpoint
            graph_def = read_file_to_graph_def(graph_def, graph_file_name,
                                               is_binary)
            outputs = get_output_node_names_list(graph_def,
                                                 user_output_node_names_list)
            if os.path.isfile(checkpoint):
                graph_def = freeze_checkpoint(graph_def=graph_def,
                                              checkpoint=checkpoint,
                                              output_node_names=outputs)
            elif os.path.isdir(checkpoint):
                graph_def, variables_values = freeze_checkpoints(
                    graph_def=graph_def,
                    checkpoint_dir=checkpoint,
                    output_node_names=outputs)
            # we are sure that the checkpoint is an existing file or directory due to the cli_parser configuration
            return graph_def, variables_values
        if not graph_file_name and meta_graph_file:
            meta_graph_file = deducing_metagraph_path(meta_graph_file)
            input_meta_graph_def = read_file_to_graph_def(
                tf_v1.MetaGraphDef(), meta_graph_file, is_binary)
            # pylint: disable=no-member
            with tf_v1.Session() as sess:
                restorer = tf_v1.train.import_meta_graph(input_meta_graph_def)
                restorer.restore(sess, re.sub(r'\.meta$', '', meta_graph_file))
                outputs = get_output_node_names_list(
                    input_meta_graph_def.graph_def,
                    user_output_node_names_list)
                graph_def = tf_v1.graph_util.convert_variables_to_constants(
                    sess, input_meta_graph_def.graph_def, outputs)
                return graph_def, variables_values
        if model_dir:
            # saved model directory
            tags = saved_model_tags if saved_model_tags is not None else [
                tf_v1.saved_model.tag_constants.SERVING
            ]
            with tf_v1.Session() as sess:
                meta_graph_def = tf_v1.saved_model.loader.load(
                    sess, tags, model_dir)
                outputs = get_output_node_names_list(
                    meta_graph_def.graph_def, user_output_node_names_list)
                graph_def = tf_v1.graph_util.convert_variables_to_constants(
                    sess, meta_graph_def.graph_def, outputs)
                return graph_def, variables_values
    except Exception as e:
        raise FrameworkError('Cannot load input model: {}', e) from e
    raise Error("Unknown configuration of input model parameters")
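Unlike Example #2, this variant returns only a (graph_def, variables_values) pair and has no TF 2 SavedModel branch. A hedged usage sketch for the meta graph branch (the path is a placeholder assumption):

# Hypothetical call for a *.meta checkpoint graph; the path is illustrative.
graph_def, variables_values = load_tf_graph_def(
    meta_graph_file="path/to/model.meta")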
Example #4
def load_tf_graph_def(graph_file_name: str = "",
                      is_binary: bool = True,
                      checkpoint: str = "",
                      model_dir: str = "",
                      saved_model_tags: list = None,
                      meta_graph_file: str = "",
                      user_output_node_names_list: list = []):
    # As a provisional solution, use native TF methods to load a model protobuf
    graph_def = tf_v1.GraphDef()
    if isinstance(graph_file_name, str) and (re.match(r'.*\.(ckpt|meta)$',
                                                      graph_file_name)):
        print(
            '[ WARNING ] The value for the --input_model command line parameter ends with ".ckpt" or ".meta" '
            'extension.\n'
            'It means that the model is not frozen.\n'
            'To load a non-frozen model into the Model Optimizer, run:'
            '\n\n1. For "*.ckpt" file:'
            '\n- if inference graph is in binary format'
            '\npython3 mo_tf.py --input_model "path/to/inference_graph.pb" --input_checkpoint "path/to/*.ckpt"'
            '\n- if inference graph is in text format'
            '\npython3 mo_tf.py --input_model "path/to/inference_graph.pbtxt" --input_model_is_text '
            '--input_checkpoint "path/to/*.ckpt"'
            '\n\n2. For "*.meta" file:'
            '\npython3 mo_tf.py --input_meta_graph "path/to/*.meta"')
    variables_values = {}
    try:
        if graph_file_name and not meta_graph_file and not checkpoint:
            # frozen graph
            return read_file_to_graph_def(
                graph_def, graph_file_name,
                is_binary), variables_values, 'tf', None
        if graph_file_name and not meta_graph_file and checkpoint:
            # inference graph and checkpoint
            graph_def = read_file_to_graph_def(graph_def, graph_file_name,
                                               is_binary)
            outputs = get_output_node_names_list(graph_def,
                                                 user_output_node_names_list)
            if os.path.isfile(checkpoint):
                graph_def = freeze_checkpoint(graph_def=graph_def,
                                              checkpoint=checkpoint,
                                              output_node_names=outputs)
            elif os.path.isdir(checkpoint):
                graph_def, variables_values = freeze_checkpoints(
                    graph_def=graph_def,
                    checkpoint_dir=checkpoint,
                    output_node_names=outputs)
            # we are sure that the checkpoint is an existing file or directory due to the cli_parser configuration
            return graph_def, variables_values, 'tf', None
        if not graph_file_name and meta_graph_file:
            meta_graph_file = deducing_metagraph_path(meta_graph_file)
            input_meta_graph_def = read_file_to_graph_def(
                tf_v1.MetaGraphDef(), meta_graph_file, is_binary)
            # Since version 2.2, TF can fail with an internal error while loading a graph from a .meta file.
            # This happens because some operations may have an _output_shapes attribute inconsistent with the value
            # calculated from the GraphDef. To avoid this problem, the `_output_shapes` attributes are deleted below.
            for node in input_meta_graph_def.graph_def.node:
                if '_output_shapes' in node.attr:
                    del node.attr['_output_shapes']
            # pylint: disable=no-member
            with tf_v1.Session() as sess:
                restorer = tf_v1.train.import_meta_graph(input_meta_graph_def)
                restorer.restore(sess, re.sub(r'\.meta$', '', meta_graph_file))
                outputs = get_output_node_names_list(
                    input_meta_graph_def.graph_def,
                    user_output_node_names_list)
                graph_def = tf_v1.graph_util.convert_variables_to_constants(
                    sess, input_meta_graph_def.graph_def, outputs)
                return graph_def, variables_values, 'tf', None
        if model_dir:
            # saved model directory
            try:
                env_setup = get_environment_setup("tf")
                # enable eager execution temporarily while the TensorFlow 2 model is being loaded
                tf_v1.enable_eager_execution()

                try:
                    # Code to extract a Keras model.
                    # tf.keras.models.load_model throws TypeError, KeyError, or IndexError
                    # for the TF 1.x SavedModel format in case TF 1.x is installed
                    imported = tf.keras.models.load_model(model_dir,
                                                          compile=False)
                except Exception:
                    imported = tf.saved_model.load(model_dir, saved_model_tags)  # pylint: disable=E1120

                # getting a signature by key throws KeyError for the TF 1.x SavedModel format in case TF 2.x is installed
                concrete_func = imported.signatures[
                    tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
                # the aggressive_inlining parameter is needed to freeze the embeddings table of the Keras Embedding
                # operation; a model with an Embedding operation cannot be properly converted to IR without it
                if "tensorflow" in env_setup and env_setup[
                        "tensorflow"] >= LooseVersion("2.2.0"):
                    frozen_func = convert_variables_to_constants_v2(
                        concrete_func,
                        lower_control_flow=False,
                        aggressive_inlining=True)  # pylint: disable=E1123
                else:
                    frozen_func = convert_variables_to_constants_v2(
                        concrete_func, lower_control_flow=False)  # pylint: disable=E1123
                graph_def = frozen_func.graph.as_graph_def(add_shapes=True)
                # disable eager execution since the next steps are executed with a graph in non-eager mode
                tf_v1.disable_eager_execution()

                input_names = []
                if hasattr(imported, 'inputs'):
                    # Extract tensor names order from Keras model
                    input_names = [tensor.name for tensor in imported.inputs]

                # After model freezing, output tensor names change and receive a "Func/PartitionedCall" prefix,
                # so output names from the saved_model cannot be used. Tensor names from the frozen graph are used
                # instead; since TF adds indexed Identity nodes to each output during freezing, this indexing is
                # used for order alignment.
                output_names = [tensor.name for tensor in frozen_func.outputs]

                inputs_outputs_order = (input_names, output_names)

                return graph_def, variables_values, 'tf2', inputs_outputs_order
            except Exception:
                # disable eager execution since a TensorFlow 1 model is being handled
                tf_v1.disable_eager_execution()
                # code to extract GraphDef for the TF 1.0 SavedModel format
                tags = saved_model_tags if saved_model_tags is not None else [
                    tf_v1.saved_model.tag_constants.SERVING
                ]
                with tf_v1.Session() as sess:
                    meta_graph_def = tf_v1.saved_model.loader.load(
                        sess, tags, model_dir)
                    outputs = get_output_node_names_list(
                        meta_graph_def.graph_def, user_output_node_names_list)
                    graph_def = tf_v1.graph_util.convert_variables_to_constants(
                        sess, meta_graph_def.graph_def, outputs)
                    return graph_def, variables_values, 'tf', None
    except Exception as e:
        raise FrameworkError('Cannot load input model: {}', e) from e
    raise Error("Unknown configuration of input model parameters")
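This variant extends the return value with an inputs_outputs_order element for TF 2 models. A hedged sketch of consuming it (the SavedModel path and tag are illustrative assumptions):

# Hypothetical call for a SavedModel directory; path and tag are placeholders.
graph_def, variables_values, framework, inputs_outputs_order = load_tf_graph_def(
    model_dir="path/to/saved_model", saved_model_tags=["serve"])
if framework == 'tf2' and inputs_outputs_order is not None:
    input_names, output_names = inputs_outputs_order  # tensor name order from the frozen graph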