Example #1
def convert(infile, outdir, **kwargs):
    """Convert pb.

  Args:
    infile: Input path.
    outdir: Output path.
    **kwargs: Other args for converting.

  Returns:
    None.
  """
    logging_level = kwargs.get("logging_level", "INFO")
    ext_data_dir = kwargs.get("extdatadir")

    common.logger.setLevel(logging_level)
    common.logger.handlers[0].setLevel(logging_level)
    common.logger.info("Start converting onnx pb to tf saved model")

    # load external data if the file directory is provided
    if ext_data_dir:
        onnx_model = onnx.load(infile, load_external_data=False)
        load_external_data_for_model(onnx_model, ext_data_dir)
    else:
        onnx_model = onnx.load(infile)

    tf_rep = backend.prepare(onnx_model, **kwargs)
    tf_rep.export_graph(outdir)
    common.logger.info("Converting completes successfully.")
Example #2
def load_model(f: Union[IO[bytes], Text],
               format: Optional[Any] = None,
               load_external_data: bool = True) -> ModelProto:
    '''
    Loads a serialized ModelProto into memory.

    If load_external_data is True and the external data files live in the
    same directory as the model, they are loaded automatically; otherwise,
    call load_external_data_for_model with the directory that holds them.

    Arguments:
        f: can be a file-like object (has "read" function) or a string containing a file name
        format: for future use

    Returns:
        Loaded in-memory ModelProto
    '''
    s = _load_bytes(f)
    model = load_model_from_string(s, format=format)

    if load_external_data:
        model_filepath = _get_file_path(f)
        if model_filepath:
            base_dir = os.path.dirname(model_filepath)
            load_external_data_for_model(model, base_dir)

    return model
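
When the external tensor files do not sit next to the .onnx file, the two-step path described in the docstring applies. A minimal sketch, with placeholder paths:

import onnx
from onnx.external_data_helper import load_external_data_for_model

# Skip the automatic same-directory lookup, then point the loader
# at the directory that actually holds the tensor files.
model = onnx.load_model("model.onnx", load_external_data=False)
load_external_data_for_model(model, "weights_dir/")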
Example #3
def load_model(
    f,
    format=None,
    load_external_data=True
):  # type: (Union[IO[bytes], Text], Optional[Any], bool) -> ModelProto
    '''
    Loads a serialized ModelProto into memory

    @params
    f can be a file-like object (has "read" function) or a string containing a file name
    format is for future use

    @return
    Loaded in-memory ModelProto
    '''
    s = _load_bytes(f)
    model = load_model_from_string(s, format=format)

    if load_external_data:
        model_filepath = _get_file_path(f)
        if model_filepath:
            base_dir = os.path.dirname(model_filepath)
            load_external_data_for_model(model, base_dir)

    return model
Example #4
    def test_load_external_data_for_model(self):  # type: () -> None
        model = onnx.load_model(self.model_filename, load_external_data=False)
        load_external_data_for_model(model, self.temp_dir)
        initializer_tensor = model.graph.initializer[0]
        self.assertTrue(np.allclose(to_array(initializer_tensor), self.initializer_value))

        attribute_tensor = model.graph.node[0].attribute[0].t
        self.assertTrue(np.allclose(to_array(attribute_tensor), self.attribute_value))
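
For context, a fixture like self.model_filename is typically produced by marking tensors as external before saving. A minimal sketch, assuming onnx.external_data_helper.convert_model_to_external_data (available in recent onnx releases) and placeholder file names:

import onnx
from onnx.external_data_helper import convert_model_to_external_data

# Mark tensor payloads for external storage; onnx.save_model then writes
# them to the sidecar file and keeps only references in the .onnx file.
convert_model_to_external_data(model, all_tensors_to_one_file=True,
                               location="tensors.bin")
onnx.save_model(model, "model.onnx")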
Example #5
def load_initializers_from_onnx(model_path, load_optimizer=False):
    """Load initial weights from an onnx checkpoint.

    Args:
        model_path (str): Path to onnx file.
        load_optimizer (bool): Whether to also load optimizer state tensors.

    Returns:
        Dict: Mapping of popart weight names to numpy values.
    """
    initializers = {}
    # By default onnx.load will look for initializers in the same dir as the onnx model.
    # However builder.saveInitializersExternally takes a real path, or a path relative
    # to the run dir, and stores it in the onnx proto.
    model = onnx.load(model_path, load_external_data=False)

    has_external_data = any(
        is_external_weight(weight) for weight in model.graph.initializer)
    if has_external_data:
        load_external_data_for_model(model, '')

    optimizer_prefix = (popart.reservedAccl1Prefix(),
                        popart.reservedAccl2Prefix(),
                        popart.reservedAcclPrefix(),
                        popart.reservedAccumPrefix(),
                        popart.reservedStepPrefix())

    for weight in model.graph.initializer:
        is_optimizer_state = any(x in weight.name for x in optimizer_prefix)
        if not load_optimizer and is_optimizer_state:
            continue

        if is_external_weight(
                weight) or weight.data_type != onnx.TensorProto.FLOAT16:
            np_weight = numpy_helper.to_array(weight)
        else:
            int_data = np.asarray(weight.int32_data, np.int32)
            np_weight = int_data.view(dtype=np.float16).reshape(weight.dims)

        if is_optimizer_state:
            initializers[weight.name] = np_weight.astype(np.float32)
        else:
            initializers[weight.name] = np_weight

    initializers = handle_split_qkv(initializers)
    initializers = handle_split_word_embedding(initializers)
    return initializers
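
The is_external_weight helper is not shown above; a plausible definition, mirroring onnx.external_data_helper's own check for externally stored tensors, would be:

import onnx

def is_external_weight(weight):
    # A TensorProto stores its payload on disk when data_location
    # is set to EXTERNAL instead of the in-protobuf DEFAULT.
    return (weight.HasField("data_location")
            and weight.data_location == onnx.TensorProto.EXTERNAL)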
Example #6
    def test_check_model_by_model(self):  # type: () -> None
        model = onnx.load_model(self.model_filename, load_external_data=False)
        load_external_data_for_model(model, self.temp_dir)  # Exceeds maximum protobuf size
        # The checker catches models over 2GB and raises an error.
        self.assertRaises(ValueError, checker.check_model, model)
Example #7
    def test_check_model_by_model(self):  # type: () -> None
        model = onnx.load_model(self.model_filename, load_external_data=False)
        load_external_data_for_model(model, self.temp_dir)  # Exceeds maximum protobuf size
        with pytest.raises(ValueError):
            checker.check_model(model)  # checker catches 2GB models as well
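
Both tests exercise the 2GB protobuf ceiling: once the external tensors are folded back into the ModelProto, serializing it for the checker fails. The usual workaround is to keep the weights external when saving; a minimal sketch, assuming a recent onnx release where save_model accepts save_as_external_data, with placeholder file names:

import onnx

# Keep large tensors outside the protobuf so the model stays
# under the 2GB serialization limit.
onnx.save_model(model, "big_model.onnx",
                save_as_external_data=True,
                all_tensors_to_one_file=True,
                location="big_model.data")
onnx.checker.check_model("big_model.onnx")  # path form checks from disk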