# Assumed imports for these snippets: os and numpy are standard library /
# third-party, while test_util (tu) and gen_ensemble_model_utils (emu) are
# Triton QA helper modules; the exact module names are an assumption based
# on the aliases used below.
import os

import numpy as np

import test_util as tu
import gen_ensemble_model_utils as emu


def create_onnx_modelconfig(create_savedmodel, models_dir, model_version,
                            io_cnt, max_batch, dtype, shape):
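    """Write config.pbtxt for an ONNX identity model with io_cnt
    input/output pairs of the given dtype and shape."""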

    if not tu.validate_for_onnx_model(dtype, dtype, dtype, shape, shape,
                                      shape):
        return

    # Use a different model name for the non-batching variant
    model_name = tu.get_zero_model_name(
        "onnx_nobatch" if max_batch == 0 else "onnx", io_cnt, dtype)
    config_dir = models_dir + "/" + model_name

    config = emu.create_general_modelconfig(model_name,
                                            "onnxruntime_onnx",
                                            max_batch,
                                            emu.repeat(dtype, io_cnt),
                                            emu.repeat(shape, io_cnt),
                                            emu.repeat(shape, io_cnt),
                                            emu.repeat(dtype, io_cnt),
                                            emu.repeat(shape, io_cnt),
                                            emu.repeat(shape, io_cnt),
                                            emu.repeat(None, io_cnt),
                                            force_tensor_number_suffix=True)

    try:
        os.makedirs(config_dir)
    except OSError as ex:
        pass  # ignore existing dir

    with open(config_dir + "/config.pbtxt", "w") as cfile:
        cfile.write(config)
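
# Illustrative invocation of the variant above; the repository path, shapes,
# and dtypes are assumptions, not values from the generator script that
# normally drives this helper:
#
#   create_onnx_modelconfig(True, "/tmp/qa_model_repository", 1,
#                           io_cnt=2, max_batch=8,
#                           dtype=np.float32, shape=[-1])
#
# The config.pbtxt is written under a per-model directory named by
# tu.get_zero_model_name().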
def create_onnx_modelconfig(
        models_dir, model_version, max_batch, dtype, shape):
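    """Write config.pbtxt for an ONNX sequence model with START/READY
    sequence-batching control inputs."""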

    if not tu.validate_for_onnx_model(dtype, dtype, dtype, shape, shape, shape):
        return

    model_name = tu.get_sequence_model_name(
        "onnx_nobatch" if max_batch == 0 else "onnx", dtype)
    config_dir = models_dir + "/" + model_name

    instance_group_string = '''
instance_group [
  {
    kind: KIND_GPU
  }
]
'''

    # [TODO] move create_general_modelconfig() out of emu as it is general
    # enough for all backends to use
    config = emu.create_general_modelconfig(model_name, "onnxruntime_onnx", max_batch,
            [dtype], [shape], [None], [dtype], [shape], [None], [None],
            force_tensor_number_suffix=False, instance_group_str=instance_group_string)

    config += '''
sequence_batching {{
  max_sequence_idle_microseconds: 5000000
  control_input [
    {{
      name: "START"
      control [
        {{
          kind: CONTROL_SEQUENCE_START
          {type}_false_true: [ 0, 1 ]
        }}
      ]
    }},
    {{
      name: "READY"
      control [
        {{
          kind: CONTROL_SEQUENCE_READY
          {type}_false_true: [ 0, 1 ]
        }}
      ]
    }}
  ]
}}
'''.format(type="fp32" if dtype == np.float32 else "int32")

    try:
        os.makedirs(config_dir)
    except OSError as ex:
        pass # ignore existing dir

    with open(config_dir + "/config.pbtxt", "w") as cfile:
        cfile.write(config)
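
# Illustrative invocation of the sequence variant above (path, dtype, and
# shape are assumptions):
#
#   create_onnx_modelconfig("/tmp/qa_sequence_model_repository", model_version=1,
#                           max_batch=4, dtype=np.int32, shape=[1])
#
# With max_batch == 0 the model name gets the "onnx_nobatch" prefix; in both
# cases the sequence_batching section with START/READY controls is appended.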
def create_onnx_modelconfig(models_dir, model_version, max_batch, dtype,
                            input_shapes, input_model_shapes, output_shapes,
                            output_model_shapes):
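    """Write config.pbtxt for an ONNX identity model whose configuration
    shapes may differ from the underlying model shapes (reshape); instances
    are pinned to GPU 0."""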

    assert len(input_shapes) == len(input_model_shapes)
    assert len(output_shapes) == len(output_model_shapes)
    assert len(input_shapes) == len(output_shapes)
    if not tu.validate_for_onnx_model(dtype, dtype, dtype, input_shapes[0],
                                      input_shapes[0], input_shapes[0]):
        return

    io_cnt = len(input_shapes)

    # Use a different model name for the non-batching variant
    model_name = tu.get_zero_model_name(
        "onnx_nobatch" if max_batch == 0 else "onnx", io_cnt, dtype)
    config_dir = models_dir + "/" + model_name

    # Must make sure all Onnx models will be loaded to the same GPU if they are
    # run on GPU. This is due to the current limitation of Onnx Runtime
    # https://github.com/microsoft/onnxruntime/issues/1034
    instance_group_string = '''
instance_group [
  {
    count: 1
    kind: KIND_GPU
    gpus: [ 0 ]
  }
]
'''

    config = emu.create_general_modelconfig(
        model_name,
        "onnxruntime_onnx",
        max_batch,
        emu.repeat(dtype, io_cnt),
        input_shapes,
        input_model_shapes,
        emu.repeat(dtype, io_cnt),
        output_shapes,
        output_model_shapes,
        emu.repeat(None, io_cnt),
        force_tensor_number_suffix=True,
        instance_group_str=instance_group_string)

    try:
        os.makedirs(config_dir)
    except OSError as ex:
        pass  # ignore existing dir

    with open(config_dir + "/config.pbtxt", "w") as cfile:
        cfile.write(config)
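
# Illustrative invocation of the reshape-style variant above, where the shapes
# exposed in config.pbtxt differ from the shapes of the underlying model
# (all argument values are assumptions):
#
#   create_onnx_modelconfig("/tmp/qa_reshape_model_repository", 1, max_batch=8,
#                           dtype=np.float32,
#                           input_shapes=[[4, 4]], input_model_shapes=[[16]],
#                           output_shapes=[[4, 4]], output_model_shapes=[[16]])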
def create_onnx_modelconfig(
        models_dir, max_batch, model_version,
        input_shape, output0_shape, output1_shape,
        input_dtype, output0_dtype, output1_dtype,
        output0_label_cnt, version_policy):
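    """Write config.pbtxt and output0_labels.txt for an ONNX model with two
    inputs and two outputs."""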

    if not tu.validate_for_onnx_model(input_dtype, output0_dtype, output1_dtype,
                                      input_shape, output0_shape, output1_shape):
        return

    # Use a different model name for the non-batching variant
    model_name = tu.get_model_name("onnx_nobatch" if max_batch == 0 else "onnx",
                                   input_dtype, output0_dtype, output1_dtype)
    config_dir = models_dir + "/" + model_name
    
    # Must make sure all Onnx models will be loaded to the same GPU if they are
    # run on GPU. This is due to the current limitation of Onnx Runtime
    # https://github.com/microsoft/onnxruntime/issues/1034
    instance_group_string = '''
instance_group [
  {
    count: 1
    kind: KIND_GPU
    gpus: [ 0 ]
  }
]
'''
    # [TODO] move create_general_modelconfig() out of emu as it is general
    # enough for all backends to use
    config = emu.create_general_modelconfig(model_name, "onnxruntime_onnx", max_batch,
            emu.repeat(input_dtype, 2), emu.repeat(input_shape, 2), emu.repeat(None, 2),
            [output0_dtype, output1_dtype], [output0_shape, output1_shape], emu.repeat(None, 2),
            ["output0_labels.txt", None],
            version_policy=version_policy, force_tensor_number_suffix=True,
            instance_group_str=instance_group_string)

    try:
        os.makedirs(config_dir)
    except OSError as ex:
        pass # ignore existing dir

    with open(config_dir + "/config.pbtxt", "w") as cfile:
        cfile.write(config)

    with open(config_dir + "/output0_labels.txt", "w") as lfile:
        for l in range(output0_label_cnt):
            lfile.write("label" + str(l) + "\n")
def create_onnx_modelconfig(models_dir, model_version, max_batch, dtype, shape, initial_state):
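    """Write config.pbtxt for an ONNX sequence model with implicit state
    (INPUT_STATE/OUTPUT_STATE), optionally zero- or file-initialized."""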

    if not tu.validate_for_onnx_model(dtype, dtype, dtype, shape, shape, shape):
        return

    model_name = tu.get_sequence_model_name(
        "onnx_nobatch" if max_batch == 0 else "onnx", dtype)
    config_dir = models_dir + "/" + model_name

    if dtype == np.float32:
        control_type = "fp32"
    elif dtype == np.bool_:
        control_type = "bool"
        dtype = np.int32
    else:
        control_type = "int32"

    instance_group_string = '''
instance_group [
  {
    kind: KIND_GPU
  }
]
'''

    # [TODO] move create_general_modelconfig() out of emu as it is general
    # enough for all backends to use
    config = emu.create_general_modelconfig(
        model_name,
        "onnxruntime_onnx",
        max_batch, [dtype], [shape], [None], [dtype], [shape], [None], [None],
        force_tensor_number_suffix=False,
        instance_group_str=instance_group_string)
    
    # Replace variable dims (-1) with 1 when sizing the initial state data
    shape_without_variable_dims = [1 if dim == -1 else dim for dim in shape]
    
    if initial_state is None:
        config += '''
    sequence_batching {{
      max_sequence_idle_microseconds: 5000000
      control_input [
        {{
          name: "START"
          control [
            {{
              kind: CONTROL_SEQUENCE_START
              {type}_false_true: [ 0, 1 ]
            }}
          ]
        }},
        {{
          name: "READY"
          control [
            {{
              kind: CONTROL_SEQUENCE_READY
              {type}_false_true: [ 0, 1 ]
            }}
          ]
        }}
      ]
      state [
        {{
          input_name: "INPUT_STATE"
          output_name: "OUTPUT_STATE"
          data_type: {dtype}
          dims: {dims}
        }} 
      ]
    }}
    '''.format(type=control_type,
               dims=tu.shape_to_dims_str(shape),
               dtype=emu.dtype_str(dtype))
    elif initial_state == 'zero':
        config += f'''
    sequence_batching {{
      max_sequence_idle_microseconds: 5000000
      control_input [
        {{
          name: "START"
          control [
            {{
              kind: CONTROL_SEQUENCE_START
              {control_type}_false_true: [ 0, 1 ]
            }}
          ]
        }},
        {{
          name: "READY"
          control [
            {{
              kind: CONTROL_SEQUENCE_READY
              {control_type}_false_true: [ 0, 1 ]
            }}
          ]
        }}
      ]
      state [
        {{
          input_name: "INPUT_STATE"
          output_name: "OUTPUT_STATE"
          data_type: {emu.dtype_str(dtype)}
          dims: {tu.shape_to_dims_str(shape)}
          initial_state: {{
              name: "state init"
              data_type: {emu.dtype_str(dtype)}
              dims: {tu.shape_to_dims_str(shape_without_variable_dims)}
              zero_data: true
          }}
        }} 
      ]
    }}
    '''
    elif initial_state == 'file':
        config += '''
    sequence_batching {{
      max_sequence_idle_microseconds: 5000000
      control_input [
        {{
          name: "START"
          control [
            {{
              kind: CONTROL_SEQUENCE_START
              {type}_false_true: [ 0, 1 ]
            }}
          ]
        }},
        {{
          name: "READY"
          control [
            {{
              kind: CONTROL_SEQUENCE_READY
              {type}_false_true: [ 0, 1 ]
            }}
          ]
        }}
      ]
      state [
        {{
          input_name: "INPUT_STATE"
          output_name: "OUTPUT_STATE"
          data_type: {dtype}
          dims: {dims}
          initial_state: {{
              name: "state init"
              data_type: {dtype}
              dims: {shape_without_variable_dims}
              data_file: "input_state_data"
          }}
        }} 
      ]
    }}
    '''.format(type=control_type,
               dims=tu.shape_to_dims_str(shape),
               dtype=emu.dtype_str(dtype),
               shape_without_variable_dims=tu.shape_to_dims_str(shape_without_variable_dims))

    try:
        os.makedirs(config_dir)
    except OSError as ex:
        pass  # ignore existing dir

    with open(config_dir + "/config.pbtxt", "w") as cfile:
        cfile.write(config)
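
# Illustrative invocations of the implicit-state variant above (argument
# values are assumptions). initial_state selects how the state tensor is
# seeded:
#
#   # state declared but no initial_state block
#   create_onnx_modelconfig("/tmp/qa_repo", 1, 4, np.int32, [-1], None)
#
#   # zero-initialized state (variable dims collapsed to 1)
#   create_onnx_modelconfig("/tmp/qa_repo", 1, 4, np.int32, [-1], 'zero')
#
#   # state seeded from the "input_state_data" file shipped with the model
#   create_onnx_modelconfig("/tmp/qa_repo", 1, 4, np.int32, [-1], 'file')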