Example #1
def test_set_name_for_dimension():
    skip_if_onnx_frontend_is_disabled()
    fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME)
    model = fe.load("test_place_names.onnx")
    dim_name = "batch_size"

    input1 = model.get_place_by_tensor_name(tensorName="in1")
    model.set_name_for_dimension(input1, 0, dim_name)
    assert model.get_partial_shape(input1) == PartialShape([-1, 2])

    output1 = model.get_place_by_tensor_name(tensorName="out1")
    model.set_name_for_dimension(output1, 1, dim_name)
    assert model.get_partial_shape(output1) == PartialShape([1, -1])

    # sub_output rank is 2 so setting dim_name at index 3 extends its rank to 4
    sub_output = model.get_place_by_tensor_name(tensorName="sub_out")
    model.set_name_for_dimension(sub_output, 3, dim_name)
    assert model.get_partial_shape(sub_output) == PartialShape([2, 2, -1, -1])

    with pytest.raises(Exception) as e:
        model.set_name_for_dimension(input1, 0, "")
    assert "name must not be empty" in str(e)

    one_const = model.get_place_by_tensor_name(tensorName="one_const")
    with pytest.raises(Exception) as e:
        model.set_name_for_dimension(one_const, 0, dim_name)
    assert "ONNX initializer shape dimension cannot be dynamic." in str(e)
    def test_set_batch_size(self, mock_argparse):
        mock_return_partial_shape(PartialShape([-1, 2, 3, 4]))
        main(argparse.ArgumentParser(), fem, 'mock_mo_ngraph_frontend')
        stat = get_model_statistic()

        # verify that 'get_partial_shape' and 'set_partial_shape' were called
        # 2 is because mock model has 2 inputs
        assert stat.get_partial_shape == 2
        assert stat.set_partial_shape == 2
        assert stat.lastArgPartialShape == PartialShape([123, 2, 3, 4])
    def test_input_shape(self, mock_argparse):
        main(argparse.ArgumentParser(), fem, 'mock_mo_ngraph_frontend')
        stat = get_model_statistic()

        # verify that 'set_partial_shape' was called
        assert stat.set_partial_shape == 1
        assert stat.lastArgPartialShape == PartialShape([1, 2, 3, 4])
Example #4
def test_model_set_partial_shape():
    model = init_model()
    place = model.get_place_by_tensor_name(tensorName="")
    test_shape = PartialShape([1, 2, 3, 4])
    model.set_partial_shape(place=place, shape=test_shape)
    stat = get_mdl_stat(model)
    assert stat.set_partial_shape == 1
    assert stat.lastArgPlace == place
    assert stat.lastArgPartialShape == test_shape
def test_set_partial_shape():
    skip_if_onnx_frontend_is_disabled()
    fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME)
    assert fe

    model = fe.load("input_model.onnx")
    assert model

    place1 = model.get_place_by_tensor_name(tensorName="in1")
    model.set_partial_shape(place1, PartialShape([8, 16]))
    place2 = model.get_place_by_tensor_name(tensorName="in2")
    model.set_partial_shape(place2, PartialShape([8, 16]))
    place3 = model.get_place_by_tensor_name(tensorName="in3")
    model.set_partial_shape(place3, PartialShape([4, 6]))
    result_func = fe.convert(model)

    expected_model = fe.load("test_partial_shape.onnx")
    expected_func = fe.convert(expected_model)

    res = compare_functions(result_func, expected_func)
    assert res
    def test_error_batch(self, mock_argparse):
        # The first dimension doesn't look like a batch size,
        # so MO should not convert anything and should produce the expected error
        mock_return_partial_shape(PartialShape([122, 2, 3, 4]))
        with self.assertLogs() as logger:
            main(argparse.ArgumentParser(), fem, 'mock_mo_ngraph_frontend')

        stat = get_model_statistic()

        assert [s for s in logger.output if 'question=39' in s]

        # verify that 'get_partial_shape' was called
        assert stat.get_partial_shape == 1
        # verify that 'set_partial_shape' was not called
        assert stat.set_partial_shape == 0
def test_get_partial_shape():
    skip_if_onnx_frontend_is_disabled()
    fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME)
    assert fe

    model = fe.load("input_model.onnx")
    assert model

    place1 = model.get_place_by_tensor_name(tensorName="in1")
    assert model.get_partial_shape(place1) == PartialShape([2, 2])

    place2 = model.get_place_by_tensor_name(tensorName="out1")
    assert model.get_partial_shape(place2) == PartialShape([1, 2])

    place3 = model.get_place_by_tensor_name(tensorName="add_out")
    assert model.get_partial_shape(place3) == PartialShape([2, 2])

    place4 = model.get_place_by_tensor_name(tensorName="in3")
    model.set_partial_shape(place4, PartialShape([4, 6]))
    assert model.get_partial_shape(place4) == PartialShape([4, 6])
    assert model.get_partial_shape(place2) == PartialShape([1, 2])
Example #8
def shape_to_array(shape: PartialShape):
    return [shape.get_dimension(i) for i in range(shape.rank.get_length())]
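A minimal usage sketch (hypothetical shape values; assumes PartialShape is imported from the same bindings used in the surrounding snippets, and that the shape's rank is static):

# Hypothetical usage: convert a partially dynamic shape into a list of
# Dimension objects so individual axes can be inspected or replaced.
shape = PartialShape([1, 3, -1, -1])         # -1 marks a dynamic dimension
dims = shape_to_array(shape)                 # one Dimension object per axis
assert len(dims) == shape.rank.get_length()  # valid only for a static rank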
Example #9
def moc_pipeline(argv: argparse.Namespace, moc_front_end: FrontEnd):
    """
    Load the input model and convert it to an nGraph function.
    :param argv: parsed command line arguments
    :param moc_front_end: loaded FrontEnd for converting the input model
    :return: converted nGraph function ready for serialization
    """
    input_model = moc_front_end.load(argv.input_model)

    user_shapes, outputs, freeze_placeholder = fe_user_data_repack(
        input_model, argv.placeholder_shapes, argv.placeholder_data_types,
        argv.output, argv.freeze_placeholder_with_value)

    def check_places_are_same(places_original: List[Place],
                              places_new: List[Place]):
        """
        Check whether the set of new places is the same as the original one.
        :param places_original: List[Place] Original model places
        :param places_new: List[Place] New list of places
        :return: True if the new list of places is the same as the original
        """
        return len(places_original) == len(places_new) and len([
            item for item in places_original
            if any([item.is_equal(item2['node']) for item2 in places_new])
        ]) == len(places_original)

    inputs_equal = True
    if user_shapes:
        inputs_equal = check_places_are_same(input_model.get_inputs(),
                                             user_shapes)

    outputs_equal = True
    if outputs:
        outputs_equal = check_places_are_same(input_model.get_outputs(),
                                              outputs)
    log.debug('Inputs are same: {}, outputs are same: {}'.format(
        inputs_equal, outputs_equal))

    if not inputs_equal and not outputs_equal:
        # Use ExtractSubgraph
        new_input_places = [x['node'] for x in user_shapes]
        new_output_places = [x['node'] for x in outputs]
        log.debug('Using extract subgraph')
        input_model.extract_subgraph(new_input_places, new_output_places)
    elif not inputs_equal:
        new_input_places = [x['node'] for x in user_shapes]
        log.debug('Using override_all_inputs')
        input_model.override_all_inputs(new_input_places)
    elif not outputs_equal:
        new_output_places = [x['node'] for x in outputs]
        log.debug('Using override_all_outputs')
        input_model.override_all_outputs(new_output_places)

    if user_shapes:
        for user_shape in user_shapes:
            if user_shape.get('shape') is not None:
                input_model.set_partial_shape(
                    user_shape['node'], PartialShape(user_shape['shape']))
            if user_shape.get('data_type') is not None:
                data_type = get_element_type(user_shape['data_type'])
                log.debug('Set data type: {}'.format(data_type))
                input_model.set_element_type(user_shape['node'], data_type)

    def shape_to_array(shape: PartialShape):
        return [shape.get_dimension(i) for i in range(shape.rank.get_length())]

    # Set batch size
    if argv.batch is not None and argv.batch > 0:
        log.debug('Setting batch size to {}'.format(argv.batch))
        for place in input_model.get_inputs():
            old_partial_shape = input_model.get_partial_shape(place)
            old_shape_array = shape_to_array(
                old_partial_shape) if old_partial_shape.rank.is_static else []
            joined_name = ' '.join(place.get_names())
            validate_batch_in_shape(old_shape_array, joined_name)

            # Assume the batch size is always the first dimension of the shape
            # Keep other dimensions unchanged
            new_shape = [
                old_partial_shape.get_dimension(i)
                for i in range(old_partial_shape.rank.get_length())
            ]
            new_shape[0] = Dimension(argv.batch)

            new_partial_shape = PartialShape(new_shape)
            log.debug('Input: {}, Old shape: {}, New shape: {}'.format(
                joined_name, old_shape_array, new_shape))
            input_model.set_partial_shape(place, new_partial_shape)

    ngraph_function = moc_front_end.convert(input_model)
    return ngraph_function
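For illustration, the batch-override step at the end of moc_pipeline can be exercised on its own through the frontend API shown in the earlier examples; a minimal sketch (hypothetical model file and batch value, reusing fem, ONNX_FRONTEND_NAME, PartialShape and Dimension from the snippets above):

# Hypothetical sketch mirroring the "Set batch size" block of moc_pipeline:
# override the first (batch) dimension of every model input, then convert.
fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME)
model = fe.load("input_model.onnx")          # hypothetical model file
batch = 4
for place in model.get_inputs():
    old_shape = model.get_partial_shape(place)
    new_shape = [old_shape.get_dimension(i)
                 for i in range(old_shape.rank.get_length())]
    new_shape[0] = Dimension(batch)          # assume axis 0 is the batch
    model.set_partial_shape(place, PartialShape(new_shape))
ng_function = fe.convert(model)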