Example 1
0
    def analyze(self, graph: Graph):
        """Detect whether *graph* looks like a TF Object Detection API model.

        First checks for node scopes characteristic of TF 1.x and TF 2.x OD API
        models; if neither set is present the graph is not an OD API model.
        Otherwise scans ``self.model_scopes`` for the first flavor whose scope
        set fully matches the graph and reports it together with the mandatory
        conversion parameters and a user-facing hint message.

        :param graph: graph to inspect.
        :return: tuple of (analysis result dict, message) on a match,
                 otherwise (None, None).
        """
        def graph_has_all(scope_names):
            # True when every scope in *scope_names* occurs in the graph.
            return all(graph_contains_scope(graph, name) for name in scope_names)

        looks_like_tf_1 = graph_has_all(['image_tensor', 'detection_classes', 'detection_boxes',
                                         'detection_scores', ('Preprocessor', 'map')])
        looks_like_tf_2 = graph_has_all(['input_tensor', 'output_control_node', 'Identity_',
                                         ('Preprocessor', 'map')])
        if not (looks_like_tf_1 or looks_like_tf_2):
            log.debug('The model does not contain nodes that must exist in the TF OD API models')
            return None, None

        for flavor, scopes_tuple in self.model_scopes.items():
            for scopes in scopes_tuple:
                if not graph_has_all(scopes):
                    continue
                config_files = files_by_pattern(get_mo_root_dir() + '/openvino/tools/mo/front/tf',
                                                __class__.file_patterns[flavor],
                                                add_prefix=True)
                result = {
                    'flavor': flavor,
                    'mandatory_parameters': {
                        'tensorflow_use_custom_operations_config': config_files,
                        'tensorflow_object_detection_api_pipeline_config': None,
                    },
                }
                message = "Your model looks like TensorFlow Object Detection API Model.\n" \
                          "Check if all parameters are specified:\n" \
                          "\t--transformations_config\n" \
                          "\t--tensorflow_object_detection_api_pipeline_config\n" \
                          "\t--input_shape (optional)\n" \
                          "\t--reverse_input_channels (if you convert a model to use with the Inference Engine sample applications)\n" \
                          "Detailed information about conversion of this model can be found at\n" \
                          "https://docs.openvinotoolkit.org/latest/_docs_MO_DG_prepare_model_convert_model_tf_specific_Convert_Object_Detection_API_Models.html"
                return {'model_type': {'TF_OD_API': result}}, message
        return None, None
Example 2
0
def generate_mo_version():
    """Generate a Model Optimizer version string like cmake does:
    ``{major.minor}.custom_{branch_name}_{commit_hash}``.

    The branch name and commit hash are taken from the git checkout rooted at
    the MO directory. Any failure (not a git checkout, git not installed, etc.)
    falls back to the literal string ``"unknown version"``.

    :return: version string.
    """
    try:
        mo_dir = get_mo_root_dir()
        branch_name = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"],
                                              cwd=mo_dir).strip().decode()
        commit_hash = subprocess.check_output(["git", "rev-parse", "HEAD"],
                                              cwd=mo_dir).strip().decode()
        return "{}.custom_{}_{}".format(mo_major_and_minor_version, branch_name, commit_hash)
    except Exception:  # deliberately broad: version reporting is best-effort
        return "unknown version"
class TestSchema(UnitTestWithMockedTelemetry):
    """Tests for validation of transformation-config files against the JSON schema."""

    base_dir = get_mo_root_dir()
    schema_file = os.path.join(base_dir, 'mo', 'utils', 'schema.json')
    # All transformation configs shipped with the repository.
    transformation_configs = get_json_configs(base_dir)
    # Hand-crafted configs that the schema validator must reject.
    test_json1 = '[{"id": "", "match_kind": "general", "custom_attributes": {}}]'
    test_json2 = '[{"id": "someid", "match_kind": "abc", "custom_attributes": {}}]'

    @generate(*transformation_configs)
    def test_schema_file(self, transformation_config):
        """Every shipped transformation config validates against the schema."""
        self.assertTrue(load_and_validate_json_config(transformation_config))

    def test_schema_id_empty(self):
        """A config with an empty "id" is rejected with Error."""
        self.assertRaises(Error, load_and_validate_json_config, self.test_json1)

    def test_schema_match_kind_wrong(self):
        """A config with an unknown "match_kind" is rejected with Error."""
        self.assertRaises(Error, load_and_validate_json_config, self.test_json2)
Example 4
0
def restore_graph_from_ir(path_to_xml: str, path_to_bin: str = None) -> (Graph, dict):
    """
    Function to make valid graph and metadata for MO back stage from IR.

    :param path_to_xml: path to the IR .xml file.
    :param path_to_bin: optional path to the IR .bin file with weights.
    :return: (restored graph, meta data)
    :raises AssertionError: when the IR version is missing or older than 10.
    """
    ir = IREngine(path_to_xml, path_to_bin)
    # Fetch the version once; it may be absent (None), and the original
    # `None >= 10` comparison would raise TypeError instead of the
    # intended assertion message.
    ir_version = ir.graph.graph.get('ir_version')
    assert ir_version is not None and ir_version >= 10, \
        'IR version {} is not supported, ' \
        'please generate actual IR for your model and use it.'.format(ir_version)

    path = get_mo_root_dir()
    collect_ops(path)
    collect_extenders(path)

    # Create a new copy of graph with correct attributes (shape & type infer, backend attrs etc.)
    new_graph = copy_graph_with_ops(ir.graph)

    return new_graph, copy(ir.meta_data)