Example #1
    def test_caffe_same_name_layer(self):
        proto = caffe_pb2.NetParameter()
        text_format.Merge(proto_str_multi_input + proto_same_name_layers,
                          proto)
        graph = Graph()
        caffe_pb_to_nx(graph, proto, None)
        # 6 nodes because: 2 inputs + 2 convolutions + 2 identity nodes used as fake outputs
        np.testing.assert_equal(len(graph.nodes()), 6)
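The fixtures proto_str_multi_input and proto_same_name_layers used by this example and the next are not shown on this page; a minimal sketch of what such prototxt strings could look like (names and shapes are illustrative, not the actual test data):

# Hypothetical prototxt fixtures -- the real test strings may differ.
proto_str_multi_input = '''
name: "MultiInputNet"
input: "data"
input_shape { dim: 1 dim: 3 dim: 224 dim: 224 }
input: "data_1"
input_shape { dim: 1 dim: 3 dim: 224 dim: 224 }
'''

proto_same_name_layers = '''
layer { name: "conv" type: "Convolution" bottom: "data" top: "conv" }
layer { name: "conv" type: "Convolution" bottom: "data_1" top: "conv_1" }
'''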
Example #2
    def test_caffe_same_name_layer(self):
        proto = caffe_pb2.NetParameter()
        text_format.Merge(proto_str_multi_input + proto_same_name_layers,
                          proto)
        graph, input_shapes = caffe_pb_to_nx(proto, None)
        # 4 nodes because: 2 inputs + 2 convolutions
        np.testing.assert_equal(len(graph.nodes()), 4)
Example #3
    def test_caffe_pb_to_nx_one_input(self):
        proto = caffe_pb2.NetParameter()
        text_format.Merge(proto_str_one_input, proto)
        input_shapes = caffe_pb_to_nx(Graph(), proto, None)
        expected_input_shapes = {'Input0': np.array([1, 3, 224, 224])}

        for i in expected_input_shapes:
            np.testing.assert_array_equal(input_shapes[i],
                                          expected_input_shapes[i])
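The test expects a single input named 'Input0' with shape [1, 3, 224, 224]; a prototxt fixture producing that result could look roughly like this (illustrative only):

# Hypothetical content of proto_str_one_input -- the real fixture may differ.
proto_str_one_input = '''
name: "OneInputNet"
input: "Input0"
input_shape { dim: 1 dim: 3 dim: 224 dim: 224 }
'''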
Example #4
    def test_caffe_pb_to_standart_input(self):
        proto = caffe_pb2.NetParameter()
        text_format.Merge(proto_str_input + layer_proto_str, proto)
        graph, input_shapes = caffe_pb_to_nx(proto, None)
        expected_input_shapes = {'data': np.array([1, 3, 224, 224])}

        for i in expected_input_shapes:
            np.testing.assert_array_equal(input_shapes[i],
                                          expected_input_shapes[i])
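Here the input is named 'data'; a sketch of fixtures using the older input/input_dim notation plus a single layer (again illustrative, not the actual test strings):

# Hypothetical fixtures -- the real proto_str_input and layer_proto_str may differ.
proto_str_input = '''
name: "StandardInputNet"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 224
input_dim: 224
'''

layer_proto_str = '''
layer { name: "conv1" type: "Convolution" bottom: "data" top: "conv1" }
'''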
Example #5
def driver(argv: argparse.Namespace):
    caffe_pb2 = loader.import_caffe_pb2(argv.caffe_parser_path)

    proto, model = loader.load_caffe_proto_model(caffe_pb2, argv.input_proto,
                                                 argv.input_model)

    update_extractors_with_extensions(
        caffe_type_extractors, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.disable_flattening_optional_params if hasattr(
            argv, 'disable_flattening_optional_params') else False)

    try:
        graph, original_shapes = loader.caffe_pb_to_nx(proto, model)
    except ValueError as e:
        raise Error(
            'Invalid prototxt file: value error {}. ' + refer_to_faq_msg(11),
            str(e)) from e
    graph.check_empty_graph('load_caffe_proto_model')

    graph.__setattr__('proto_path', argv.input_proto)
    graph.__setattr__('caffemodel_path', argv.input_model)
    graph.__setattr__('name', getattr(proto, 'name', None) or argv.model_name)
    graph.graph['layout'] = 'NCHW'
    graph.graph['cmd_params'] = argv
    graph.graph['fw'] = 'caffe'
    graph.graph['original_shapes'] = original_shapes
    graph.graph['caffe_pb2'] = caffe_pb2
    graph.graph['ir_version'] = get_ir_version(argv)

    custom_layers_map = custom_layers_mapping.load_layers_xml(argv.k)
    custom_layers_mapping.update_extractors(
        caffe_type_extractors,
        custom_layers_map, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.enable_flattening_nested_params if hasattr(
            argv, 'enable_flattening_nested_params') else False)
    extract_node_attrs(
        graph, lambda node: caffe_extractor(
            node, check_for_duplicates(caffe_type_extractors)))

    # --------------------------------- LOAD END ------------------------------------------------------

    class_registration.apply_replacements(graph, [
        class_registration.ClassType.FRONT_REPLACER,
        class_registration.ClassType.MIDDLE_REPLACER,
        class_registration.ClassType.BACK_REPLACER
    ])

    return graph
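A rough sketch of how this driver variant might be invoked; the attribute names mirror the ones the function reads, but the values and the import path are placeholders:

import argparse

# The module that defines driver() is not shown here -- the import is a placeholder.
# from mo.pipeline.caffe import driver

argv = argparse.Namespace(
    caffe_parser_path=None,            # path to a generated caffe_pb2, if any
    input_proto='model.prototxt',      # placeholder paths
    input_model='model.caffemodel',
    model_name='my_model',
    k='CustomLayersMapping.xml',       # passed to custom_layers_mapping.load_layers_xml(argv.k)
    disable_omitting_optional=False,
    enable_flattening_nested_params=False,
)
# argv is also stored in graph.graph['cmd_params'], so the replacement passes
# may read additional attributes not listed here; hence the call stays commented.
# graph = driver(argv)                 # returns the transformed Graph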
Example #6
    def load(self, graph: Graph):
        argv = graph.graph['cmd_params']
        caffe_pb2 = loader.import_caffe_pb2(argv.caffe_parser_path)

        proto, model = loader.load_caffe_proto_model(caffe_pb2,
                                                     argv.input_proto,
                                                     argv.input_model)

        update_extractors_with_extensions(
            caffe_type_extractors, argv.disable_omitting_optional if hasattr(
                argv, 'disable_omitting_optional') else False,
            argv.disable_flattening_optional_params if hasattr(
                argv, 'disable_flattening_optional_params') else False)

        try:
            original_shapes = loader.caffe_pb_to_nx(graph, proto, model)
        except ValueError as e:
            raise Error(
                'Invalid prototxt file: value error {}. ' +
                refer_to_faq_msg(11), str(e)) from e
        graph.check_empty_graph('load_caffe_proto_model')

        graph.__setattr__('proto_path', argv.input_proto)
        graph.__setattr__('caffemodel_path', argv.input_model)
        graph.__setattr__('name',
                          getattr(proto, 'name', None) or argv.model_name)
        graph.graph['layout'] = 'NCHW'
        graph.graph['fw'] = 'caffe'
        graph.graph['original_shapes'] = original_shapes
        graph.graph['caffe_pb2'] = caffe_pb2

        custom_layers_map = custom_layers_mapping.load_layers_xml(argv.k)
        custom_layers_mapping.update_extractors(
            caffe_type_extractors, custom_layers_map,
            argv.disable_omitting_optional if hasattr(
                argv, 'disable_omitting_optional') else False,
            argv.enable_flattening_nested_params if hasattr(
                argv, 'enable_flattening_nested_params') else False)
        extract_node_attrs(
            graph, lambda node: caffe_extractor(
                node, check_for_duplicates(caffe_type_extractors)))
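Unlike the drivers, this load() method takes its parameters from graph.graph['cmd_params']; a sketch of the expected wiring, where the class and module names are assumptions:

import argparse

# Placeholder imports -- the actual class and module names may differ.
# from mo.graph.graph import Graph
# from openvino.tools.mo.load.caffe.loader import CaffeLoader

argv = argparse.Namespace(
    caffe_parser_path=None,
    input_proto='model.prototxt',
    input_model='model.caffemodel',
    model_name='my_model',
    k='CustomLayersMapping.xml',
)

# graph = Graph()
# graph.graph['cmd_params'] = argv   # load() reads argv from here
# CaffeLoader().load(graph)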
Example #7
def driver(argv: argparse.Namespace,
           proto_file_name: str,
           model_file_name: str,
           output_model_name: str,
           output_dir: str,
           caffe_proto_path: str,
           mean_file: str = "",
           mean_file_offsets: tuple = None,
           custom_layers_mapping_path: str = None):
    log_step(argv.steps, 'LOAD')
    meta_info = get_meta_info(argv)

    caffe_pb2 = loader.import_caffe_pb2(caffe_proto_path)

    proto, model = loader.load_caffe_proto_model(caffe_pb2, proto_file_name,
                                                 model_file_name)

    update_extractors_with_extensions(
        caffe_type_extractors, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.disable_flattening_optional_params if hasattr(
            argv, 'disable_flattening_optional_params') else False)

    try:
        graph, original_shapes = loader.caffe_pb_to_nx(proto, model)
    except ValueError as e:
        raise Error(
            'Invalid prototxt file: value error {}. ' + refer_to_faq_msg(11),
            str(e)) from e

    log.debug("After caffe_pb_to_nx")
    graph.print_graph_stat()
    graph.check_empty_graph('load_caffe_proto_model')

    graph.__setattr__('proto_path', proto_file_name)
    graph.__setattr__('caffemodel_path', model_file_name)
    graph.__setattr__('name',
                      getattr(proto, 'name', None) or output_model_name)
    graph.graph['layout'] = 'NCHW'
    graph.graph['cmd_params'] = argv
    graph.graph['fw'] = 'caffe'
    if graph.graph['cmd_params'].generate_experimental_IR_V10:
        version = 10
    else:
        version = 6
    graph.graph[
        'ir_version'] = 2 if argv.generate_deprecated_IR_V2 else version

    custom_layers_map = custom_layers_mapping.load_layers_xml(
        custom_layers_mapping_path)
    custom_layers_mapping.update_extractors(
        caffe_type_extractors,
        custom_layers_map, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.enable_flattening_nested_params if hasattr(
            argv, 'enable_flattening_nested_params') else False)
    extract_node_attrs(
        graph, lambda node: caffe_extractor(
            node, check_for_duplicates(caffe_type_extractors)))

    # --------------------------------- LOAD END ------------------------------------------------------
    log_step(argv.steps, 'FRONT')
    class_registration.apply_replacements(
        graph, class_registration.ClassType.FRONT_REPLACER)
    log_step(argv.steps, 'MIDDLE')
    class_registration.apply_replacements(
        graph, class_registration.ClassType.MIDDLE_REPLACER)

    # Mark nodes with attr 'can_be_fused': False to disable fusing for specified nodes
    mark_unfused_nodes(graph, argv.finegrain_fusing)

    # need this pass even without fusing to convert scale with 2 inputs
    convert_scale_shift_to_mul_add(graph)
    graph_clean_up(graph)

    if not argv.disable_fusing:
        convert_bn_to_mul_add(graph)
        graph_clean_up(graph)

        fuse_mul_add_sequence(graph)
        graph_clean_up(graph)

        fuse_linear_ops(graph)
        graph_clean_up(graph)

    if not argv.disable_resnet_optimization:
        stride_optimization(graph)

    convert_muladd_to_scaleshift(graph)
    convert_matmul_to_fully_connected(graph)
    batch_norm_fuse(graph)
    convert_add_or_mul_to_scaleshift(graph)  # scale = 1
    graph_clean_up(graph)

    log.debug("After graph_cleanup")
    graph.print_graph_stat()

    if argv.reverse_input_channels:
        reverse_input_channels(graph)

    if argv.move_to_preprocess:
        move_scaleshift_to_preprocess(graph)
        graph_clean_up(graph)

    FuseReshapesSequence().find_and_replace_pattern(graph)
    RemoveRedundantReshapes().find_and_replace_pattern(graph)

    input_names = find_inputs(graph)
    mf = []
    try:
        if mean_file and len(original_shapes) == 1:
            mf = loader.parse_mean(mean_file, original_shapes[input_names[0]],
                                   mean_file_offsets, caffe_pb2)
        elif mean_file:
            raise Error(
                'Mean file for topologies with multiple inputs is not supported. '
                + refer_to_faq_msg(9))
    except ValueError as e:
        raise Error(
            'Cannot load or process mean file: value error {}. ' +
            refer_to_faq_msg(10), str(e)) from e

    merge_nodes_permutations(graph)
    permute_data_nodes_attrs(graph)
    permute_op_nodes_attrs(graph)

    graph_clean_up(graph)
    log_step(argv.steps, 'BACK')
    class_registration.apply_replacements(
        graph, class_registration.ClassType.BACK_REPLACER)

    remove_const_ops(graph)
    CreateConstNodesReplacement().find_and_replace_pattern(graph)

    remove_output_ops(graph)
    log_step(argv.steps, 'EMIT')
    prepare_emit_ir(graph=graph,
                    data_type=argv.data_type,
                    output_dir=output_dir,
                    output_model_name=output_model_name,
                    mean_data=mf,
                    input_names=input_names,
                    meta_info=meta_info)
    return 0
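This variant takes the file paths as explicit arguments and reads a larger set of pipeline flags from argv; a sketch of a call with placeholder values (only flags referenced above are listed, and attributes used by get_meta_info and the replacement passes are omitted):

import argparse

argv = argparse.Namespace(
    steps=False,                          # controls the LOAD/FRONT/... step logging
    data_type='FP32',
    finegrain_fusing=None,
    disable_fusing=False,
    disable_resnet_optimization=False,
    reverse_input_channels=False,
    move_to_preprocess=False,
    generate_deprecated_IR_V2=False,
    generate_experimental_IR_V10=False,
    disable_omitting_optional=False,
    enable_flattening_nested_params=False,
)
# driver(argv, 'model.prototxt', 'model.caffemodel', 'my_model',
#        output_dir='.', caffe_proto_path='',
#        custom_layers_mapping_path=None)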
Example #8
def driver(argv: argparse.Namespace,
           proto_file_name: str,
           model_file_name: str,
           output_model_name: str,
           outputs: list,
           output_dir: str,
           scale: float,
           user_shapes: [None, list, np.array] = None,
           mean_scale_values: [dict, list] = (),
           mean_file: str = "",
           mean_file_offsets: tuple = None,
           custom_layers_mapping_path: str = None):
    meta_info = get_meta_info(argv)

    FusePermutesSequence.enabled = False

    proto, model = loader.load_caffe_proto_model(proto_file_name,
                                                 model_file_name)

    update_extractors_with_extensions(
        caffe_type_extractors, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.disable_flattening_optional_params if hasattr(
            argv, 'disable_flattening_optional_params') else False)

    try:
        graph, original_shapes = loader.caffe_pb_to_nx(proto, model)
    except ValueError as e:
        raise Error(
            'Invalid prototxt file: value error {}. ' + refer_to_faq_msg(11),
            str(e)) from e

    log.debug("After caffe_pb_to_nx")
    print_graph_stat(graph)
    check_empty_graph(graph, 'load_caffe_proto_model')

    graph.__setattr__('proto_path', proto_file_name)
    graph.__setattr__('caffemodel_path', model_file_name)
    graph.__setattr__('name',
                      getattr(proto, 'name', None) or output_model_name)
    graph.graph['layout'] = 'NCHW'
    graph.graph['cmd_params'] = argv
    graph.graph['fw'] = 'caffe'
    graph.graph['ir_version'] = 2 if argv.generate_deprecated_IR_V2 else 4

    extract_node_attrs(graph, lambda node: (True, common_caffe_fields(node)))

    log.debug("After adding specific nodes for outputs")
    print_graph_stat(graph)

    custom_layers_map = custom_layers_mapping.load_layers_xml(
        custom_layers_mapping_path)
    custom_layers_mapping.update_extractors(
        caffe_type_extractors,
        custom_layers_map, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.enable_flattening_nested_params if hasattr(
            argv, 'enable_flattening_nested_params') else False)

    extract_node_attrs(
        graph, lambda node: caffe_extractor(
            node, check_for_duplicates(caffe_type_extractors)))

    log.debug("After extract_node_attr")
    print_graph_stat(graph)

    packed_user_shapes, packed_outputs, freeze_placeholder = user_data_repack(
        graph, user_shapes, outputs, argv.freeze_placeholder_with_value)
    if argv.freeze_placeholder_with_value is not None:
        FreezePlaceholderValue.enabled = True
        FreezePlaceholderValue.replacement_dict = freeze_placeholder
        class_registration.update_registration([FrontReplacementSubgraph])
    output_op_nodes = add_output_ops(graph, packed_outputs)
    input_op_nodes = add_input_ops(graph, packed_user_shapes, True)
    override_placeholder_shapes(graph, packed_user_shapes)
    override_batch(graph, argv.batch)
    graph_clean_up(graph)
    check_empty_graph(graph, 'add_output_ops and add_input_ops')
    class_registration.apply_replacements(
        graph, class_registration.ClassType.FRONT_REPLACER)

    graph = create_tensor_nodes(graph)

    log.debug("After create_tensor_nodes")
    print_graph_stat(graph)

    remove_op_nodes(graph, {'op': 'Identity'})
    remove_output_ops(graph)
    graph_clean_up(graph)

    log.debug("After removing specific nodes for output")
    print_graph_stat(graph)

    # you need to pass required network outputs here
    # but we don't have a way yet, so just passing all discovered sinks
    mark_outputs(graph)
    graph_clean_up(graph)
    log.debug("After graph_cleanup")
    print_graph_stat(graph)

    graph = partial_infer(graph)
    log.debug("After partial_infer")
    print_graph_stat(graph)
    check_empty_graph(graph, 'partial_infer')
    duplicate_shared_weights(graph)

    input_op_nodes = add_input_ops(graph, packed_user_shapes, False)
    graph_clean_up(graph)
    check_empty_graph(graph, 'add_input_ops')
    scale_input(graph, scale)

    add_mean_scale_values(graph, mean_scale_values)

    log.debug("Split multi input convolutions")
    convert_multi_input_conv(graph)

    graph_clean_up(graph)
    log.debug("After graph_cleanup")
    print_graph_stat(graph)

    remove_op_nodes(graph, {'op': 'Dropout'})
    remove_op_nodes(graph, {'phase': 0})
    graph_clean_up(graph)

    class_registration.apply_replacements(
        graph, class_registration.ClassType.MIDDLE_REPLACER)

    mean_to_avgpool(graph)

    # Mark nodes with attr 'can_be_fused': False to disable fusing for specified nodes
    mark_unfused_nodes(graph, argv.finegrain_fusing)

    # need this pass even without fusing to convert scale with 2 inputs
    convert_scale_shift_to_mul_add(graph)
    graph_clean_up(graph)

    if not argv.disable_fusing:
        convert_bn_to_mul_add(graph)
        graph_clean_up(graph)

        fuse_mul_add_sequence(graph)
        graph_clean_up(graph)

        fuse_linear_ops(graph)
        graph_clean_up(graph)

    if not argv.disable_resnet_optimization:
        stride_optimization(graph)

    convert_muladd_to_scaleshift_or_power(graph)
    convert_matmul_to_fully_connected(graph)
    batch_norm_fuse(graph)
    convert_mul_add_to_power(graph)
    convert_add_to_scaleshift(graph)  # scale = 1
    convert_mul_to_scaleshift(graph)  # biases = 0

    graph_clean_up(graph)
    log.debug("After graph_cleanup")
    print_graph_stat(graph)

    if argv.reverse_input_channels:
        reverse_input_channels(graph)

    if argv.move_to_preprocess:
        move_scaleshift_to_preprocess(graph)
        graph_clean_up(graph)

    fuse_sequence_of_reshapes(graph)

    input_names = find_inputs(graph)
    mf = []
    try:
        if mean_file and len(original_shapes) == 1:
            mf = loader.parse_mean(mean_file, original_shapes[input_names[0]],
                                   mean_file_offsets)
        elif mean_file:
            raise Error(
                'Mean file for topologies with multiple inputs is not supported. '
                + refer_to_faq_msg(9))
    except ValueError as e:
        raise Error(
            'Cannot load or process mean file: value error {}. ' +
            refer_to_faq_msg(10), str(e)) from e

    class_registration.apply_replacements(
        graph, class_registration.ClassType.BACK_REPLACER)

    prepare_emit_ir(graph=graph,
                    data_type=argv.data_type,
                    output_dir=output_dir,
                    output_model_name=output_model_name,
                    mean_data=mf,
                    input_names=input_names,
                    meta_info=meta_info)
    return 0
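The oldest variant shown here also takes outputs, scale, user_shapes, and mean_scale_values directly; illustrative values consistent with the signature annotations (the exact structures expected by user_data_repack and add_mean_scale_values are not shown on this page):

import numpy as np

outputs = ['prob']                            # names of the requested output nodes
scale = 1.0                                   # global input scale
user_shapes = [np.array([1, 3, 224, 224])]    # per the [None, list, np.array] annotation
mean_scale_values = ()                        # default from the signature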
Example #9
def driver(argv: argparse.Namespace,
           proto_file_name: str,
           model_file_name: str,
           output_model_name: str,
           output_dir: str,
           caffe_proto_path: str,
           custom_layers_mapping_path: str = None):
    meta_info = get_meta_info(argv)

    caffe_pb2 = loader.import_caffe_pb2(caffe_proto_path)

    proto, model = loader.load_caffe_proto_model(caffe_pb2, proto_file_name,
                                                 model_file_name)

    update_extractors_with_extensions(
        caffe_type_extractors, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.disable_flattening_optional_params if hasattr(
            argv, 'disable_flattening_optional_params') else False)

    try:
        graph, original_shapes = loader.caffe_pb_to_nx(proto, model)
    except ValueError as e:
        raise Error(
            'Invalid prototxt file: value error {}. ' + refer_to_faq_msg(11),
            str(e)) from e
    graph.check_empty_graph('load_caffe_proto_model')

    graph.__setattr__('proto_path', proto_file_name)
    graph.__setattr__('caffemodel_path', model_file_name)
    graph.__setattr__('name',
                      getattr(proto, 'name', None) or output_model_name)
    graph.graph['layout'] = 'NCHW'
    graph.graph['cmd_params'] = argv
    graph.graph['fw'] = 'caffe'
    if graph.graph['cmd_params'].generate_experimental_IR_V10:
        version = 10
    else:
        version = 6
    graph.graph[
        'ir_version'] = 2 if argv.generate_deprecated_IR_V2 else version

    graph.graph['original_shapes'] = original_shapes
    graph.graph['caffe_pb2'] = caffe_pb2

    custom_layers_map = custom_layers_mapping.load_layers_xml(
        custom_layers_mapping_path)
    custom_layers_mapping.update_extractors(
        caffe_type_extractors,
        custom_layers_map, argv.disable_omitting_optional if hasattr(
            argv, 'disable_omitting_optional') else False,
        argv.enable_flattening_nested_params if hasattr(
            argv, 'enable_flattening_nested_params') else False)
    extract_node_attrs(
        graph, lambda node: caffe_extractor(
            node, check_for_duplicates(caffe_type_extractors)))

    # --------------------------------- LOAD END ------------------------------------------------------

    class_registration.apply_replacements(graph, [
        class_registration.ClassType.FRONT_REPLACER,
        class_registration.ClassType.MIDDLE_REPLACER,
        class_registration.ClassType.BACK_REPLACER
    ])

    prepare_emit_ir(graph=graph,
                    data_type=argv.data_type,
                    output_dir=output_dir,
                    output_model_name=output_model_name,
                    mean_data=graph.graph['mf'],
                    input_names=graph.graph['input_names'],
                    meta_info=meta_info)
    return 0