Example #1
def test_get_IENetwork_from_nGraph():
    func = create_relu([1, 3, 22, 22])
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork != None
    assert ng.function_from_cnn(cnnNetwork) != None
    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 != None
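Note: several snippets in this listing assume a common set of imports and a create_relu helper that the page does not show. A minimal sketch of what they might look like (the names, shapes, and helper body are assumptions, not the exact code from the original test suite):

import numpy as np
import ngraph as ng
from ngraph.impl import Function, Parameter, Shape, Type
from openvino.inference_engine import IECore, IENetwork

def create_relu(input_shape):
    # Accepts plain integers or ng.Dimension objects (see Example #3) and
    # builds a Parameter -> Relu graph wrapped in an nGraph Function.
    input_shape = ng.impl.PartialShape(input_shape)
    param = ng.parameter(input_shape, dtype=np.float32, name="data")
    relu = ng.relu(param)
    return Function([relu], [param], "test")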
Example #2
def test_get_IENetwork_from_nGraph():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork != None
    assert ng.function_from_cnn(cnnNetwork) != None
    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 != None
Example #3
def test_create_two_exec_net():
    function = create_relu([
        ng.Dimension(0, 5),
        ng.Dimension(4),
        ng.Dimension(20),
        ng.Dimension(20)
    ])
    net = ng.function_to_cnn(function)
    ie_core = IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net1 = ie_core.load_network(net, "TEMPLATE", num_requests=2)
    assert ng.function_from_cnn(net) != None
    exec_net2 = ie_core.load_network(net, "TEMPLATE", num_requests=2)
    assert ng.function_from_cnn(net) != None
Example #4
def test_serialize():
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    net.serialize("./serialized_net.xml", "./serialized_net.bin")
    serialized_net = ie.read_network(model="./serialized_net.xml",
                                     weights="./serialized_net.bin")
    func_net = ng.function_from_cnn(net)
    ops_net = func_net.get_ordered_ops()
    ops_net_names = [op.friendly_name for op in ops_net]
    func_serialized_net = ng.function_from_cnn(serialized_net)
    ops_serialized_net = func_serialized_net.get_ordered_ops()
    ops_serialized_net_names = [op.friendly_name for op in ops_serialized_net]
    assert ops_serialized_net_names == ops_net_names
    os.remove("./serialized_net.xml")
    os.remove("./serialized_net.bin")
Example #5
def test_getting_shapes():
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    func = ng.function_from_cnn(net)
    ops = func.get_ordered_ops()
    shapes = [sh for sh in ops[2].shape]
    assert shapes == [1, 16, 32, 32]
Example #6
    def test_dlsdk_launcher_valid_affinity_map(self, mocker, models_dir):
        affinity_map = {'conv1': 'GPU'}
        if not has_layers():
            affinity_map.update({'conv1/Dims294/copy_const': 'GPU'})

        mocker.patch(
            'openvino.tools.accuracy_checker.launcher.dlsdk_launcher.read_yaml',
            return_value=affinity_map)

        dlsdk_test_model = get_dlsdk_test_model(
            models_dir, {
                'device': 'HETERO:CPU,GPU',
                'affinity_map': './affinity_map.yml'
            })
        if has_layers():
            layers = dlsdk_test_model.network.layers
            for key, value in affinity_map.items():
                assert layers[key].affinity == value
        else:
            ng_function = ng.function_from_cnn(dlsdk_test_model.network)
            for node in ng_function.get_ordered_ops():
                if node.get_friendly_name() != 'conv1':
                    continue
                assert node.get_friendly_name() in affinity_map
                assert node.get_rt_info()['affinity'] == affinity_map[
                    node.get_friendly_name()]
Example #7
def test_moc_transformations():
    net = get_test_cnnnetwork()
    ApplyMOCTransformations(net, False)

    f = ng.function_from_cnn(net)
    assert f != None
    assert len(f.get_ops()) == 3
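Examples #7, #8, #9, and #13 rely on a get_test_cnnnetwork helper that is not shown here. Presumably it builds a trivial Parameter -> Relu -> Result graph (three ops, which matches the assertions) and wraps it in an IENetwork; a plausible sketch, not the original helper:

def get_test_cnnnetwork():
    # Parameter -> Relu -> Result yields exactly the three ops the tests expect.
    param = Parameter(Type.f32, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    return IENetwork(caps)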
Example #8
def test_low_latency_transformations():
    net = get_test_cnnnetwork()
    ApplyLowLatencyTransformation(net)

    f = ng.function_from_cnn(net)
    assert f != None
    assert len(f.get_ops()) == 3
Example #9
def test_pruning_transformations():
    net = get_test_cnnnetwork()
    ApplyPruningTransformation(net)

    f = ng.function_from_cnn(net)
    assert f != None
    assert len(f.get_ops()) == 3
Example #10
    def init_network(self):
        """Function which initializes Intel Inference Engine.
        """
        # Load OpenVINO Inference Engine.
        self.get_logger().info(f"Loading Inference Engine on {self.device}")
        self.ie = IECore()

        # Read and load the network.
        self.net = self.ie.read_network(model=constants.MODEL_XML,
                                        weights=constants.MODEL_BIN)
        self.func = ng.function_from_cnn(self.net)
        self.ops = self.func.get_ordered_ops()
        self.exec_net = self.ie.load_network(network=self.net,
                                             device_name=self.device)

        # Read expected input image info from network and prepare input blobs.
        # n: batch size, c: no. of channels, h: input height, w: input width
        for self.input_key in self.net.input_info:
            self.input_name = self.input_key
            self.n, self.c, self.h, self.w = self.net.input_info[
                self.input_key].input_data.shape
        # Initializing to float for optimizing in later functions
        self.h = float(self.h)
        self.w = float(self.w)

        # Prepare output blobs
        self.out_blob = next(iter(self.net.outputs))
Example #11
 def __init__(self, model, confidence_threshold=0.5, iou_threshold=0.4):
     super(YoloDetector, self).__init__(model)
     # Parameter check
     assert 0.0 <= confidence_threshold and confidence_threshold <= 1.0, \
         "Confidence threshold is expected to be in range [0; 1]"
     assert 0.0 <= iou_threshold and iou_threshold <= 1.0, \
         "Intersection over union threshold is expected to be in range [0; 1]"
     
     self.confidence_threshold = confidence_threshold
     self.iou_threshold = iou_threshold
     
     if hasattr(model, "input_info") :
         # For OpenVINO 2021.1 and later
         # This program only supports single-input models, so check
         assert len(model.input_info) == 1, "Expected 1 input blob"
         self.input_blob =  next(iter(model.input_info))
         self.input_shape = model.input_info[self.input_blob].input_data.shape
     else :
         # For OpenVINO 2020.4 and earlier
         # This program only supports single-input models, so check
         assert len(model.inputs) == 1, "Expected 1 input blob"
         self.input_blob =  next(iter(model.inputs))
         self.input_shape = model.inputs[self.input_blob].shape
     
     self.layer_params = {}
     '''
     Workaround for the following warning raised under 2021.1 by the lines below:
     DeprecationWarning: 'layers' property of IENetwork class is deprecated.
     For iteration over network please use get_ops()/get_ordered_ops() methods from nGraph Python API
     '''
     if ng :
         # If ng is defined -> 2021.1 or later
         function = ng.function_from_cnn(model)
         nodes = { n.friendly_name : n for n in function.get_ops() }
         self.layer_params_aaa = {}
         for layer_name in model.outputs.keys() :
             # print(layer_name)
             output_shapes = list(nodes[layer_name].inputs()[0].get_source_output().get_node().shape)
             output_params = nodes[layer_name]._get_attributes()
             
             assert output_shapes[2] == output_shapes[3], \
                     "Invalid size of output blob. It should be in NCHW layout and height should be equal to width. " \
                     f"Current height = {output_shapes[2]}, " \
                     f"current width = {output_shapes[3]}"
              
             self.layer_params[layer_name]  = self.YoloParams(output_params, output_shapes[2])
     else :
         layers = model.layers
         for layer_name in model.outputs.keys() :
             # print(layer_name)
             output_shapes = layers[layers[layer_name].parents[0]].out_data[0].shape
             output_params = layers[layer_name].params
             
             assert output_shapes[2] == output_shapes[3], \
                     "Invalid size of output blob. It should be in NCHW layout and height should be equal to width. " \
                     f"Current height = {output_shapes[2]}, " \
                     f"current width = {output_shapes[3]}"
              
             self.layer_params[layer_name]  = self.YoloParams(output_params, output_shapes[2])
Example #12
def test_create_IENetwork_from_nGraph():
    func = create_ngraph_function([1, 3, 22, 22])
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork != None
    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 != None
    assert len(func2.get_ops()) == 3
Example #13
def test_make_stateful_transformations():
    net = get_test_cnnnetwork()
    ApplyMakeStatefulTransformation(net, {"parameter": "result"})

    f = ng.function_from_cnn(net)
    assert f != None
    assert len(f.get_parameters()) == 0
    assert len(f.get_results()) == 0
Example #14
def test_get_ops_from_IENetwork():
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    func = ng.function_from_cnn(net)
    ops = func.get_ordered_ops()
    ops_names = [op.friendly_name for op in ops]
    assert len(ops_names) != 0
    assert 'data' in ops_names
Example #15
def test_get_set_rt_info():
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    func = ng.function_from_cnn(net)
    ops = func.get_ordered_ops()
    rt_info = ops[14].get_rt_info()
    rt_info["affinity"] = "test_affinity"
    assert ops[14].get_rt_info()["affinity"] == "test_affinity"
Example #16
 def _get_meta_from_ngraph(self, layers_info):
     ng_func = ngraph.function_from_cnn(self.net)
     for node in ng_func.get_ordered_ops():
         layer_name = node.get_friendly_name()
         if layer_name not in layers_info.keys():
             continue
         layers_info[layer_name].meta = node._get_attributes()
         layers_info[layer_name].type = node.get_type_name()
     return layers_info
Example #17
def import_onnx_model(model: onnx.ModelProto) -> Function:
    onnx.checker.check_model(model)
    model_byte_string = model.SerializeToString()

    ie = IECore()
    ie_network = ie.read_network(model=model_byte_string, weights=b"", init_from_buffer=True)

    ng_function = ng.function_from_cnn(ie_network)
    return ng_function
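A possible way to call the function above (the model path is hypothetical; any ONNX file with supported ops would do):

import onnx

model = onnx.load("model.onnx")  # hypothetical path
ng_function = import_onnx_model(model)
print([op.get_friendly_name() for op in ng_function.get_ordered_ops()])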
Example #18
def test_CreateIENetworkFromNGraph():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork != None
    assert ng.function_from_cnn(cnnNetwork) != None
    assert len(cnnNetwork.layers) == 2
Example #19
def test_serialize(device):
    ie = IECore()
    if device == "CPU":
        if ie.get_metric(device, "FULL_DEVICE_NAME") == "arm_compute::NEON":
            pytest.skip("Can't run on ARM plugin due to ngraph")
    import ngraph as ng
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    net.serialize("./serialized_net.xml", "./serialized_net.bin")
    serialized_net = ie.read_network(model="./serialized_net.xml",
                                     weights="./serialized_net.bin")
    func_net = ng.function_from_cnn(net)
    ops_net = func_net.get_ordered_ops()
    ops_net_names = [op.friendly_name for op in ops_net]
    func_serialized_net = ng.function_from_cnn(serialized_net)
    ops_serialized_net = func_serialized_net.get_ordered_ops()
    ops_serialized_net_names = [op.friendly_name for op in ops_serialized_net]
    assert ops_serialized_net_names == ops_net_names
    os.remove("./serialized_net.xml")
    os.remove("./serialized_net.bin")
Example #20
def test_query_network(device):
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    query_res = ie.query_network(net, device)
    func_net = ng.function_from_cnn(net)
    ops_net = func_net.get_ordered_ops()
    ops_net_names = [op.friendly_name for op in ops_net]
    assert [key for key in query_res.keys() if key not in ops_net_names] == [], \
        "Not all network layers present in query_network results"
    assert next(iter(set(query_res.values()))) == device, "Wrong device for some layers"
Example #21
    def load(self):
        with open(os.path.join(self.model_path, 'config.json')) as f:
            data = json.load(f)
        try:
            self.validate_json_configuration(data)
            self.set_model_configuration(data)
        except ApplicationError as e:
            raise e

        with open(os.path.join(self.model_path, 'classes.txt'), 'r') as f:
            self.classes = [line.strip() for line in f.readlines()]

        self.ie = IECore()

        # Load the model
        models_path = Path(self.model_path)
        model_xml = next(models_path.glob("*.xml"))
        model_bin = model_xml.parent / (model_xml.stem + ".bin")
        self.net = self.ie.read_network(model=str(model_xml),
                                        weights=str(model_bin))
        func = ng.function_from_cnn(self.net)
        ops = func.get_ordered_ops()

        assert (len(self.net.input_info.keys()) in {1, 2}), \
         "Network must have one or two inputs"

        self.input_name, self.input_info_name = None, None
        for k, v in self.net.input_info.items():
            if len(v.layout) == 4:
                self.input_name = k
                v.precision = "U8"
            elif len(v.layout) == 2:
                self.input_info_name = k
                v.precision = "FP32"
                assert (v.input_data.shape[1] in {3, 6}
                        and v.input_data.shape[0]
                        == 1), "Input info should be 3 or 6 values length"

        self.output_name, output_info = "", self.net.outputs[next(
            iter(self.net.outputs.keys()))]
        output_ops = {op.friendly_name : op for op in ops \
           if op.friendly_name in self.net.outputs and op.get_type_name() == "DetectionOutput"}
        if len(output_ops) != 0:
            self.output_name, output_info = output_ops.popitem()

        if self.output_name == "":
            print("Can't find a DetectionOutput layer in the topology")
            sys.exit(-1)

        output_info.precision = "FP32"

        self.exec_net = self.ie.load_network(network=self.net,
                                             device_name="CPU")
Example #22
def get_net_copy_with_output(model: str, output: str, core: IECore):
    net_copy = get_net(model=model, core=core)
    func = ng.function_from_cnn(net_copy)
    if output not in ['None', None]:
        # output with port_id in name is absent in ops list
        founded_op = [op for op in func.get_ops() if op.friendly_name == output]
        if founded_op:
            net_copy.add_outputs(output)
        else:
            split = output.rsplit(".", 1)
            net_copy.add_outputs((split[0], int(split[1])))
    return net_copy
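The get_net helper used above is not part of the listing; presumably it is a thin wrapper around IECore.read_network. A reasonable stand-in (an assumption, not the original helper):

import os

def get_net(model: str, core: IECore):
    # Assumes the weights file sits next to the IR .xml with the same stem.
    weights = os.path.splitext(model)[0] + ".bin"
    return core.read_network(model=model, weights=weights)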
Example #23
 def _get_output_info(self):
     def get_parent(node):
         return node.inputs()[0].get_source_output().get_node()
     ng_func = ngraph.function_from_cnn(self.net)
     output_info = {}
     for node in ng_func.get_ordered_ops():
         layer_name = node.get_friendly_name()
         if layer_name not in self.net.outputs:
             continue
         shape = list(get_parent(node).shape)
         yolo_params = self.Params(node._get_attributes(), shape[2:4])
         output_info[layer_name] = (shape, yolo_params)
     return output_info
Example #24
def get_objects(output, net, new_frame_height_width, source_height_width, prob_threshold, is_proportional):
    objects = list()
    function = ng.function_from_cnn(net)
    for layer_name, out_blob in output.items():
        #out_blob = out_blob.buffer.reshape(net.layers[net.layers[layer_name].parents[0]].out_data[0].shape)
        #layer_params = YoloParams(net.layers[layer_name].params, out_blob.shape[2])
        out_blob = out_blob.buffer.reshape(net.outputs[layer_name].shape)
        params = [x._get_attributes() for x in function.get_ordered_ops() if x.get_friendly_name() == layer_name][0]
        layer_params = YoloParams(params, out_blob.shape[2])
        objects += parse_yolo_region(out_blob, new_frame_height_width, source_height_width, layer_params,
                                     prob_threshold, is_proportional)

    return objects
Example #25
def test_get_ops_from_IENetwork():
    ie = IECore()
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    func = ng.function_from_cnn(net)
    ops = func.get_ordered_ops()
    ops_names = [op.friendly_name for op in ops]
    assert ops_names == ['data', '20/mean/Fused_Mul_614616_const', '19/WithoutBiases', 'data_add_575/copy_const',
                         '19/Fused_Add_', '21', '22', 'onnx_initializer_node_8/Output_0/Data__const',
                         '23/WithoutBiases', '23/Dims357/copy_const', '23', '25/mean/Fused_Mul_618620_const',
                         '24/WithoutBiases', 'data_add_578583/copy_const', '24/Fused_Add_', '26', '27',
                         '28/Reshape/Cast_1955_const', '28/Reshape', 'onnx_initializer_node_17/Output_0/Data__const',
                         '29/WithoutBiases', 'onnx_initializer_node_18/Output_0/Data_/copy_const', '29', 'fc_out',
                         'fc_out/sink_port_0']
Example #26
    def __init__(self, *args, upsample_ratio=1, **kwargs):
        super().__init__(*args, **kwargs)

        self.pooled_heatmaps_blob_name = 'pooled_heatmaps'
        self.heatmaps_blob_name = 'heatmaps'
        self.pafs_blob_name = 'pafs'

        function = ng.function_from_cnn(self.net)
        paf = function.get_output_op(0)
        paf = paf.inputs()[0].get_source_output().get_node()
        paf.set_friendly_name(self.pafs_blob_name)
        heatmap = function.get_output_op(1)
        heatmap = heatmap.inputs()[0].get_source_output().get_node()
        heatmap.set_friendly_name(self.heatmaps_blob_name)

        # Add keypoints NMS to the network.
        # Heuristic NMS kernel size adjustment depending on the feature maps upsampling ratio.
        p = int(np.round(6 / 7 * upsample_ratio))
        k = 2 * p + 1
        pooled_heatmap = ng.max_pool(heatmap,
                                     kernel_shape=(k, k),
                                     pads_begin=(p, p),
                                     pads_end=(p, p),
                                     strides=(1, 1),
                                     name=self.pooled_heatmaps_blob_name)
        f = ng.impl.Function([
            ng.result(heatmap, name=self.heatmaps_blob_name),
            ng.result(pooled_heatmap, name=self.pooled_heatmaps_blob_name),
            ng.result(paf, name=self.pafs_blob_name)
        ], function.get_parameters(), 'hpe')

        self.image_blob_name = self._get_inputs(self.net)
        self.net = IENetwork(ng.impl.Function.to_capsule(f))
        self.exec_net = self.ie.load_network(
            network=self.net,
            device_name=self.device,
            num_requests=self.max_num_requests)
        self.requests = self.exec_net.requests
        self.empty_requests = deque(self.requests)

        self.num_joints = self.net.outputs[self.heatmaps_blob_name].shape[
            1] - 1  # The last channel is for background.
        target_size = self.net.input_info[
            self.image_blob_name].input_data.shape[-2]
        self.output_scale = target_size / self.net.outputs[
            self.heatmaps_blob_name].shape[-2]
        if self.target_size is None:
            self.target_size = target_size

        self.decoder = OpenPoseDecoder(num_joints=self.num_joints)
Example #27
def test_query_network(device):
    ie = IECore()
    if device == "CPU":
        if ie.get_metric(device, "FULL_DEVICE_NAME") == "arm_compute::NEON":
            pytest.skip("Can't run on ARM plugin due to ngraph")
    import ngraph as ng
    net = ie.read_network(model=test_net_xml, weights=test_net_bin)
    query_res = ie.query_network(net, device)
    func_net = ng.function_from_cnn(net)
    ops_net = func_net.get_ordered_ops()
    ops_net_names = [op.friendly_name for op in ops_net]
    assert [key for key in query_res.keys() if key not in ops_net_names] == [], \
        "Not all network layers present in query_network results"
    assert next(iter(set(query_res.values()))) == device, "Wrong device for some layers"
Example #28
def test_offline_api():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork != None

    ApplyMOCTransformations(cnnNetwork, False)

    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 != None
    assert len(func2.get_ops()) == 3
Example #29
def test_import_onnx_with_external_data():
    model_path = os.path.join(os.path.dirname(__file__), "models/external_data.onnx")
    ie = IECore()
    ie_network = ie.read_network(model=model_path)

    ng_function = ng.function_from_cnn(ie_network)

    dtype = np.float32
    value_a = np.array([1.0, 3.0, 5.0], dtype=dtype)
    value_b = np.array([3.0, 5.0, 1.0], dtype=dtype)
    # third input [5.0, 1.0, 3.0] read from external file

    runtime = get_runtime()
    computation = runtime.computation(ng_function)
    result = computation(value_a, value_b)
    assert np.allclose(result, np.array([3.0, 3.0, 3.0], dtype=dtype))
Example #30
def test_reshape_with_partial_shape(device, shape, p_shape):
    function = create_relu(shape)
    net = ng.function_to_cnn(function)
    net.reshape({"data": p_shape})
    changedFunction = ng.function_from_cnn(net)
    p_shape = ng.impl.PartialShape(p_shape)
    assert changedFunction.get_parameters()[0].get_partial_shape().is_dynamic
    assert changedFunction.get_results()[0].get_output_partial_shape(
        0).is_dynamic
    assert function.get_parameters()[0].get_partial_shape().is_dynamic
    assert function.get_results()[0].get_output_partial_shape(0).is_dynamic
    assert changedFunction.get_parameters()[0].get_partial_shape() == p_shape
    assert changedFunction.get_results()[0].get_output_partial_shape(
        0) == p_shape
    assert function.get_parameters()[0].get_partial_shape() == p_shape
    assert function.get_results()[0].get_output_partial_shape(0) == p_shape
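The shape and p_shape arguments in the test above come from pytest parametrization that the listing omits, and device is a fixture. One illustrative decorator (the concrete values are an assumption; any partially dynamic p_shape satisfies the is_dynamic asserts) might look like:

import pytest

@pytest.mark.parametrize("shape, p_shape", [
    ([1, 3, 22, 22], [-1, 3, 22, 22]),
    ([1, 3, 22, 22], [1, 3, -1, 22]),
])
def test_reshape_with_partial_shape(device, shape, p_shape):
    ...  # body as shown in Example #30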