Example #1
    def infer(node: Node):
        input_shape = node.in_node(0).shape
        name = node.soft_get('name', node.id)
        axes = node.axes
        if axes is not None:
            if 0 in axes:
                raise Error('Reduction over the batch dimension in node "{}" '
                            'is not supported by the backend.'.format(name))
            for i in range(2, len(input_shape)):
                if i not in axes:
                    raise Error(
                        'Reduction over all spatial dimensions in node "{}" '
                        'is obligatory for the backend.'.format(name))
            if 1 in axes and not node.across_channels:
                raise Error(
                    'Inconsistent values of axes ({}) and across_channels ({}) parameters '
                    'in node "{}".'.format(str(axes),
                                           str(node.across_channels), name))

        copy_shape_infer(node)
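
Example #1 delegates the final step to copy_shape_infer, as do all of the examples that follow. For orientation, here is a minimal sketch of the behavior these call sites rely on, assuming the Node API (in_node()/out_node() accessors) used above; this is a hypothetical paraphrase, not the actual Model Optimizer source:

    import numpy as np

    def copy_shape_infer_sketch(node, value_infer=None):
        # Propagate the shape of the single input to the single output.
        node.out_node().shape = np.array(node.in_node().shape)
        if value_infer is not None:
            # Optional hook for propagating a constant value as well,
            # as passed in some of the examples below.
            node.out_node().value = value_infer(node)
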
Example #2
 def __init__(self, graph: Graph, attrs: dict):
     super().__init__(
         graph, {
             'op': self.op,
             'type': None,
             'version': None,
             'infer': lambda n: copy_shape_infer(n, copy_value),
             'type_infer': None,
             'in_ports_count': 1,
             'out_ports_count': 1,
         }, attrs)
Example #3
    def test_region_infer(self):
        graph = build_graph(
            nodes_attributes, [('node_1', 'norm'), ('norm', 'node_3'),
                               ('node_3', 'op_output')], {
                                   'node_3': {
                                       'shape': None
                                   },
                                   'node_1': {
                                       'shape': np.array([1, 3, 227, 227]),
                                       'value': None
                                   },
                                   'norm': {}
                               })

        norm_node = Node(graph, 'norm')
        copy_shape_infer(norm_node)
        exp_shape = np.array([1, 3, 227, 227])
        res_shape = graph.node['node_3']['shape']
        for i in range(len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])
Example #4
 def infer(node: Node):
     if len(node.in_nodes()) > 0:
         # If this is a memory node with an input, it should not have
         # an output. However, in order not to break the MO pipeline,
         # we set the same shape on the output node; that node will be
         # removed later in the pipeline.
         copy_shape_infer(node)
         return
     elif node.has_valid('shape'):
         # For Memory nodes without inputs, inferring the shape is
         # difficult, but the shape can often be determined while
         # extracting attributes, in which case the 'shape' attribute
         # is already set during extraction.
         batch = 1
         for out_node in node.out_nodes().values():
             out_node.shape = [batch, *node.shape[:]]
         return
     else:
         raise Error('Model Optimizer is unable to calculate output shape of Memory node {}. ' +
                     refer_to_faq_msg(88),
                     node.id)
Example #5
    def test_grn_infer(self):
        graph = build_graph(
            nodes_attributes, [('node_1', 'grn'), ('grn', 'node_3')], {
                'node_3': {
                    'is_output': True,
                    'shape': None
                },
                'node_1': {
                    'shape': np.array([1, 3, 227, 227])
                },
                'grn': {
                    'bias': 1
                }
            })

        grn_node = Node(graph, 'grn')
        copy_shape_infer(grn_node)
        exp_shape = np.array([1, 3, 227, 227])
        res_shape = graph.node['node_3']['shape']
        for i in range(len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])
Example #6
    def infer(node: Node):
        assert node.has_valid(
            'dst_type'
        ), 'Destination type of "Cast" operation should be extracted earlier'
        dst_type = node.dst_type
        copy_shape_infer(node)
        if node.in_node(0).has_valid('value'):
            new_blob, finite_match_count, zero_match_count = convert_blob(
                node.in_node(0).value, dst_type)
            node.out_port(0).data.set_value(new_blob)

            if finite_match_count:
                log.error((
                    "{} elements of {} were clipped to infinity while converting an input blob for node '{}' to {}. "
                    + refer_to_faq_msg(76)).format(finite_match_count,
                                                   new_blob.size, node.name,
                                                   dst_type))
            if zero_match_count:
                log.warning((
                    "{} elements of {} were clipped to zero while converting an input blob for node '{}' to {}. "
                    + refer_to_faq_msg(77)).format(zero_match_count,
                                                   new_blob.size, node.name,
                                                   dst_type))
Example #7
 def infer(node: Node):
     # MemoryOffset is split into 2 parts to avoid a cycle in the graph.
     # Calculate the shape from the shape of the previous layer where
     # possible; otherwise, fall back to shape information from the
     # initial Kaldi model.
     if not node.in_port(0).disconnected():
         copy_shape_infer(node)
         pair_node = Node(node.graph, node.pair_name)
         pair_node.out_port(0).data.set_shape(
             node.out_port(0).data.get_shape())
     else:
         pair_node = Node(node.graph, node.pair_name)
         if pair_node.in_port(0).data.get_shape() is not None:
             node.out_port(0).data.set_shape(
                 pair_node.in_port(0).data.get_shape())
             copy_shape_infer(pair_node)
         elif pair_node.has_valid('element_size'):
             # TODO Add here real batch
             node.out_port(0).data.set_shape(
                 np.array([1, pair_node['element_size']]))
         elif pair_node.in_port(0).get_source().node.has_valid('out-size'):
             out_size = pair_node.in_port(0).get_source().node['out-size']
             node.out_port(0).data.set_shape(np.array([1, out_size]))
         elif pair_node.in_port(0).get_source().node.op in ["Add", "ReLU"] and \
                 pair_node.in_port(0).get_source().node.in_port(0).get_source().node.has_valid('out-size'):
             out_size = pair_node.in_port(0).get_source().node.in_port(
                 0).get_source().node['out-size']
             node.out_port(0).data.set_shape(np.array([1, out_size]))
         elif pair_node.in_port(0).get_source().node.has_valid('in_dim'):
             out_size = pair_node.in_port(0).get_source().node['in_dim']
             node.out_port(0).data.set_shape(np.array([1, out_size]))
         else:
             raise Error(
                 "Can't calculate MemoryOffset shape for node {}. ".format(
                     node.id) +
                 "Possibly you need to add shape for it through --input_shape"
             )
Example #8
 def test_copy_shape_infer(self, single_output_infer_mock):
     single_output_infer_mock.return_value = 0
     node = FakeNode(np.array([1, 2]))
     copy_shape_infer(node)
     self.assertTrue(single_output_infer_mock.called)
Example #9
 def shape_infer(node):
     copy_shape_infer(node)
Example #10
def batch_norm_4_infer(node: Node):
    copy_shape_infer(node)
    mark_input_bins(node, ['weights', 'biases', 'mean', 'variance'])
    if node.has('fix_gamma') and node.fix_gamma:
        # go to the 1st input (weights) and set all of its elements to 1
        node.in_node(1).value = np.full_like(node.in_node(1).value, 1, dtype=np.float32)
Example #11
 def infer(node: Node):
     if node.axis < 0:
         node.axis = len(node.in_node().shape) + node.axis
     copy_shape_infer(node)
     PermuteAttrs.create_permute_attrs(node, attrs=[('axis', 'input:0')])
Example #12
 def infer(node: Node):
     assert node.has_valid('dst_type'), 'Destination type of "Cast" operation should be extracted earlier'
     copy_shape_infer(node, lambda n: n.in_node().value.astype(n.dst_type))
Example #13
 def infer(node: Node):
     mark_input_bins(node)
     copy_shape_infer(node)
Example #14
def roll_infer(node: Node):
    PermuteInputs().set_input_permutation(node.in_node(2), node, 'input:0',
                                          'axis')
    copy_shape_infer(node)
Example #15
 def infer(node: Node):
     # simply propagate the input shape to the output
     copy_shape_infer(node)
Example #16
 def shape_infer(node):
     copy_shape_infer(node, value_infer=copy_value)
Example #17
 def infer(node: Node):
     if node.axis < 0:
         node.axis = len(node.in_node().shape) + node.axis
     copy_shape_infer(node)
Example #18
 def infer(node: Node):
     copy_shape_infer(node, lambda n: n.in_node().value.astype(n.dst_type))
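
Examples #2 and #16 pass a copy_value helper as the value_infer callback, while Examples #12 and #18 pass a lambda that casts the input value. A minimal sketch of what such a value_infer helper might look like, under the same assumed Node API (a hypothetical sketch, not the exact source):

    def copy_value(node):
        # Return a copy of the constant input blob, if one is known.
        in_value = node.in_node().value
        return None if in_value is None else in_value.copy()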