# Imports assumed by the snippets below, following the OpenVINO Model Optimizer
# package layout (module paths can differ between OpenVINO releases).
# backpropdata_infer is defined next to these extenders in the Model Optimizer
# sources and is not reproduced here.
import logging as log

import numpy as np

from mo.front.common.partial_infer.utils import int64_array, mark_input_bins
from mo.graph.graph import Node
from mo.middle.passes.convert_data_type import destination_type_to_np_data_type
from mo.utils.ir_reader.extender import Extender


# Shared extender for *BackpropData layers: normalizes stride/pad/dilation
# attributes and assigns backpropdata_infer.
def common_backpropdata_extender(op: Node):
    for attr in [
            'strides', 'output_padding', 'pads_begin', 'pads_end', 'dilations'
    ]:
        Extender.attr_to_list(op, attr)

    if op.has_valid('output_padding'):
        op.output_padding = int64_array([0, 0] + op.output_padding)

    dim = len(op.strides)

    if op.has_valid('pads_begin') and op.has_valid('pads_end'):
        pad = [[0, 0], [0, 0]]
        pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])

        op['pad'] = int64_array(pad)

    op['spatial_dims'] = [i + 2 for i in range(dim)]

    if not op.has_valid('dilations'):
        op['dilations'] = [1 for _ in range(dim)]
    if not op.has_valid('strides'):
        op['strides'] = [1 for _ in range(dim)]

    op['dilation'] = int64_array([1, 1] + op.dilations)
    op['stride'] = int64_array([1, 1] + op.strides)

    op['infer'] = backpropdata_infer
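
    # Converts the string-typed 'classes_index_type' and 'sequence_length_type'
    # attributes into numpy data types.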
    def extend(op: Node):
        if op.has_valid('classes_index_type'):
            op['classes_index_type'] = destination_type_to_np_data_type(
                op.classes_index_type)
        if op.has_valid('sequence_length_type'):
            op['sequence_length_type'] = destination_type_to_np_data_type(
                op.sequence_length_type)
Example #3
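    # Defaults 'activations' to None, marks the inputs from port 2 onwards as
    # binary (weight-like) inputs via mark_input_bins, and requests copying of
    # the input blobs.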
    def extend(op: Node):
        if not op.has_valid('activations'):
            op['activations'] = None

        mark_input_bins(op, start_port=2)

        op['need_copy_input_blobs'] = True
Example #4
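    # Convolution-style extender: turns scalar attributes into lists, builds
    # full-rank stride/dilation/pad arrays (prepending batch and channel dims),
    # and records which kernel dimensions hold the input/output feature channels.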
    def extend(op: Node):
        for attr in [
                'strides', 'dilations', 'pads_begin', 'pads_end',
                'output_padding'
        ]:
            Extender.attr_to_list(op, attr)

        op['stride'] = int64_array([1, 1] + op.strides)
        op['dilation'] = int64_array([1, 1] + op.dilations)

        op['batch_dims'] = int64_array([0])
        op['channel_dims'] = int64_array([1])

        if op.has_valid('output_padding'):
            op.output_padding = int64_array([0, 0] + op.output_padding)

        # Be VERY careful with these attributes!
        op['input_feature_channel'] = 1
        op['output_feature_channel'] = 0

        dim = len(op.pads_begin)

        assert dim in (1, 2, 3), '{}D Convolution not supported!'.format(dim)

        pad = [[0, 0], [0, 0]]
        pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])

        op['pad'] = int64_array(pad)

        op['spatial_dims'] = [i + 2 for i in range(dim)]
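
# Shape inference for Deconvolution: derives the spatial output dimensions from
# the input shape, kernel, strides, dilations and padding (or the 'auto_pad' mode).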
def deconvolution_infer(node: Node):
    dims = int64_array(node.in_node(0).shape)
    dilations = int64_array(node.dilations)
    strides = int64_array(node.strides)
    input_n = dims[0]
    kernel_shape = int64_array(node.kernel)
    kdims = np.where(dilations != 0, (kernel_shape - 1) * dilations + 1,
                     kernel_shape)
    oc = node.output

    if node.has_valid('auto_pad') and node.auto_pad in [
            'valid', 'same_upper', 'same_lower'
    ]:
        auto_pad = node.auto_pad
        if auto_pad == 'valid':
            od_temp = (dims[2::] - 1) * strides + kdims
        else:
            od_temp = dims[2::] * strides
    else:
        od_temp = strides * (dims[2::] -
                             1) + kdims - node.pads_begin - node.pads_end

    out_shape = [input_n, oc]
    for d in od_temp:
        out_shape.append(np.int64(d))

    node['output_shape'] = int64_array(out_shape)
    for n in node.out_nodes():
        node.out_node(n).shape = node['output_shape']

    def attr_restore(node: Node, attribute: str, value=None):
        # Function to restore some specific attr for PriorBox & PriorBoxClustered layers
        if not node.has_valid(attribute):
            node[attribute] = [] if value is None else [value]
        if isinstance(node[attribute], str):
            node[attribute] = []
        else:
            Extender.attr_to_list(node, attribute)
Example #7
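    # Parameter extender: requires 'element_type', converts it to a numpy dtype,
    # and normalizes the 'shape' attribute (an empty string becomes an empty shape).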
    def extend(op: Node):
        assert op.has_valid(
            'element_type'
        ), 'Parameter node {} is missing the element_type attribute!'.format(op.name)
        op['data_type'] = destination_type_to_np_data_type(op.element_type)
        if op.shape == '':
            op.shape = int64_array([])
        else:
            Extender.attr_to_list(op, 'shape')
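
    # Deconvolution extender: builds pad/stride/dilation arrays much like
    # common_backpropdata_extender; nodes without an 'old_type' attribute get
    # deconvolution_infer as their shape-inference function, the rest reuse
    # backpropdata_infer.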
    def extend(op: Node):
        if op.has_valid('output_padding'):
            op.output_padding = int64_array([0, 0] + op.output_padding)

        dim = len(op.strides)

        # assert dim in (2, 3), '{}D Deconvolution not supported!'.format(dim)

        if op.has_valid('pads_begin') and op.has_valid('pads_end'):

            pad = [[0, 0], [0, 0]]
            pad.extend([[op.pads_begin[i], op.pads_end[i]]
                        for i in range(dim)])

            op['pad'] = int64_array(pad)

        op['spatial_dims'] = [i + 2 for i in range(dim)]

        if not op.has_valid('dilations'):
            op['dilations'] = [1 for _ in range(dim)]
        if not op.has_valid('strides'):
            op['strides'] = [1 for _ in range(dim)]

        op['dilation'] = int64_array([1, 1] + op.dilations)
        op['stride'] = int64_array([1, 1] + op.strides)

        if not op.has_valid('old_type'):
            # op['batch_dims'] = int64_array([0])     # ?
            op['channel_dims'] = int64_array([1])

            op['input_feature_channel'] = 0
            op['output_feature_channel'] = 1

            op['kernel_spatial'] = op.kernel

            # if op.has_valid('auto_pad'):
            #     op['auto_pad'] = None

            op['infer'] = deconvolution_infer  # TODO Remove after supporting
        else:
            op['infer'] = backpropdata_infer
Example #9
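    # Converts the string-typed 'output_type' attribute into a numpy data type.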
    def extend(op: Node):
        if op.has_valid('output_type'):
            op['output_type'] = destination_type_to_np_data_type(
                op.output_type)
Example #10
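    # Marks the values output for removal when output port 0 is disconnected and
    # converts 'index_element_type' into a numpy data type.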
    def extend(op: Node):
        if op.out_port(0).disconnected():
            op['remove_values_output'] = True
        if op.has_valid('index_element_type'):
            op['index_element_type'] = destination_type_to_np_data_type(
                op.index_element_type)
Example #11
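    # Marks the values output for removal for models converted from the 'tf' or
    # 'caffe' frameworks and converts 'index_element_type' into a numpy data type.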
    def extend(op: Node):
        if op.graph.graph['cmd_params'].framework in ('tf', 'caffe'):
            op['remove_values_output'] = True
        if op.has_valid('index_element_type'):
            op['index_element_type'] = destination_type_to_np_data_type(
                op.index_element_type)
Example #12
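    # Defaults 'activations' to None and assigns Extender.const_shape_infer as
    # the shape-inference function.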
    def extend(op: Node):
        if not op.has_valid('activations'):
            op['activations'] = None
        op['infer'] = Extender.const_shape_infer
Example #13
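    # Helper that wraps a scalar attribute into a single-element list, warning
    # when the attribute is absent.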
    def attr_to_list(node: Node, attribute: str):
        if not node.has_valid(attribute):
            log.warning(
                'Attribute {} is missing in node {} with type {}!'.format(
                    attribute, node.soft_get('name'), node.soft_get('type')))
        elif not isinstance(node[attribute], list):
            node[attribute] = [node[attribute]]
Example #14
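    # Defaults 'activations' to None and assigns Extender.use_shapes_from_ir as
    # the shape-inference function.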
    def extend(op: Node):
        if not op.has_valid('activations'):
            op['activations'] = None
        op['infer'] = Extender.use_shapes_from_ir
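
    # Defaults the 'activations' attribute to None when it is absent.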
    def extend(op: Node):
        if not op.has_valid('activations'):
            op['activations'] = None