Example #1
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     ReduceMax.update_node_stat(node, {
         'axis': int64_array([attrs.int('axis', 0)]),
         'keep_dims': False
     })
     return cls.enabled
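The `extract` methods in these examples are class methods cut out of their extractor classes. For context, here is a minimal sketch of the class that would wrap Example #1, assuming the usual OpenVINO Model Optimizer MXNet front-end layout; the import paths and the `ReduceMaxFrontExtractor`/`'max'` names are assumptions and may differ between MO versions.

# Minimal sketch of the wrapping extractor class, assuming OpenVINO Model
# Optimizer conventions. Import paths and the class/op names are assumptions
# and may differ between MO versions.
from openvino.tools.mo.front.extractor import FrontExtractorOp
from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.ops.ReduceOps import ReduceMax


class ReduceMaxFrontExtractor(FrontExtractorOp):
    op = 'max'        # MXNet operator name this extractor is registered for
    enabled = True    # extract() returns this flag via cls.enabled

    @classmethod
    def extract(cls, node):
        # read raw MXNet attributes and map them onto the MO ReduceMax operation
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        ReduceMax.update_node_stat(node, {
            'axis': int64_array([attrs.int('axis', 0)]),
            'keep_dims': False
        })
        return cls.enabled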
Example #2
 def extract(cls, einsum_node):
     einsum_name = einsum_node.soft_get('name', einsum_node.id)
     attrs = get_mxnet_layer_attrs(einsum_node.symbol_dict)
     equation = attrs.str('subscripts')
     normalized_equation = Einsum.normalize_equation(einsum_name, equation)
     Einsum.update_node_stat(einsum_node, {'equation': normalized_equation})
     return cls.enabled
Example #3
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'leaky')
        # MXNet's LeakyReLU operator covers several activation variants; dispatch on act_type
        if act_type == 'prelu':
            prelu_attrs = {'channel_shared': 1,
                           'filler_type': 'constant',
                           'filler_value': 0,
                           'min': 0,
                           'max': 1,
                           'mean': 0,
                           'std': 0,
                           'sparse': -1,
                           'variance_norm': "caffe.FillerParameter.FAN_IN"}
            PReLU.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            alpha = attrs.float('slope', 0.25)
            Elu.update_node_stat(node, {'alpha': alpha})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            if negative_slope == 0:
                ReLU.update_node_stat(node)
            else:
                LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope})
        elif act_type == 'gelu':
            GeLUOP.update_node_stat(node, {'approximation_mode': 'erf'})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. " +
                refer_to_faq_msg(86),
                act_type)

        return LeakyReLUFrontExtractor.enabled
Example #4
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        top_k = attrs.int("nms_topk", -1)
        nms_threshold = attrs.float("nms_threshold", 0.5)
        confidence_threshold = attrs.float("threshold", 0.01)
        clip = 1 if attrs.bool("clip", True) else 0

        node_attrs = {
            'type': 'DetectionOutput',
            'op': __class__.op,
            'keep_top_k': top_k,
            'variance_encoded_in_target': 0,
            'code_type': "caffe.PriorBoxParameter.CENTER_SIZE",
            'share_location': 1,
            'confidence_threshold': confidence_threshold,
            'background_label_id': 0,
            'nms_threshold': nms_threshold,
            'top_k': top_k,
            'decrease_label_id': 1,
            'clip_before_nms': clip,
            'normalized': 1,
        }

        DetectionOutput.update_node_stat(node, node_attrs)

        return cls.enabled
Example #5
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        spatial_scale = attrs.float('spatial_scale', None)
        group_size = attrs.int('group_size', 0)
        no_trans = attrs.bool('no_trans', False)
        trans_std = attrs.float('trans_std', 0)
        output_dim = attrs.int('output_dim', 0)
        part_size = attrs.int('part_size', 0)
        sample_per_part = attrs.int('sample_per_part', 1)
        pooled_size = attrs.int('pooled_size', 0)

        data = {
            'spatial_scale': spatial_scale,
            'mode': 'bilinear_deformable',
            'group_size': group_size,
            'output_dim': output_dim,
            'trans_std': trans_std,
            'part_size': part_size,
            'spatial_bins_x': sample_per_part,
            'spatial_bins_y': sample_per_part,
            'pooled_width': pooled_size,
            'pooled_height': pooled_size,
        }

        DeformablePSROIPoolingOp.update_node_stat(node, data)
        return cls.enabled
Example #6
    def extract(cls, node: Node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        AttributedClamp.update_node_stat(node, {
            'min': attrs.float('a_min', None),
            'max': attrs.float('a_max', None)
        })
        return cls.enabled
Example #7
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axes = list(attrs.tuple("axes", int, []))
        node_attrs = {'axes': axes}

        # update the attributes of the node
        SliceLike.update_node_stat(node, node_attrs)
        return cls.enabled
Example #8
 def extract(self, node):
     supported = False
     op_attrs = None
     node_attrs = get_mxnet_layer_attrs(node.symbol_dict)
     op_type = node_attrs.str('op_type', None)
     if op_type and op_type in MXNetCustomFrontExtractorOp.registered_ops:
         supported, op_attrs = MXNetCustomFrontExtractorOp.registered_ops[op_type]().extract(node)
     return supported, op_attrs
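Example #8 is a generic dispatcher: it reads the `op_type` attribute of an MXNet `Custom` node and delegates to whatever is registered in `MXNetCustomFrontExtractorOp.registered_ops`. Below is a hedged sketch of what such a registered extractor could look like; the class name, the `'MyCustomOp'` string, the `alpha` attribute, and the import paths are purely illustrative assumptions, and the class is assumed to be picked up by MO's normal extractor registration machinery. Only the `(supported, attrs)` return contract follows from the dispatch code above.

# Hypothetical custom-op extractor that the dispatcher in Example #8 would call.
# Class name, op string, attribute names and import paths are assumptions made
# for illustration; only the (supported, attrs) return contract is taken from
# the dispatch code above.
from openvino.tools.mo.front.extractor import MXNetCustomFrontExtractorOp
from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs


class MyCustomOpExtractor(MXNetCustomFrontExtractorOp):
    op = 'MyCustomOp'   # must match the node's 'op_type' attribute
    enabled = True

    def extract(self, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        node_attrs = {'alpha': attrs.float('alpha', 1.0)}  # illustrative attribute
        return True, node_attrs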
Example #9
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     shift = list(attrs.tuple("shift", int, None))
     axis = None
     if attrs.has("axis"):
         axis = list(attrs.tuple("axis", int, None))
     AttributedRoll.update_node_stat(node, {'axes': axis, 'shift': shift})
     return cls.enabled
Example #10
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        Squeeze.update_node_stat(node, {
            'squeeze_dims': attrs.int("axis", None),
            'keep_at_least_1d': True
        })
        return cls.enabled
Example #11
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        data = {
            'axis': attrs.int("dim", 1),
        }

        # update the attributes of the node
        Concat.update_node_stat(node, data)
        return cls.enabled
Example #12
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {'axis': attrs.int('axis', 0)}

        # update the attributes of the node
        PackOp.update_node_stat(node, update_attrs)

        return cls.enabled
Example #13
 def extract(cls, node: Node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     ArangeLikeOp.update_node_stat(node, {
         'start': attrs.float('start', 0),
         'repeat': attrs.int('repeat', 1),
         'step': attrs.float('step', 1),
         'axis': attrs.int('axis', None),
     })
     return cls.enabled
Example #14
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        output_size = attrs.tuple("output_size", int, None)
        if len(output_size) == 1:
            output_size = (output_size[0], output_size[0])

        data = {'op': 'Pooling', 'output_size': output_size}
        AdaptiveAvgPooling.update_node_stat(node, data)
        return cls.enabled
Example #15
 def extract(cls, node: Node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     Range.update_node_stat(node, {
         'start': attrs.int('start', 0),
         'stop': attrs.int('stop', 0),
         'repeat': attrs.int('repeat', 1),
         'step': attrs.float('step', 1),
         'dtype': np.dtype(attrs.str('dtype', 'float32'))
     })
     return cls.enabled
Example #16
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        node_attrs = {
            'crop_begin': mo_array(attrs.tuple("begin", int, ())),
            'crop_end': mo_array(attrs.tuple("end", int, ())),
            'step': mo_array(attrs.tuple("step", int, ())),
        }

        MXSlice.update_node_stat(node, node_attrs)
        return cls.enabled
Example #17
    def extract(cls, node: Node):
        attr = get_mxnet_layer_attrs(node.symbol_dict)

        node_attrs = {
            'epsilon': attr.float('eps', 9.99999975e-06),
            'axis': attr.int('axis', -1),
            'output_mean_var': attr.bool('output_mean_var', False)
        }
        LayerNorm.update_node_stat(node, node_attrs)
        return cls.enabled
Example #18
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {
            'axis': attrs.int('axis', 0),
            'mx_out_type': attrs.dtype('dtype', None)
        }

        MXNetCumSum.update_node_stat(node, update_attrs)
        return cls.enabled
Example #19
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     offset = attrs.tuple("offset", int, ())
     axis = attrs.int("num_args", 0)
     node_attrs = {
         'axis': axis,
         'offset': list(offset),
         'dim': None,
     }
     Crop.update_node_stat(node, node_attrs)
     return cls.enabled
Example #20
    def extract(cls, node):
        attr = get_mxnet_layer_attrs(node.symbol_dict)

        kernel = attr.tuple("kernel", int, None)
        stride = attr.tuple("stride", int,
                            tuple(np.ones(len(kernel), dtype=np.int64)))
        padding = attr.tuple("pad", int,
                             tuple(np.zeros(len(kernel), dtype=np.int64)))
        dilate = attr.tuple("dilate", int,
                            tuple(np.ones(len(kernel), dtype=np.int64)))
        group = attr.int("num_group", 1)
        output = attr.int("num_filter", None)
        bias_term = not attr.bool("no_bias", True)
        target_shape = attr.tuple("target_shape", int, None)
        if target_shape:
            target_shape = int64_array(target_shape)

        final_dilations = int64_array([1, 1, *dilate]) if dilate is not None else None
        node_attrs = {
            'op': __class__.op,
            'type': 'Deconvolution',
            'bias_addable': True,
            'bias_term': bias_term,
            'pad': int64_array([[0, 0], [0, 0],
                                *[[pad, pad] for pad in padding]]),
            'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]),
            'dilation': final_dilations,
            'output_spatial_shape': target_shape,
            'original_output_spatial_shape': target_shape,
            'output_shape': None,
            'stride': int64_array([1, 1, *[s for s in stride]]),
            'group': group,
            'output': output,
            'kernel_spatial': int64_array([k for k in kernel]),
            'input_feature_channel': 1,
            'output_feature_channel': 0,
            'kernel_spatial_idx': None,
            'reshape_kernel': True,
            'spatial_dims': None,
            'channel_dims': int64_array([1]),
            'batch_dims': int64_array([0]),
            'layout': 'NCHW',
            'get_pad': DeconvFrontExtractor.get_pad,
        }

        output_padding = attr.tuple("adj", int, None)
        if target_shape is None and output_padding:
            node_attrs["output_padding"] = int64_array(
                [0, 0, *[s for s in output_padding]])

        # update the attributes of the node
        Convolution.update_node_stat(node, node_attrs)
        return cls.enabled
Example #21
 def extract(cls, node):
     attr = get_mxnet_layer_attrs(node.symbol_dict)
     num_hidden = attr.int('num_hidden', None)
     assert num_hidden is not None, "{} node with no `num_hidden` parameter found".format(cls.op)
     attrs = {
         'out-size': num_hidden,
         'transpose_weights': True,
         'flatten': attr.bool('flatten', True)
     }
     FullyConnected.update_node_stat(node, attrs)
     return cls.enabled
Example #22
    def extract(cls, node: Node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axis = attrs.int('axis', 0)
        repeats = attrs.int('repeats', None)
        assert repeats is not None and repeats > 0, \
            '`repeat` op requires positive `repeats` attribute, but it is {} for node {}'.format(repeats, node.name)

        MXRepeat.update_node_stat(node, {
            'axis': axis,
            'repeats': repeats,
        })
        return cls.enabled
Example #23
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {
            'type': 'SoftMax',
            'axis': attrs.int("axis", -1),
            'temperature': attrs.float('temperature', 1.0)
        }

        # update the attributes of the node
        Softmax.update_node_stat(node, update_attrs)
        return cls.enabled
Example #24
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        scale = attrs.int("scale", 1)
        num_filter = attrs.int("num_filter", 0)
        mode = attrs.str("sample_type", None)
        if mode == 'nearest':
            node_attrs = {
                'factor': attrs.int("scale", 1),
                'mode': mode,
                'antialias': 0,
                'axes': int64_array([2, 3]),
            }
            Interpolate.update_node_stat(node, node_attrs)
        elif mode == 'bilinear':
            """
            Bilinear UpSampling uses deconvolution algorithm under the hood.
            For MXNet Bilinear UpSampling op just wrapper over Deconvolution op.
            Inputs data:
                input1 - input data
                input2 - deconvolution weight
            """
            kernel = 2 * scale - scale % 2
            stride = scale
            pad = math.ceil((scale - 1) / 2)
            num_group = num_filter

            node_attrs = {
                'op': __class__.op,
                'type': 'Deconvolution',
                'bias_addable': True,
                'bias_term': False,
                'pad': int64_array([[0, 0], [0, 0], [pad, pad], [pad, pad]]),
                'pad_spatial_shape': int64_array([[pad, pad], [pad, pad]]),
                'dilation': None,
                'output_spatial_shape': None,
                'output_shape': None,
                'stride': int64_array([1, 1, stride, stride]),
                'group': num_group,
                'output': num_filter,
                'kernel_spatial': int64_array([kernel, kernel]),
                'input_feature_channel': 0,
                'output_feature_channel': 1,
                'kernel_spatial_idx': None,
                'reshape_kernel': True,
                'spatial_dims': None,
                'channel_dims': int64_array([1]),
                'batch_dims': int64_array([0]),
                'layout': 'NCHW',
                'get_pad': DeconvFrontExtractor.get_pad,
            }
            Convolution.update_node_stat(node, node_attrs)
        return cls.enabled
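As a quick sanity check on the bilinear branch above, the following sketch just re-evaluates the kernel/stride/pad formulas from the extractor for a couple of scale factors; the numbers follow directly from the code in Example #24.

import math

# Re-evaluate the bilinear-UpSampling deconvolution parameters from Example #24.
for scale in (2, 3):
    kernel = 2 * scale - scale % 2      # 4 for scale=2, 5 for scale=3
    stride = scale                      # 2 and 3
    pad = math.ceil((scale - 1) / 2)    # 1 in both cases
    print(scale, kernel, stride, pad)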
Example #25
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        kernel = attrs.tuple("kernel", int, None)
        stride = attrs.tuple("stride", int,
                             tuple(np.ones(len(kernel), dtype=np.int64)))
        padding = attrs.tuple("pad", int,
                              tuple(np.zeros(len(kernel), dtype=np.int64)))
        method = attrs.str("pool_type", None)
        rt = 'floor'

        data = {
            'window': np.array([1, 1, *[k for k in kernel]], dtype=np.int64),
            'stride': np.array([1, 1, *[s for s in stride]], dtype=np.int64),
            'pad': np.array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]], dtype=np.int64),
            'pad_spatial_shape': np.array([[pad, pad] for pad in padding], dtype=np.int64),
            'pool_method': method,
            'exclude_pad': False,
            'output_spatial_shape': None,
            'spatial_dims': None,
            'channel_dims': np.array([1], dtype=np.int64),
            'batch_dims': np.array([0], dtype=np.int64),
            'layout': 'NCHW',
            'rounding_type': rt,
        }

        pooling_conv = attrs.str("pooling_convention", 'valid')
        if pooling_conv:
            data["pooling_convention"] = pooling_conv
            if pooling_conv == 'full':
                data["rounding_type"] = 'ceil'

        global_pool = attrs.bool("global_pool", False)
        if global_pool:
            data["global_pool"] = global_pool

        # update the attributes of the node
        Pooling.update_node_stat(node, data)
        return cls.enabled
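The `pooling_convention` check above switches the rounding type from 'floor' to 'ceil' when the convention is 'full'. The small sketch below shows the effect on the output spatial size using the usual pooling output-size formula; the concrete input/kernel/stride/pad values are arbitrary illustrative numbers.

import math

# Usual pooling output-size formula: (in + 2*pad - kernel) / stride + 1,
# rounded according to the pooling convention. Values are illustrative.
in_size, kernel, stride, pad = 8, 3, 2, 0
raw = (in_size + 2 * pad - kernel) / stride + 1   # 3.5 for these values
print(math.floor(raw))  # 'valid' convention -> rounding_type 'floor' -> 3
print(math.ceil(raw))   # 'full' convention  -> rounding_type 'ceil'  -> 4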
Example #26
def extract(node):
    attrs = get_mxnet_layer_attrs(node.symbol_dict)
    dim1 = attrs.int("dim1", 0)
    dim2 = attrs.int("dim2", 0)

    update_attrs = {
        'dim1': dim1,
        'dim2': dim2,
    }

    # update the attributes of the node
    SwapAxis.update_node_stat(node, update_attrs)
    return True
Example #27
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     shape = list(attrs.tuple("shape", int, None))
     high = attrs.float("high", 1.0)
     low = attrs.float("low", 0.0)
     out_type = attrs.dtype("dtype", np.float32)
     new_attrs = {
         'shape': shape,
         'min_val': out_type(low),
         'max_val': out_type(high),
         'output_type': out_type
     }
     AttributedRandomUniform.update_node_stat(node, new_attrs)
     return cls.enabled
Example #28
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        pads = np.array(list(attrs.tuple('pad_width', int, None)))
        pads = pads.reshape([-1, 2])
        value = attrs.float('constant_value', 0.0)

        node_attrs = {
            'pads': pads,
            'mode': attrs.str('mode', None),
            'fill_value': value,
        }

        AttributedPad.update_node_stat(node, node_attrs)
        return cls.enabled
Example #29
    def extract(cls, node):
        attr = get_mxnet_layer_attrs(node.symbol_dict)

        kernel = attr.tuple("kernel", int, None)
        stride = attr.tuple("stride", int,
                            tuple(np.ones(len(kernel), dtype=np.int64)))
        padding = attr.tuple("pad", int,
                             tuple(np.zeros(len(kernel), dtype=np.int64)))
        dilate = attr.tuple("dilate", int,
                            tuple(np.ones(len(kernel), dtype=np.int64)))
        num_deformable_group = attr.int("num_deformable_group", 1)
        num_group = attr.int("num_group", 1)
        output = attr.int("num_filter", None)
        bias_term = attr.str("no_bias", 'False') == 'False'

        final_dilations = int64_array([1, 1, *dilate]) if dilate is not None else None

        node_attrs = {
            'op': __class__.op,
            'bias_addable': True,
            'bias_term': bias_term,
            'pad': int64_array([[0, 0], [0, 0],
                                *[[pad, pad] for pad in padding]]),
            'pad_spatial_shape': int64_array([[pad, pad] for pad in padding]),
            'dilation': final_dilations,
            'output_spatial_shape': None,
            'output_shape': None,
            'stride': int64_array([1, 1, *[s for s in stride]]),
            'group': num_group,
            'deformable_group': num_deformable_group,
            'output': output,
            'kernel_spatial': int64_array([k for k in kernel]),
            'bilinear_interpolation_pad': True,
            'input_feature_channel': 1,
            'output_feature_channel': 0,
            'kernel_spatial_idx': None,
            'reshape_kernel': True,
            'weights_index': 2,
            'in_ports_count': 4,
            'spatial_dims': None,
            'channel_dims': int64_array([1]),
            'batch_dims': int64_array([0]),
            'layout': 'NCHW',
        }

        # update the attributes of the node
        DeformableConvolution.update_node_stat(node, node_attrs)
        return cls.enabled
Example #30
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        alpha = attrs.float("alpha", 0.0001)
        beta = attrs.float("beta", 0.75)
        knorm = attrs.float("knorm", 2.0)
        nsize = attrs.int("nsize", None)

        AttributedLRN.update_node_stat(node, {
            'alpha': alpha,
            'beta': beta,
            'bias': knorm,
            'local_size': nsize,
        })
        return cls.enabled