Example #1
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        pre_nms_topn = attrs.int('rpn_pre_nms_top_n', 6000)
        post_nms_topn = attrs.int('rpn_post_nms_top_n', 300)
        nms_thresh = attrs.float('threshold', 0.7)
        min_size = attrs.int('rpn_min_size', 16)
        scale = attrs.tuple("scales", float, (4, 8, 16, 32))
        ratio = attrs.tuple("ratios", float, (0.5, 1, 2))
        feat_stride = attrs.int('feature_stride', 16)

        update_attrs = {
            'feat_stride': feat_stride,
            'ratio': np.array(ratio),
            'min_size': min_size,
            'scale': np.array(scale),
            'pre_nms_topn': pre_nms_topn,
            'post_nms_topn': post_nms_topn,
            'nms_thresh': nms_thresh,
            'base_size': feat_stride
        }

        # update the attributes of the node
        Op.get_op_class_by_name('Proposal').update_node_stat(
            node, update_attrs)
        return __class__.enabled
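Each snippet below is the body of a front extractor from the OpenVINO Model Optimizer MXNet frontend. For orientation, here is a minimal sketch of the class that usually wraps such an extract method, assuming the common FrontExtractorOp pattern; the class name, op string and import paths are illustrative and may differ between Model Optimizer versions.

    # Hypothetical wrapper around an extract() like the one in Example #1.
    from mo.front.extractor import FrontExtractorOp
    from mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs

    class ProposalFrontExtractor(FrontExtractorOp):
        op = '_contrib_Proposal'  # MXNet symbol name this extractor handles (assumed)
        enabled = True            # registration flag checked by the Model Optimizer

        @classmethod
        def extract(cls, node):
            attrs = get_mxnet_layer_attrs(node.symbol_dict)
            # ... read attributes and call <Op>.update_node_stat(node, update_attrs) ...
            return cls.enabled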
Example #2
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        spatial_scale = attrs.float('spatial_scale', None)
        group_size = attrs.int('group_size', 0)
        no_trans = attrs.bool('no_trans', False)
        trans_std = attrs.float('trans_std', 0)
        output_dim = attrs.int('output_dim', 0)
        part_size = attrs.int('part_size', 0)
        sample_per_part = attrs.int('sample_per_part', 1)
        pooled_size = attrs.int('pooled_size', 0)

        data = {
            'spatial_scale': spatial_scale,
            'mode': 'bilinear_deformable',
            'group_size': group_size,
            'output_dim': output_dim,
            'trans_std': trans_std,
            'part_size': part_size,
            'spatial_bins_x': sample_per_part,
            'spatial_bins_y': sample_per_part,
            'pooled_width': pooled_size,
            'pooled_height': pooled_size,
        }

        DeformablePSROIPoolingOp.update_node_stat(node, data)
        return cls.enabled
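Every example reads layer parameters through the object returned by get_mxnet_layer_attrs, which exposes typed getters over MXNet's string-valued attributes. A simplified, illustrative stand-in for those accessors (not the Model Optimizer implementation) looks roughly like this:

    class SimpleAttrs:
        # Illustrative only: parses raw string attributes on demand.
        def __init__(self, raw: dict):
            self._raw = raw  # e.g. {'spatial_scale': '0.0625', 'pooled_size': '7'}

        def has(self, name):
            return name in self._raw

        def int(self, name, default):
            return int(self._raw[name]) if name in self._raw else default

        def float(self, name, default):
            return float(self._raw[name]) if name in self._raw else default

        def bool(self, name, default):
            return self._raw.get(name, str(default)).lower() in ('1', 'true')

        def str(self, name, default=None):
            return self._raw.get(name, default)

        def tuple(self, name, valtype, default):
            if name not in self._raw:
                return default
            return tuple(valtype(v) for v in self._raw[name].strip('()').split(','))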
Example #3
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'leaky')
        if act_type == 'prelu':
            prelu_attrs = {
                'channel_shared': 1,
                'filler_type': 'constant',
                'filler_value': 0,
                'min': 0,
                'max': 1,
                'mean': 0,
                'std': 0,
                'sparse': -1,
                'variance_norm': "caffe.FillerParameter.FAN_IN"
            }
            PReLU.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            alpha = attrs.float('slope', 0.25)
            Elu.update_node_stat(node, {'alpha': alpha})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            if negative_slope == 0:
                ReLU.update_node_stat(node)
            else:
                LeakyReLU.update_node_stat(node,
                                           {'negative_slope': negative_slope})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return LeakyReLUFrontExtractor.enabled
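For reference, the three activations dispatched above compute the following; this is just standard math written as a NumPy sketch, not Model Optimizer code.

    import numpy as np

    def leaky_relu(x, negative_slope=0.25):
        return np.where(x > 0, x, negative_slope * x)

    def elu(x, alpha=0.25):
        return np.where(x > 0, x, alpha * (np.exp(x) - 1))

    def prelu(x, slope):
        # same shape as leaky ReLU, but the slope is a learned (shared or per-channel) parameter
        return np.where(x > 0, x, slope * x)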
Example #4
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        mode = attrs.str('mode', None)
        state_size = attrs.int('state_size', None)
        bidirectional = attrs.bool('bidirectional', False)
        num_layers = attrs.int('num_layers', 1)

        node_attrs = {
            'batch_dim': 1,
            'sequence_dim': 0,
            'blobs_wrb': False,
            'hidden_size': state_size,
            'has_num_directions': bidirectional,
            'format': 'mxnet',
        }

        if bidirectional:
            raise Error(
                "Operation RNN with bidirectional not supported. num_directions = 1 is supported only " +
                refer_to_faq_msg(86))

        if num_layers > 1:
            raise Error(
                "Operation RNN with num_layers more then one not supported. num_layers = 1 is supported only " +
                refer_to_faq_msg(86))

        if mode == 'lstm':
            LSTMSequence.update_node_stat(node, node_attrs)
        else:
            raise Error(
                "Operation RNN with mode '{}' not supported. Please register RNN as custom op. " +
                refer_to_faq_msg(86),
                mode)
        return __class__.enabled
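The batch_dim = 1 and sequence_dim = 0 values above reflect MXNet's default time-major RNN layout ('TNC': sequence, batch, channels). A tiny shape sketch with arbitrary illustrative sizes:

    import numpy as np

    seq_len, batch, input_size = 10, 4, 32          # arbitrary sizes for illustration
    x = np.zeros((seq_len, batch, input_size))      # 'TNC' layout expected by MXNet RNN symbols
    assert x.shape[0] == seq_len and x.shape[1] == batch   # sequence_dim = 0, batch_dim = 1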
Example #5
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     ReduceMax.update_node_stat(node, {
         'axis': int64_array([attrs.int('axis', 0)]),
         'keep_dims': False
     })
     return cls.enabled
Example #6
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        num_classes = 21
        top_k = attrs.int("nms_topk", -1)
        keep_top_k = top_k
        variance_encoded_in_target = 0
        code_type = "caffe.PriorBoxParameter.CENTER_SIZE"
        share_location = 1
        nms_threshold = attrs.float("nms_threshold", 0.5)
        confidence_threshold = attrs.float("threshold", 0.01)
        background_label_id = 0
        clip = 0 if not attrs.bool("clip", True) else 1

        node_attrs = {
            'type': 'DetectionOutput',
            'op': __class__.op,
            'num_classes': num_classes,
            'keep_top_k': keep_top_k,
            'variance_encoded_in_target': variance_encoded_in_target,
            'code_type': code_type,
            'share_location': share_location,
            'confidence_threshold': confidence_threshold,
            'background_label_id': background_label_id,
            'nms_threshold': nms_threshold,
            'top_k': top_k,
            'decrease_label_id': 1,
            'clip_before_nms': clip,
            'normalized': 1,
        }

        DetectionOutput.update_node_stat(node, node_attrs)

        return cls.enabled
Example #7
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        spatial_scale = attrs.float('spatial_scale', None)
        group_size = attrs.int('group_size', 0)
        no_trans = attrs.bool('no_trans', False)
        trans_std = attrs.float('trans_std', 0)
        output_dim = attrs.int('output_dim', 0)
        part_size = attrs.int('part_size', 0)
        sample_per_part = attrs.int('sample_per_part', 1)
        pooled_size = attrs.int('pooled_size', 0)

        data = {
            'spatial_scale': spatial_scale,
            'mode': 'bilinear_deformable',
            'group_size': group_size,
            'output_dim': output_dim,
            'trans_std': trans_std,
            'part_size': part_size,
            'spatial_bins_x': sample_per_part,
            'spatial_bins_y': sample_per_part,
            'pooled_width': pooled_size,
            'pooled_height': pooled_size,
        }

        # update the attributes of the node
        if not node.graph.graph['cmd_params'].generate_experimental_IR_V10:
            data.update({'no_trans': no_trans})
            PSROIPoolingOp.update_node_stat(node, data)
        else:
            DeformablePSROIPoolingOp.update_node_stat(node, data)
        return cls.enabled
Example #8
 def extract(cls, einsum_node):
     einsum_name = einsum_node.soft_get('name', einsum_node.id)
     attrs = get_mxnet_layer_attrs(einsum_node.symbol_dict)
     equation = attrs.str('subscripts')
     normalized_equation = Einsum.normalize_equation(einsum_name, equation)
     Einsum.update_node_stat(einsum_node, {'equation': normalized_equation})
     return cls.enabled
Example #9
 def extract(cls, node: Node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     MXRepeat.update_node_stat(node, {
         "axis": attrs.int('axis', 0),
         "repeats": attrs.int('repeats', 0),
     })
     return cls.enabled
Example #10
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axes = list(attrs.tuple("axes", int, []))
        node_attrs = {'axes': axes}

        # update the attributes of the node
        SliceLike.update_node_stat(node, node_attrs)
        return cls.enabled
Example #11
    def extract(cls, node: Node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        Clamp.update_node_stat(node, {
            'min': attrs.float('a_min', None),
            'max': attrs.float('a_max', None),
        })
        return cls.enabled
Example #12
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        Squeeze.update_node_stat(node, {
            'squeeze_dims': attrs.int("axis", None),
            'keep_at_least_1d': True
        })
        return __class__.enabled
Example #13
    def extract(node: Node):
        attr = get_mxnet_layer_attrs(node.symbol_dict)
        node_attrs = {
            'epsilon': attr.float('eps', 0.001)
        }

        InstanceNormalization.update_node_stat(node, node_attrs)
        return __class__.enabled
Example #14
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     shift = list(attrs.tuple("shift", int, None))
     axis = None
     if attrs.has("axis"):
         axis = list(attrs.tuple("axis", int, None))
     AttributedRoll.update_node_stat(node, {'axes': axis, 'shift': shift})
     return cls.enabled
Example #15
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        data = {
            'axis': attrs.int("dim", 1),
        }

        # update the attributes of the node
        Concat.update_node_stat(node, data)
        return cls.enabled
Example #16
 def extract(self, node):
     supported = False
     op_attrs = None
     node_attrs = get_mxnet_layer_attrs(node.symbol_dict)
     op_type = node_attrs.str('op_type', None)
     if op_type and op_type in MXNetCustomFrontExtractorOp.registered_ops:
         supported, op_attrs = MXNetCustomFrontExtractorOp.registered_ops[
             op_type]().extract(node)
     return supported, op_attrs
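This dispatcher looks up extractors registered for MXNet's generic Custom operator by their op_type attribute. A minimal sketch of registering one such extractor, following the MXNetCustomFrontExtractorOp pattern; the class name, op_type string, attribute and import path are assumptions.

    from mo.front.extractor import MXNetCustomFrontExtractorOp
    from mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs

    class MyCustomOpExtractor(MXNetCustomFrontExtractorOp):
        op = 'MyCustomOp'  # must match the op_type= value passed to mx.symbol.Custom (assumed name)

        def extract(self, node):
            attrs = get_mxnet_layer_attrs(node.symbol_dict)
            node_attrs = {'alpha': attrs.float('alpha', 1.0)}  # illustrative attribute only
            # the dispatcher above expects a (supported, attrs) pair back
            return True, node_attrs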
Example #17
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        output_size = attrs.tuple("output_size", int, None)
        if len(output_size) == 1:
            output_size = (output_size[0], output_size[0])

        data = {'op': 'Pooling', 'output_size': output_size}
        AdaptiveAvgPooling.update_node_stat(node, data)
        return cls.enabled
Example #18
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {'axis': attrs.int('axis', 0)}

        # update the attributes of the node
        PackOp.update_node_stat(node, update_attrs)

        return cls.enabled
Example #19
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axes = attrs.tuple("axes", int, [])
        offset = [0 for i in range(0, axes[-1])]
        node_attrs = {'axis': 1, 'offset': offset, 'dim': offset, 'axes': axes}

        # update the attributes of the node
        Crop.update_node_stat(node, node_attrs)
        return __class__.enabled
Example #20
 def extract(cls, node):
     attr = get_mxnet_layer_attrs(node.symbol_dict)
     num_hidden = attr.int('num_hidden', None)
     assert num_hidden is not None, "{} node with no `num_hidden` parameter found".format(cls.op)
     attrs = {
         'out-size': num_hidden,
         'transpose_weights': True,
     }
     FullyConnected.update_node_stat(node, attrs)
     return cls.enabled
Example #21
 def extract(cls, node: Node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     Range.update_node_stat(node, {
         'start': attrs.int('start', 0),
         'stop': attrs.int('stop', 0),
         'repeat': attrs.int('repeat', 1),
         'step': attrs.float('step', 1),
         'dtype': np.dtype(attrs.str('dtype', 'float32'))
     })
     return cls.enabled
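MXNet's _arange symbol has an extra repeat parameter that tiles each generated value. Assuming the standard semantics, the extracted attributes correspond roughly to this NumPy equivalent:

    import numpy as np

    def mx_arange_like(start=0, stop=4, step=1.0, repeat=2, dtype='float32'):
        # every element of the range is repeated `repeat` times: [0, 0, 1, 1, 2, 2, 3, 3]
        return np.repeat(np.arange(start, stop, step), repeat).astype(np.dtype(dtype))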
Example #22
 def extract(node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     data = {
         'axis': [attrs.int('axis', 0)],
         'reduce_type': 'max',
         'keep_dims': False
     }
     # update the attributes of the node
     Reduce.update_node_stat(node, data)
     return __class__.enabled
Example #23
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {
            'axis': attrs.int('axis', 0),
            'mx_out_type': attrs.dtype('dtype', None)
        }

        MXNetCumSum.update_node_stat(node, update_attrs)
        return cls.enabled
Example #24
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axis = attrs.int("axis", 1)
        num_outputs = attrs.int("num_outputs", 0)

        node_attrs = {'axis': axis, 'num_split': num_outputs}

        # update the attributes of the node
        Split.update_node_stat(node, node_attrs)
        return __class__.enabled
Example #25
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        node_attrs = {
            'crop_begin': np.array(attrs.tuple("begin", int, ())),
            'crop_end': np.array(attrs.tuple("end", int, ())),
            'step': np.array(attrs.tuple("step", int, ())),
        }

        MXSlice.update_node_stat(node, node_attrs)
        return cls.enabled
Example #26
 def extract(cls, node):
     attrs = get_mxnet_layer_attrs(node.symbol_dict)
     offset = attrs.tuple("offset", int, ())
     axis = attrs.int("num_args", 0)
     node_attrs = {
         'axis': axis,
         'offset': list(offset),
         'dim': None,
     }
     Crop.update_node_stat(node, node_attrs)
     return cls.enabled
Example #27
    def extract(cls, node: Node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        axis = attrs.int('axis', 0)
        repeats = attrs.int('repeats', None)
        assert repeats is not None and repeats > 0, \
            '`repeat` op requires positive `repeats` attribute, but it is {} for node {}'.format(repeats, node.name)

        MXRepeat.update_node_stat(node, {
            'axis': axis,
            'repeats': repeats,
        })
        return cls.enabled
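MXNet's repeat operator, which MXRepeat models, behaves like NumPy's repeat along an axis; a quick illustration of what axis and repeats mean:

    import numpy as np

    x = np.array([[1, 2], [3, 4]])
    # repeats=2 along axis=1 duplicates every element along the chosen axis:
    print(np.repeat(x, repeats=2, axis=1))  # [[1 1 2 2]
                                            #  [3 3 4 4]]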
Example #28
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        node_attrs = {
            'type': 'Resample',
            'factor': attrs.int("scale", 1),
            'resample_type': 'caffe.ResampleParameter.NEAREST',
            'antialias': 0
        }
        # update the attributes of the node
        ResampleOp.update_node_stat(node, node_attrs)
        return __class__.enabled
Example #29
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        update_attrs = {
            'type': 'SoftMax',
            'axis': attrs.int("axis", -1),
            'temperature': attrs.float('temperature', 1.0)
        }

        # update the attributes of the node
        Softmax.update_node_stat(node, update_attrs)
        return cls.enabled
Example #30
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)

        kernel = attrs.tuple("kernel", int, None)
        stride = attrs.tuple("stride", int,
                             tuple(np.ones(len(kernel), dtype=np.int64)))
        padding = attrs.tuple("pad", int,
                              tuple(np.zeros(len(kernel), dtype=np.int64)))
        method = attrs.str("pool_type", None)
        rt = 'floor'

        data = {
            'window':
            np.array([1, 1, *[k for k in kernel]], dtype=np.int64),
            'stride':
            np.array([1, 1, *[s for s in stride]], dtype=np.int64),
            'pad':
            np.array([[0, 0], [0, 0], *[[pad, pad] for pad in padding]],
                     dtype=np.int64),
            'pad_spatial_shape':
            np.array([[pad, pad] for pad in padding], dtype=np.int64),
            'pool_method':
            method,
            'exclude_pad':
            'false',
            'output_spatial_shape':
            None,
            'spatial_dims':
            None,
            'channel_dims':
            np.array([1], dtype=np.int64),
            'batch_dims':
            np.array([0], dtype=np.int64),
            'layout':
            'NCHW',
            'rounding_type':
            rt,
        }

        pooling_conv = attrs.str("pooling_convention", 'valid')
        if pooling_conv:
            data["pooling_convention"] = pooling_conv
            if pooling_conv == 'full':
                data["rounding_type"] = 'ceil'

        global_pool = attrs.bool("global_pool", False)
        if global_pool:
            data["global_pool"] = global_pool

        # update the attributes of the node
        Pooling.update_node_stat(node, data)
        return __class__.enabled
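The rounding_type derived from pooling_convention determines how the output spatial size is computed. A small sketch of the standard formula for one spatial dimension (this mirrors common pooling shape math, not Model Optimizer internals):

    import math

    def pooled_dim(in_size, kernel, stride, pad, rounding_type='floor'):
        rounding = math.floor if rounding_type == 'floor' else math.ceil
        return int(rounding((in_size + 2 * pad - kernel) / stride)) + 1

    # pooling_convention='valid' -> floor, 'full' -> ceil (as in the extractor above)
    print(pooled_dim(14, kernel=3, stride=2, pad=0, rounding_type='floor'))  # 6
    print(pooled_dim(14, kernel=3, stride=2, pad=0, rounding_type='ceil'))   # 7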