Example #1
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        # MXNet exposes several activations through a single LeakyReLU symbol;
        # dispatch on 'act_type' to the matching Model Optimizer operation.
        act_type = attrs.str('act_type', 'leaky')
        if act_type == 'prelu':
            prelu_attrs = {
                'channel_shared': 1,
                'filler_type': 'constant',
                'filler_value': 0,
                'min': 0,
                'max': 1,
                'mean': 0,
                'std': 0,
                'sparse': -1,
                'variance_norm': "caffe.FillerParameter.FAN_IN"
            }
            PReLU.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            alpha = attrs.float('slope', 0.25)
            Elu.update_node_stat(node, {'alpha': alpha})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            if negative_slope == 0:
                ReLU.update_node_stat(node)
            else:
                LeakyReLU.update_node_stat(node,
                                           {'negative_slope': negative_slope})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return LeakyReLUFrontExtractor.enabled
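
The MXNet extractor above routes one symbol to four different Model Optimizer operations depending on act_type. As a point of reference only (this is not part of the Model Optimizer sources), here is a minimal NumPy sketch of the activations that the negative_slope/alpha attributes parameterize:

import numpy as np

def leaky_relu(x, negative_slope=0.25):
    # x on the positive side, negative_slope * x on the negative side
    return np.where(x >= 0, x, negative_slope * x)

def elu(x, alpha=0.25):
    # x on the positive side, alpha * (exp(x) - 1) on the negative side
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1.0))

x = np.array([-2.0, -0.5, 0.0, 1.5])
print(leaky_relu(x))        # default slope 0.25, matching the extractor's fallback
print(leaky_relu(x, 0.0))   # a zero slope degenerates to plain ReLU
print(elu(x))

With a zero slope, leaky ReLU collapses into ReLU, which is exactly the specialization the extractors above and below perform.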
Example #2
 def extract(node):
     # Read the slope from the TensorFlow node's 'alpha' attribute.
     negative_slope = node.pb.attr['alpha'].f
     if negative_slope == 0:
         ReLU.update_node_stat(node)
     else:
         LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope})
     return __class__.enabled
Example #3
 def extract(cls, node):
     # Read the slope from the ONNX node's 'alpha' attribute (this extractor falls back to 1.0).
     negative_slope = onnx_attr(node, 'alpha', 'f', default=1.0)
     if negative_slope == 0:
         ReLU.update_node_stat(node)
     else:
         LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope})
     return cls.enabled
Example #4
    def replace_pattern(self, graph: Graph, match: dict):
        mul_node = match['mul_op']
        const_node = match['const_op']
        max_node = match['max_op']
        max_name = max_node.soft_get('name', max_node.id)

        const_value = const_node.out_port(0).data.get_value()
        if const_value is None or const_value.size != 1:
            log.debug(
                'Mul layer "{}" cannot participate in conversion to LeakyReLU because constant "{}" '
                'contains more than one element: {}'.format(
                    mul_node.id, const_node.id, const_value.size))
            return

        # Create new LeakyReLU operation
        leaky_relu_node = LeakyReLU(
            graph, dict(negative_slope=const_value.item(0))).create_node()

        # Mul has two inputs: the data tensor and the constant slope. If port 0
        # is fed by the Const, the data arrives through port 1, hence the int() trick.
        data_in_port = int(
            mul_node.in_port(0).get_source().node.type == 'Const')
        mul_node.in_port(data_in_port).get_source().connect(
            leaky_relu_node.in_port(0))
        max_node.out_port(0).get_connection().set_source(
            leaky_relu_node.out_port(0))

        rename_nodes([(max_node, max_name + '/TBR'),
                      (leaky_relu_node, max_name)])

        log.debug(
            'Successful conversion from {} {} to ReLU with negative slope (leaky ReLU)'
            ''.format(max_node.id, mul_node.id))
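
The rewrite above relies on the identity max(x, a * x) == leaky_relu(x, a) for a scalar a with 0 <= a <= 1; the pass itself only verifies that the constant is a single element, so the range assumption is stated here explicitly. A quick NumPy check of the identity (illustration only, not Model Optimizer code):

import numpy as np

def leaky_relu(x, slope):
    return np.where(x >= 0, x, slope * x)

x = np.linspace(-3.0, 3.0, 13)
a = 0.2                            # the scalar constant feeding the Mul branch
fused = leaky_relu(x, a)           # what the new LeakyReLU node computes
unfused = np.maximum(x, a * x)     # what the original Mul + Maximum subgraph computed
assert np.allclose(fused, unfused)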
Example #5
 def extract(node):
     assert node.pb, 'Protobuf layer can not be empty'
     # Caffe stores the slope in relu_param; a zero slope means a plain ReLU.
     param = node.pb.relu_param
     negative_slope = param.negative_slope
     if negative_slope == 0:
         ReLU.update_node_stat(node)
     else:
         LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope})
     return True
Example #6
def _convert_to_leaky_relu_action(graph: Graph, matches: dict):
    """
    This function checks given pattern and if pattern satisfies all requirements, converts to ReLU with negative slope
    """
    mul_op = matches['mul_op']
    mul_value_data = matches['const_data']
    mul_data = matches['mul_data']
    input_data = matches['data']
    max_op = matches['max_op']
    max_data = max_op.out_node()

    # Check that all nodes satisfy the conversion requirements
    if len(max_op.in_nodes()) > 2:
        log.debug(
            'Maximum layer ({}) can not participate in conversion to leaky ReLU because it has more than two '
            'inputs ({})'.format(max_op.id, len(max_op.in_nodes())))
        return

    if mul_value_data.has_valid('value') and mul_value_data.value.size != 1:
        log.debug(
            'Mul layer ({}) can not participate in conversion to leaky ReLU because its value has more than one '
            'element: {}'.format(mul_op.id, mul_value_data.soft_get('value')))
        return

    value = mul_value_data.value.item(0)

    if len(mul_data.out_nodes()) > 1:
        log.debug(
            'Mul layer ({}) can not participate in conversion to leaky ReLU because it has more than one consumer'
            ''.format(mul_op.id))
        return

    # Disconnect data nodes from ops
    graph.remove_edge(max_op.id, max_data.id)
    graph.remove_edge(input_data.id, mul_op.id)
    graph.remove_edge(input_data.id, max_op.id)

    # Create new ReLU operation
    relu_op = LeakyReLU(graph, dict(name="LeakyReLU_", negative_slope=value))
    relu_op.create_node_with_data(inputs=[input_data], data_nodes=max_data)

    log.debug(
        'Successful conversion from {} {} to ReLU with negative slope (leaky ReLU)'
        ''.format(max_op.id, mul_op.id))
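
Both conversion passes require the Mul constant to hold exactly one element. A per-channel constant would give each channel its own slope (PReLU-style behaviour), which a single scalar negative_slope cannot reproduce, so the pass bails out in that case. A small sketch of the distinction (illustration only, not Model Optimizer code):

import numpy as np

x = np.array([[-1.0, -2.0],
              [ 3.0, -4.0]])            # two channels, two values each

scalar_slope = 0.1                      # eligible: one element, same slope for every channel
per_channel = np.array([[0.1], [0.5]])  # not eligible: a different slope per channel

print(np.maximum(x, scalar_slope * x))  # reproducible with LeakyReLU(negative_slope=0.1)
print(np.maximum(x, per_channel * x))   # needs per-channel slopes, beyond a single LeakyReLU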