Code example #1
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'leaky')
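        # MXNet's LeakyReLU operator covers PReLU, ELU and plain leaky ReLU,
        # selected by its 'act_type' attribute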
        if act_type == 'prelu':
            prelu_attrs = {
                'channel_shared': 1,
                'filler_type': 'constant',
                'filler_value': 0,
                'min': 0,
                'max': 1,
                'mean': 0,
                'std': 0,
                'sparse': -1,
                'variance_norm': "caffe.FillerParameter.FAN_IN"
            }
            PreluOp.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            Activation.update_node_stat(node, {'operation': act_type})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            ReLU.update_node_stat(node, {'negative_slope': negative_slope})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return LeakyReLUFrontExtractor.enabled
Code example #2
def _convert_to_leaky_relu_action(graph: nx.MultiDiGraph, matches: dict):
    """
    This function checks the given pattern and, if it satisfies all requirements, converts it to a ReLU with a negative slope (leaky ReLU)
    """
    power_op = matches['power_op']
    power_data = matches['power_data']
    input_data = matches['data']
    eltwise_op = matches['eltwise_op']
    eltwise_data = eltwise_op.out_node()

    # Check that all nodes satisfy the conversion requirements
    if len(eltwise_op.in_nodes()) > 2:
        log.debug(
            'Eltwise layer ({}) can not participate in conversion to leaky ReLU due to it has more than two '
            'inputs ({})'.format(eltwise_op.id, len(eltwise_op.in_nodes())))
        return

    if eltwise_op.soft_get('operation') != 'max':
        log.debug(
            'Eltwise layer ({}) can not participate in conversion to leaky ReLU due to it has not satisfied '
            'operation type ({}) should be max'.format(
                eltwise_op.id, eltwise_op.soft_get('operation')))
        return

    if not (power_op.has_valid('scale') and power_op.has_valid('power')
            and power_op.has_valid('shift')):
        log.debug(
            'Power layer ({}) can not participate in conversion to leaky ReLU due to missing attribute (scale, '
            'power or shift)'.format(power_op.id))
        return

    if power_op.scale > 1 or power_op.power != 1 or power_op.shift != 0:
        log.debug(
            'Power layer ({}) can not participate in conversion to leaky ReLU due to wrong parameters(Scale = {} '
            '(should be < 1), Power {} (should be = 1), Shift {} (should be = 0))'
            ''.format(power_op.id, power_op.scale, power_op.power,
                      power_op.shift))
        return

    if len(power_data.out_nodes()) > 1:
        log.debug(
            'Power layer({}) can not participate in conversion to leaky ReLU due to it has more than one consumer'
            ''.format(power_op.id))
        return

    # Disconnect data nodes from ops
    graph.remove_edge(eltwise_op.id, eltwise_data.id)
    graph.remove_edge(input_data.id, power_op.id)
    graph.remove_edge(input_data.id, eltwise_op.id)

    # Create new ReLU operation
    relu_op = ReLU(
        graph, dict(name="LeakyReLU_",
                    negative_slope=np.array(power_op.scale)))
    relu_op.create_node_with_data(inputs=[input_data], data_nodes=eltwise_data)

    log.debug(
        'Successful conversion from {} {} to ReLU with negative slope (leaky ReLU)'
        ''.format(eltwise_op.id, power_op.id))
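
The conversion above relies on the identity max(x, a*x) == LeakyReLU(x, negative_slope=a) for 0 <= a <= 1, which is why the pass requires the Power layer to have power == 1, shift == 0 and scale not greater than 1. Below is a minimal standalone sketch (not part of the Model Optimizer sources; the slope value is an arbitrary example) that checks this identity with NumPy:

# Standalone check of the identity the pattern replacement relies on:
#   Eltwise(max)(x, Power(scale=a)(x)) == LeakyReLU(x, negative_slope=a)  for 0 <= a <= 1
import numpy as np

a = 0.1                                   # hypothetical slope taken from the Power layer's 'scale'
x = np.linspace(-5.0, 5.0, 101)

eltwise_max = np.maximum(x, a * x)        # what the matched Power + Eltwise(max) subgraph computes
leaky_relu = np.where(x > 0, x, a * x)    # what the replacement ReLU with negative_slope computes

assert np.allclose(eltwise_max, leaky_relu)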
Code example #3
File: activation.py  Project: pc2/CustoNN2
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'relu')
        if act_type == 'sigmoid' or act_type == 'tanh':
            Activation.update_node_stat(node, {'operation': act_type})
        elif act_type == 'relu':
            ReLU.update_node_stat(node)
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return ActivationFrontExtractor.enabled
Code example #4
    def extract(node):
        ReLU.update_node_stat(node, {})
        return __class__.enabled
Code example #5
File: relu.py  Project: pc2/CustoNN2
    def extract(node):
        ReLU.update_node_stat(node)
        return ReLUFrontExtractor.enabled
Code example #6
File: leaky_relu_ext.py  Project: pc2/CustoNN2
    def extract(node):
        negative_slope = onnx_attr(node, 'alpha', 'f', default=1.0)
        ReLU.update_node_stat(node, {'negative_slope': negative_slope})
        return LeakyReLUFrontExtractor.enabled