Example #1
0
    def extract(cls, node):
        """Extract an MXNet activation node into the matching IR operation.

        Dispatches on the layer's ``act_type`` attribute:
        ``prelu`` -> PReLU, ``elu`` -> Elu, ``leaky`` -> LeakyReLU
        (degenerating to plain ReLU when the slope is exactly 0).

        :param node: graph node wrapping an MXNet symbol dict
        :return: ``cls.enabled`` on success
        :raises Error: for any unsupported ``act_type``
        """
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'leaky')
        if act_type == 'prelu':
            # Fixed filler configuration mirrors the Caffe PReLU defaults.
            prelu_attrs = {
                'channel_shared': 1,
                'filler_type': 'constant',
                'filler_value': 0,
                'min': 0,
                'max': 1,
                'mean': 0,
                'std': 0,
                'sparse': -1,
                'variance_norm': "caffe.FillerParameter.FAN_IN"
            }
            PReLU.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            alpha = attrs.float('slope', 0.25)
            Elu.update_node_stat(node, {'alpha': alpha})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            if negative_slope == 0:
                # A zero slope makes LeakyReLU identical to ReLU.
                ReLU.update_node_stat(node)
            else:
                LeakyReLU.update_node_stat(node,
                                           {'negative_slope': negative_slope})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        # Use cls, not the hard-coded class name, so subclasses report
        # their own 'enabled' flag (consistent with the ONNX extractor).
        return cls.enabled
Example #2
0
 def extract(node):
     """Map a TF LeakyRelu node to LeakyReLU, or to ReLU when alpha is 0."""
     slope = node.pb.attr['alpha'].f
     if slope:
         LeakyReLU.update_node_stat(node, {'negative_slope': slope})
     else:
         # alpha == 0 degenerates to a plain ReLU.
         ReLU.update_node_stat(node)
     return __class__.enabled
Example #3
0
 def extract(cls, node):
     """Map an ONNX LeakyRelu node: zero alpha collapses to plain ReLU."""
     alpha = onnx_attr(node, 'alpha', 'f', default=1.0)
     if alpha == 0:
         ReLU.update_node_stat(node)
         return cls.enabled
     LeakyReLU.update_node_stat(node, {'negative_slope': alpha})
     return cls.enabled
Example #4
0
 def extract(node):
     """Extract a Caffe ReLU layer; negative_slope != 0 selects LeakyReLU."""
     assert node.pb, 'Protobuf layer can not be empty'
     slope = node.pb.relu_param.negative_slope
     if slope:
         LeakyReLU.update_node_stat(node, {'negative_slope': slope})
     else:
         # Zero slope means a standard ReLU.
         ReLU.update_node_stat(node)
     return True
 def extract(cls, node):
     """Register the node as a plain ReLU; no attributes to carry over."""
     attrs = {}
     ReLU.update_node_stat(node, attrs)
     return cls.enabled
 def extract(node):
     """Mark the node as a plain ReLU activation with empty attributes."""
     attrs = {}
     ReLU.update_node_stat(node, attrs)
     return __class__.enabled
Example #7
0
 def extract(cls, node):
     """Register the node as a plain ReLU activation.

     :param node: graph node to update in place
     :return: ``cls.enabled``
     """
     ReLU.update_node_stat(node)
     # Return cls.enabled rather than naming the class explicitly, so
     # subclasses report their own flag (consistent with the sibling
     # classmethod extractors in this file).
     return cls.enabled