Example #1
0
    def extract(node):
        """Fill *node* with ELU activation attributes read from its Caffe proto layer."""
        # Collect whatever the elu_param message carries, then tag the op kind.
        elu_attrs = collect_attributes(node.pb.elu_param)
        elu_attrs['operation'] = 'elu'
        Activation.update_node_stat(node, elu_attrs)
        return ELUFrontExtractor.enabled
Example #2
0
    def extract(node):
        """Dispatch a MXNet LeakyReLU-family node to the matching internal operation.

        Supported act_type values: 'leaky' (default), 'elu', 'prelu'; anything
        else raises Error.
        """
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        act_type = attrs.str('act_type', 'leaky')

        if act_type == 'leaky':
            # MXNet's default slope for LeakyReLU is 0.25.
            ReLU.update_node_stat(node, {'negative_slope': attrs.float('slope', 0.25)})
        elif act_type == 'elu':
            Activation.update_node_stat(node, {'operation': act_type})
        elif act_type == 'prelu':
            # Filler defaults written in caffe.FillerParameter terms.
            PreluOp.update_node_stat(node, dict(
                channel_shared=1,
                filler_type='constant',
                filler_value=0,
                min=0,
                max=1,
                mean=0,
                std=0,
                sparse=-1,
                variance_norm="caffe.FillerParameter.FAN_IN",
            ))
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return LeakyReLUFrontExtractor.enabled
Example #3
0
    def extract(node):
        """Map a MXNet Activation node onto the corresponding internal op.

        Supported act_type values: 'relu' (default), 'sigmoid', 'tanh';
        anything else raises Error.
        """
        act_type = get_mxnet_layer_attrs(node.symbol_dict).str('act_type', 'relu')

        if act_type == 'relu':
            ReLU.update_node_stat(node)
        elif act_type in ('sigmoid', 'tanh'):
            Activation.update_node_stat(node, {'operation': act_type})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. "
                + refer_to_faq_msg(86), act_type)

        return ActivationFrontExtractor.enabled
Example #4
0
 def extract(node):
     """Mark *node* as a tanh activation."""
     Activation.update_node_stat(node, dict(operation='tanh'))
     return Tanh.enabled
Example #5
0
 def extract(node):
     """Extract an ONNX Elu node, reading its float 'alpha' attribute (default 1.0)."""
     elu_alpha = onnx_attr(node, 'alpha', 'f', default=1.0)
     Activation.update_node_stat(node, dict(operation='elu', alpha=elu_alpha))
     return EluFrontExtractor.enabled
Example #6
0
 def extract(node):
     """Mark *node* as a sigmoid activation."""
     Activation.update_node_stat(node, dict(operation='sigmoid'))
     return Sigmoid.enabled
Example #7
0
 def extract(node):
     """Mark *node* as a relu6 activation; this extractor is unconditionally enabled."""
     Activation.update_node_stat(node, dict(operation='relu6'))
     return True
Example #8
0
 def replace_pattern(self, graph: Graph, match: [str, Node]):
     """Rewrite the matched 'activation' node's stats, using its lowercased type as the op name."""
     activation_node = match['activation']
     Activation.update_node_stat(activation_node, {'operation': activation_node.type.lower()})
Example #9
0
 def extract(node):
     """Mark *node* as an elu activation."""
     Activation.update_node_stat(node, dict(operation='elu'))
     return Elu.enabled
Example #10
0
 def extract(node):
     """Mark *node* as an exp activation."""
     Activation.update_node_stat(node, dict(operation='exp'))
     # __class__ is the implicit closure cell for the enclosing class (same
     # mechanism zero-argument super() uses), so this reads <EnclosingClass>.enabled.
     return __class__.enabled