Code example #1
0
 def test_activation_elu_infer(self):
     """Elu.infer should fill node_3 with the elementwise ELU of node_1's value
     and a shape matching the 4-element input."""
     graph = build_graph(
         self.nodes_attributes,
         [('node_1', 'activation_node'), ('activation_node', 'node_3')],
         {
             'node_1': {'value': np.array([6, -4, -2, -1])},
             'activation_node': {'operation': 'elu', 'alpha': 1.0},
             'node_3': {'value': None},
         })
     graph.graph['layout'] = 'NCHW'
     Elu.infer(Node(graph, 'activation_node'))
     # Expected: positives pass through, negatives become alpha * (exp(x) - 1).
     expected_shape = np.array([4])
     expected_value = np.array([6., -0.98168436, -0.86466472, -0.63212056])
     out_node = graph.node['node_3']
     for i, want in enumerate(expected_shape):
         self.assertEqual(out_node['shape'][i], want)
     for i, want in enumerate(expected_value):
         self.assertAlmostEqual(out_node['value'][i], want)
Code example #2
0
File: leaky_relu.py  Project: mikhailk62/openvino
    def extract(cls, node):
        """Map an MXNet ``LeakyReLU`` symbol onto the matching internal activation op.

        MXNet multiplexes several activations through the single ``LeakyReLU``
        operator via its ``act_type`` attribute; dispatch to PReLU / Elu / ReLU /
        LeakyReLU / Gelu accordingly and update the node's attributes in place.

        :param node: graph node wrapping the MXNet symbol being extracted.
        :return: the extractor's ``enabled`` flag (front-extractor convention).
        :raises Error: if ``act_type`` names an unsupported activation.
        """
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        # MXNet's documented default for act_type is 'leaky'.
        act_type = attrs.str('act_type', 'leaky')
        if act_type == 'prelu':
            # Caffe-style filler defaults; channel_shared=1 means one slope for all channels.
            prelu_attrs = {'channel_shared': 1,
                           'filler_type': 'constant',
                           'filler_value': 0,
                           'min': 0,
                           'max': 1,
                           'mean': 0,
                           'std': 0,
                           'sparse': -1,
                           'variance_norm': "caffe.FillerParameter.FAN_IN"}
            PReLU.update_node_stat(node, prelu_attrs)
        elif act_type == 'elu':
            alpha = attrs.float('slope', 0.25)
            Elu.update_node_stat(node, {'alpha': alpha})
        elif act_type == 'leaky':
            negative_slope = attrs.float('slope', 0.25)
            if negative_slope == 0:
                # A zero negative slope is exactly ReLU; emit the simpler op.
                ReLU.update_node_stat(node)
            else:
                LeakyReLU.update_node_stat(node, {'negative_slope': negative_slope})
        elif act_type == 'gelu':
            GeLUOP.update_node_stat(node, {'approximation_mode': 'erf'})
        else:
            raise Error(
                "Operation '{}' not supported. Please register it as custom op. " +
                refer_to_faq_msg(86),
                act_type)

        # Fix: return cls.enabled rather than naming LeakyReLUFrontExtractor
        # explicitly, so subclasses report their own flag — consistent with the
        # other extractors in this family.
        return cls.enabled
Code example #3
0
File: activation_ext.py  Project: mikhailk62/openvino
 def extract(cls, node):
     """Mark the node as an Elu operation with default attributes."""
     Elu.update_node_stat(node)
     return cls.enabled
Code example #4
0
    def extract(cls, node):
        """Copy the Caffe ``elu_param`` attributes onto the node and mark it as Elu."""
        elu_param = node.pb.elu_param
        Elu.update_node_stat(node, collect_attributes(elu_param))
        return cls.enabled