Example #1
    def __init__(self, settings, net_settings=None):
        """
        :param settings: Fully connected layer settings
        :type settings: FullConnectedLayerSettings
        """
        super(_FullConnectedLayer, self).__init__(settings, net_settings)
        # Attach an internal ReLU layer whose input shape matches this layer's
        # output shape, reusing the activation name from the settings.
        self.relu = _ReluLayer(ReluLayerSettings(in_shape=self.settings.out_shape,
                                                 activation=self.settings.activation))
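The constructor chains a ReLU activation onto the fully connected layer: the ReLU's input shape is set to this layer's output shape. As a rough, assumption-based illustration of what that composition computes with plain NumPy (fc_relu_forward, weights and bias are made-up names; _FullConnectedLayer's real internals are not shown in this snippet):

import numpy as np

def fc_relu_forward(x, weights, bias):
    # Affine transform (the "fully connected" part) followed by an elementwise
    # max(x, 0), which is what a 'max' ReLU activation usually means.
    z = weights.dot(x) + bias
    return np.maximum(z, 0)

x = np.random.uniform(-1, 1, 8)             # example input vector
weights = np.random.uniform(-1, 1, (4, 8))  # maps 8 inputs to 4 outputs
bias = np.zeros(4)
print(fc_relu_forward(x, weights, bias))    # negatives are clipped to zero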
Example #2
    def test_forward(self):
        # 4x4x3 input containing both positive and negative values.
        arr = np.random.uniform(-5, 5, 48).reshape((4, 4, 3))
        s = ReluLayerSettings(in_shape=arr.shape, activation='max')
        l = _ReluLayer(s)
        res = l.forward(arr)

        # Print each input channel, then each output channel, for visual inspection.
        print(arr[:, :, 0])
        print(arr[:, :, 1])
        print(arr[:, :, 2])
        print('-----------')
        print(res[:, :, 0])
        print(res[:, :, 1])
        print(res[:, :, 2])
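The test only prints the input and output channels. Assuming activation='max' means the usual elementwise max(x, 0), the relationship it is eyeballing can be written as a small standalone check (a sketch, not part of the original test):

import numpy as np

arr = np.random.uniform(-5, 5, 48).reshape((4, 4, 3))
expected = np.maximum(arr, 0)                        # shape is preserved
assert expected.shape == arr.shape
assert (expected[arr > 0] == arr[arr > 0]).all()     # positive values pass through unchanged
assert (expected[arr <= 0] == 0).all()               # non-positive values become zero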
Example #3
    def test_backward(self):
        # 4x4x3 input containing both positive and negative values.
        arr = np.random.uniform(-5, 5, 48).reshape((4, 4, 3))
        s = ReluLayerSettings(in_shape=arr.shape, activation='max')
        l = _ReluLayer(s)
        # Wire up neighbours: a real input layer in front, a dummy object behind.
        l.prev_layer = _InputLayer(InputLayerSettings(in_shape=arr.shape))
        l.next_layer = object()

        # Error signal that stands in for the gradient from the next layer.
        e = np.random.uniform(-5, 5, 48).reshape(*s.out_shape)
        print(e[:, :, 0])
        print(e[:, :, 1])
        print(e[:, :, 2])
        print('-----------')

        # Forward pass through the input layer and the ReLU, then backpropagate.
        res = l.prev_layer.forward(arr)
        res = l.forward(res)

        res = l.backward(e)

        print(res[:, :, 0])
        print(res[:, :, 1])
        print(res[:, :, 2])
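For the backward pass, the standard ReLU rule is that the incoming error is propagated only where the forward input was positive and zeroed elsewhere. Whether _ReluLayer implements exactly this is not visible from the snippet, so the following is only a reference calculation under that assumption:

import numpy as np

arr = np.random.uniform(-5, 5, 48).reshape((4, 4, 3))   # forward input
e = np.random.uniform(-5, 5, 48).reshape((4, 4, 3))     # error from the next layer
grad_in = e * (arr > 0)   # gradient is zero wherever the ReLU clipped its input
print(grad_in[:, :, 0])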