Example #1
def test():
    # Build a recurrent layer: 3 input units, 2 state units,
    # ReLU activation, learning rate 1e-3.
    l = RecurrentLayer(3, 2, ReluActivator(), 1e-3)
    x, d = data_set()
    # Forward over two time steps, then backpropagate the
    # sensitivity array d through time.
    l.forward(x[0])
    l.forward(x[1])
    l.backward(d, ReluActivator())
    return l
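
None of the snippets show the data_set() helper the tests rely on. A minimal sketch of what it could return, assuming the layer expects a 3-dimensional column vector per time step and a 2-dimensional sensitivity array matching the state size (shapes and values are assumptions, not taken from the original):

import numpy as np

def data_set():
    # Two time steps of 3-dimensional input, matching RecurrentLayer(3, 2, ...).
    x = [np.array([[1.0], [2.0], [3.0]]),
         np.array([[2.0], [3.0], [4.0]])]
    # Error signal for the 2-dimensional state, fed into backward() as d.
    d = np.array([[1.0], [2.0]])
    return x, d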
Example #2
def test():
    x, d = data_set()
    model = RecurrentLayer(3, 2, ReluActivator(), 1e-3)
    model.forward(x[0])
    model.forward(x[1])
    model.backward(d, ReluActivator())

    return model, x, d
Example #3
def test():
    x, d = data_set()
    l = RecurrentLayer(3, 2, ReluActivator(), 1e-3)
    print "X:", x, "\ny:", d
    l.forward(x[0])
    l.forward(x[1])
    l.backward(d, ReluActivator())
    print "activation:", l.activator, "\ndelta_list", l.delta_list, "\ngradient_list:", l.gradient_list, "\nstate_list:", l.state_list, "\nW:", l.W, "\nU:", l.U
    return l
Example #4
 def __init__(self, state_size, action_size, seed, learning_rate=0.001):
     # Call the __init__ method of QNetwork's parent class.
     super(QNetwork, self).__init__()
     # Seed the random number generator so weight initialisation is reproducible.
     self.seed = seed
     np.random.seed(seed)
     # Three fully connected hidden layers with ReLU, followed by a linear
     # output layer with one unit per action.
     self.fc1 = FCLayer(state_size, 256, ReluActivator(), learning_rate)
     self.fc2 = FCLayer(256, 128, ReluActivator(), learning_rate)
     self.fc3 = FCLayer(128, 64, ReluActivator(), learning_rate)
     self.out = FCLayer(64, action_size, IdentityActivator(), learning_rate)
     self.data = [self.fc1.parameters, self.fc2.parameters,
                  self.fc3.parameters, self.out.parameters]
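
The constructor only wires the layers together; a forward pass would chain them in order. The sketch below assumes each FCLayer exposes a forward(input) method and an output attribute holding its last activation, which is common in these from-scratch layer classes but is not shown in the original snippet:

 def forward(self, state):
     # Assumed interface: FCLayer.forward(input) stores its result in `output`.
     self.fc1.forward(state)
     self.fc2.forward(self.fc1.output)
     self.fc3.forward(self.fc2.output)
     self.out.forward(self.fc3.output)
     return self.out.output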
Example #5
def test():
    print("RecurrentLayer Test!")
    layer = RecurrentLayer(3, 2, ReluActivator(), 1e-3)
    x, d = data_set()
    # d is used as the sensitivity (error) array passed to backward()
    layer.forward(x[0])
    layer.forward(x[1])
    layer.backward(d, ReluActivator())
    return layer
Example #6
def test():
    l = RecurrentLayer(3, 2, ReluActivator(), 1e-3)
    x, d = data_set()
    l.forward(x[0])
    l.forward(x[1])
    l.backward(d, ReluActivator())
    print(l.state_list)
    print(l.delta_list)
    print(l.gradient)
    return l
Example #7
    def __init__(self,
                 conv1_filter_number=6,
                 conv2_filter_number=16,
                 hide_dense_units=100):
        '''
        Constructor.
        '''
        # Convolutional Layer #1
        self.conv1 = ConvLayer(input_width=28,
                               input_height=28,
                               channel_number=1,
                               filter_width=5,
                               filter_height=5,
                               filter_number=conv1_filter_number,
                               zero_padding=2,
                               stride=1,
                               activator=ReluActivator(),
                               learning_rate=0.001)
        # Pooling Layer #1
        self.pool1 = MaxPoolingLayer(input_width=28,
                                     input_height=28,
                                     channel_number=conv1_filter_number,
                                     filter_width=2,
                                     filter_height=2,
                                     stride=2)
        # Convolutional Layer #2 and Pooling Layer #2
        self.conv2 = ConvLayer(input_width=14,
                               input_height=14,
                               channel_number=conv1_filter_number,
                               filter_width=5,
                               filter_height=5,
                               filter_number=conv2_filter_number,
                               zero_padding=2,
                               stride=1,
                               activator=ReluActivator(),
                               learning_rate=0.001)
        self.pool2 = MaxPoolingLayer(input_width=14,
                                     input_height=14,
                                     channel_number=conv2_filter_number,
                                     filter_width=2,
                                     filter_height=2,
                                     stride=2)

        self.conv2_filter_number = conv2_filter_number
        # Dense Layer
        self.dense = FullConnectedLayer(input_size=7 * 7 * conv2_filter_number,
                                        output_size=hide_dense_units,
                                        activator=SigmoidActivator())
        # Logits Layer
        self.logits = FullConnectedLayer(input_size=hide_dense_units,
                                         output_size=10,
                                         activator=SigmoidActivator())
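
The constructor fixes the shape flow: a 1 x 28 x 28 input passes through conv1/pool1 down to 14 x 14, through conv2/pool2 down to 7 x 7, and is then flattened into the dense layer (hence input_size=7 * 7 * conv2_filter_number). A forward pass chaining the layers could look like the sketch below; the forward()/output interface of each layer is an assumption modelled on typical from-scratch implementations, not code taken from the original.

    def forward(self, image):
        # image: 1 x 28 x 28 array; each layer is assumed to expose
        # forward(input) and an `output` attribute (assumed interface).
        self.conv1.forward(image)              # -> conv1_filter_number x 28 x 28
        self.pool1.forward(self.conv1.output)  # -> conv1_filter_number x 14 x 14
        self.conv2.forward(self.pool1.output)  # -> conv2_filter_number x 14 x 14
        self.pool2.forward(self.conv2.output)  # -> conv2_filter_number x 7 x 7
        # Flatten the feature maps into a column vector for the dense layer.
        flat = self.pool2.output.reshape(-1, 1)
        self.dense.forward(flat)
        self.logits.forward(self.dense.output)
        return self.logits.output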
Example #8
 def calc_gradient(self, label):
     label_array = np.array(label).reshape(self.logits.output.shape)
     # Delta at the output layer: activator derivative times the residual
     # between the label and the network output (squared-error loss).
     delta = np.zeros(self.logits.output.shape)
     for k in range(len(delta)):
         delta[k] = -self.logits.activator.backward(
             self.logits.output[k]) * (label_array[k] -
                                       self.logits.output[k])
     # Backpropagate through the fully connected layers.
     delta = self.logits.backward(delta, SigmoidActivator())
     delta = self.dense.backward(delta, ReluActivator())
     # Reshape the flat delta back into conv2_filter_number feature maps
     # of 7 x 7 for the pooling/convolution stack.
     delta = delta.reshape(-1, self.conv2_filter_number, 7, 7)
     delta = self.pool2.backward(delta)
     delta = self.conv2.backward(delta)
     delta = self.pool1.backward(delta)
     delta = self.conv1.backward(delta)
     return delta
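
calc_gradient() only computes and propagates the deltas; applying them is a separate step. A minimal training-step sketch under the assumption that the conv layers apply their stored learning rate via update() and the fully connected layers take the rate as an argument (the forward() method and all update() signatures are assumptions, not code from the original):

 def train_one_sample(self, image, label, learning_rate):
     # Forward pass (see the forward() sketch above), then backpropagation,
     # then let every layer apply its accumulated gradient.
     self.forward(image)
     self.calc_gradient(label)
     self.conv1.update()                  # assumed: uses the layer's own learning rate
     self.conv2.update()
     self.dense.update(learning_rate)     # assumed signature
     self.logits.update(learning_rate)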