Example #1

# Shared context for these examples: `floatX` (the project's tensor dtype)
# and `other` (the module providing the activation classes) are defined
# elsewhere in the source project.
import torch
from typing import Optional


class Conv:  # hypothetical class name; the source shows only the __init__
    def __init__(self,
                 input_shape,
                 out_channel,
                 kernel_size,
                 stride,
                 learning_rate,
                 activate_func: Optional[str] = None):
        self.in_h, self.in_w, self.in_channel = input_shape
        self.learning_rate = learning_rate
        self.out_channel = out_channel
        self.kernel_h, self.kernel_w = kernel_size
        self.stride_h, self.stride_w = stride
        # No padding ("valid" convolution): the kernel must tile the input
        # exactly, i.e. the span left after the first kernel placement is a
        # whole number of strides.
        assert (self.in_h - self.kernel_h) % self.stride_h == 0
        assert (self.in_w - self.kernel_w) % self.stride_w == 0
        # Output size of a valid convolution: (in - kernel) // stride + 1.
        self.out_h = (self.in_h - self.kernel_h) // self.stride_h + 1
        self.out_w = (self.in_w - self.kernel_w) // self.stride_w + 1

        # Filters are stored channels-last: (kernel_h, kernel_w, in_channel, out_channel).
        self.filters = torch.randn(
            (self.kernel_h, self.kernel_w, self.in_channel, out_channel),
            dtype=floatX)
        self.biases = torch.randn((self.out_channel, ), dtype=floatX)
        # Gradient buffer with the same layout as the filters.
        self.filters_gradient = torch.empty(
            (self.kernel_h, self.kernel_w, self.in_channel, out_channel),
            dtype=floatX)

        # Map the activation name onto the activation classes from `other`.
        if activate_func == 'relu':
            self.activation = other.Relu()
        elif activate_func == 'sigmoid':
            self.activation = other.Sigmoid(100)
        elif activate_func == 'tanh':
            self.activation = other.Tanh(100)
        else:
            self.activation = None
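
A quick sanity check of the output-size arithmetic above; the `Conv` name and the `floatX` stand-in are assumptions for this sketch, not part of the source:

floatX = torch.float32  # stand-in for the project's dtype

# 28x28 RGB input, 5x5 kernel, stride 1, no padding:
# out = (28 - 5) // 1 + 1 = 24 along each axis.
conv = Conv(input_shape=(28, 28, 3), out_channel=8,
            kernel_size=(5, 5), stride=(1, 1), learning_rate=0.01)
assert (conv.out_h, conv.out_w) == (24, 24)
assert conv.filters.shape == (5, 5, 3, 8)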
Example #2

class Pooling:  # hypothetical class name, as above
    def __init__(self, input_shape, pool_size,
                 activate_func: Optional[str] = None) -> None:
        self.height, self.width, self.channel = input_shape
        self.pool_size = pool_size
        # Non-overlapping pooling with no padding: both spatial dimensions
        # must divide evenly by the pool size.
        assert self.height % pool_size == 0
        assert self.width % pool_size == 0

        if activate_func == 'relu':
            self.activation = other.Relu()
        elif activate_func == 'sigmoid':
            self.activation = other.Sigmoid(100)
        elif activate_func == 'tanh':
            self.activation = other.Tanh(100)
        else:
            self.activation = None
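
A minimal instantiation showing what the divisibility asserts enforce (the `Pooling` name is assumed, as above):

pool = Pooling(input_shape=(24, 24, 8), pool_size=2)  # 24 % 2 == 0: fine
# Pooling(input_shape=(25, 25, 8), pool_size=2) would trip the asserts.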

Example #3

class FullyConnected:  # hypothetical class name; the source shows only the __init__
    def __init__(self,
                 dim_in,
                 dim_out,
                 learning_rate,
                 activate_func: Optional[str] = None) -> None:
        self.dim_in = dim_in
        self.dim_out = dim_out
        self.learning_rate = learning_rate
        # Small-scale random initialization: dividing by 500 keeps the
        # initial weights and biases close to zero.
        self.params = torch.randn((dim_out, dim_in), dtype=floatX) / 500
        self.biases = torch.randn((dim_out, ), dtype=floatX) / 500

        if activate_func == 'relu':
            self.activation = other.Relu()
        elif activate_func == 'sigmoid':
            self.activation = other.Sigmoid(100)
        elif activate_func == 'tanh':
            self.activation = other.Tanh(100)
        elif activate_func == 'softmax':
            self.activation = other.Softmax()
        else:
            self.activation = None
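
And a sketch of the fully connected layer's scaled-down initialization; the `FullyConnected` name and the `floatX` stand-in are again assumptions for illustration:

floatX = torch.float32  # stand-in for the project's dtype

fc = FullyConnected(dim_in=24 * 24 * 8, dim_out=10, learning_rate=0.01)
assert fc.params.shape == (10, 4608)
assert fc.biases.shape == (10,)
# randn / 500 gives entries with std 0.002, so the initial weights are tiny.
assert float(fc.params.abs().max()) < 0.1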