# Example No. 1 (score: 0)
class Linear(Layer):
    """Fully connected layer: ``y = x @ W (+ b)``.

    The weight matrix uses a He-style initialization — a standard
    normal draw scaled by ``sqrt(2 / fan_in)``. The bias vector starts
    at zero and can be disabled entirely via the ``bias`` flag.
    """

    def __init__(self, n_inputs, n_outputs, bias=True):
        super().__init__()
        self.use_bias = bias

        # He initialization: zero-mean normal scaled by the fan-in.
        scale = np.sqrt(2.0 / n_inputs)
        init = np.random.randn(n_inputs, n_outputs) * scale
        self.weight = Tensor(init, autograd=True)
        self.parameters.append(self.weight)

        if self.use_bias:
            # Bias is registered after the weight so optimizers see
            # parameters in (weight, bias) order.
            self.bias = Tensor(np.zeros(n_outputs), autograd=True)
            self.parameters.append(self.bias)

    def forward(self, input_matrix):
        """Apply the affine map to a batch of row vectors."""
        out = input_matrix.dot(self.weight)
        if not self.use_bias:
            return out
        # Broadcast the bias row across the batch dimension.
        return out + self.bias.expand(0, len(input_matrix.data))
# Example No. 2 (score: 0)
class Linear(LayerCore):
    """Fully connected layer computing ``input @ W + b``.

    He/Kaiming initialization assumes a zero-mean, unit-variance draw
    scaled by ``sqrt(2 / fan_in)``, so the weights are drawn with
    ``np.random.randn``.
    """

    def __init__(self, n_inputs, n_outputs):
        super(Linear, self).__init__()
        # BUG FIX: np.random.rand draws uniform [0, 1) values (mean ~0.5,
        # not zero-centered), which defeats the He scale factor below.
        # np.random.randn provides the zero-mean normal draw the scheme
        # requires (matching the other Linear implementation in this file).
        W = np.random.randn(n_inputs, n_outputs) * np.sqrt(2.0 / n_inputs)
        self.weight = Tensor(W, requires_grad=True)
        self.bias = Tensor(np.zeros(n_outputs), requires_grad=True)
        self.parameters.append(self.weight)
        self.parameters.append(self.bias)

    def __call__(self, input: Tensor) -> Tensor:
        """Forward pass: matrix product plus the bias broadcast over rows."""
        n_samples = input.data.shape[0]
        return input.mm(self.weight) + self.bias.expand(repeat=n_samples, axis=0)