Example #1
    def __init__(self, tensor1, tensor2):
        super(Add, self).__init__(tensor1.value + tensor2.value)
        self.requires_grad = tensor1.requires_grad or tensor2.requires_grad

        if tensor1.requires_grad:
            back_channel = Node(tensor1, self.der_pos1)
            self.tensor1 = tensor1
            self.channels.append(back_channel)
        if tensor2.requires_grad:
            back_channel = Node(tensor2, self.der_pos2)
            self.tensor2 = tensor2
            self.channels.append(back_channel)
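
The constructor only wires up the graph; the der_pos1 and der_pos2 methods passed to each Node are not shown here. A minimal sketch of what the addition VJP would compute, written as a standalone function (the name add_vjp is an assumption, not the library's API):

def add_vjp(grad_output):
    # d(x + y)/dx = d(x + y)/dy = identity, so each parent receives
    # the upstream gradient unchanged.
    return grad_output
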
Example #2
    def __init__(self, tensor1):
        super(Negate, self).__init__(-tensor1.value)
        self.requires_grad = tensor1.requires_grad

        if tensor1.requires_grad:
            back_channel = Node(tensor1, self.der_pos)
            self.channels.append(back_channel)
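
A similar hedged sketch for the negation VJP referenced as self.der (again, the free function is illustrative only):

def negate_vjp(grad_output):
    # d(-x)/dx = -1, so the upstream gradient just flips sign.
    return -grad_output
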
Example #3
    def __init__(self, y_pred, y_target):
        super(LogLikelihood,
              self).__init__(value=-(y_target.value * torch.log(y_pred.value)))
        self.y_pred = y_pred
        self.y_target = y_target

        back_channel = Node(autoVariable=y_pred, vjp=self.der)
        self.channels.append(back_channel)
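
Only y_pred gets a backward channel, so y_target is treated as a constant. For L = -(t * log(p)) the derivative is dL/dp = -t / p; a hedged sketch of the VJP the der method would implement (function name assumed):

def log_likelihood_vjp(grad_output, y_pred_value, y_target_value):
    # dL/dp for L = -(t * log(p)) is -t / p.
    return grad_output * (-y_target_value / y_pred_value)
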
Example #4
    def __init__(self, y_pred, y_target):
        super(MeanAbsoluteError,
              self).__init__(value=torch.abs(y_pred.value - y_target.value))
        self.y_pred = y_pred
        self.y_target = y_target

        back_channel = Node(autoVariable=y_pred, vjp=self.der)
        self.channels.append(back_channel)
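
Note that the stored value is the element-wise absolute error; no mean is taken despite the class name. For L = |p - t| the derivative is sign(p - t); a hedged sketch (not the library's der method):

import torch

def mean_absolute_error_vjp(grad_output, y_pred_value, y_target_value):
    # dL/dp for L = |p - t| is sign(p - t); torch.sign returns 0 where p == t.
    return grad_output * torch.sign(y_pred_value - y_target_value)
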
Example #5
    def __init__(self, y_pred, y_target):
        super(SquareError,
              self).__init__(value=0.5 * ((y_pred.value - y_target.value)**2))
        self.y_pred = y_pred
        self.y_target = y_target

        back_channel = Node(autoVariable=y_pred, vjp=self.der)
        self.channels.append(back_channel)
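
The 0.5 factor keeps the derivative clean: for L = 0.5 * (p - t)**2, dL/dp = p - t. A hedged sketch of the VJP:

def square_error_vjp(grad_output, y_pred_value, y_target_value):
    # dL/dp for L = 0.5 * (p - t)**2 is (p - t).
    return grad_output * (y_pred_value - y_target_value)
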
Example #6
    def __init__(self, tensor: autoTensor, *idx):
        super(Transpose, self).__init__(self._transpose(tensor, *idx))
        self.requires_grad = tensor.requires_grad
        self.idx = list(idx)

        if tensor.requires_grad:
            back_channel = Node(tensor, self.der)
            self.channels.append(back_channel)
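
Neither _transpose nor der is shown. Assuming idx holds the two dimensions handed to torch.transpose, the backward pass swaps the same pair again, because transposing one pair of dimensions is its own inverse. A hedged sketch:

def transpose_vjp(grad_output, dim0, dim1):
    # Swapping the same two dimensions again undoes the forward transpose.
    return grad_output.transpose(dim0, dim1)
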
Example #7
    def __init__(self, inputs, mask):
        super(Dpout, self).__init__(value=inputs.value * mask)
        self.requires_grad = inputs.requires_grad

        if self.requires_grad:
            self.mask = mask
            back_channel = Node(inputs, self.der)
            self.channels.append(back_channel)
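
Dpout multiplies the input by a precomputed dropout mask and stores that mask only when a gradient is needed. The VJP reapplies the same mask (hedged sketch, not the library's der):

def dropout_vjp(grad_output, mask):
    # Elements zeroed in the forward pass receive no gradient.
    return grad_output * mask
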
Example #8
    def __init__(self, tensor1):
        super(Exp, self).__init__(torch.exp(tensor1.value))
        self.requires_grad = tensor1.requires_grad

        if tensor1.requires_grad:
            back_channel = Node(tensor1, self.der_pos1)
            self.tensor1 = tensor1
            self.channels.append(back_channel)
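
Because d(exp(x))/dx = exp(x), the backward pass can reuse the forward result rather than recomputing the exponential. A hedged sketch:

import torch

def exp_vjp(grad_output, x_value):
    # d(exp(x))/dx = exp(x); inside the node this could simply reuse self.value.
    return grad_output * torch.exp(x_value)
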
Example #9
    def __init__(self, y_pred, y_target):
        super(BinaryCrossEntropy, self).__init__(
            value=-(y_target.value * torch.log(y_pred.value) +
                    (1 - y_target.value) * torch.log(1 - y_pred.value)))
        self.y_pred = y_pred
        self.y_target = y_target

        back_channel = Node(autoVariable=y_pred, vjp=self.der)
        self.channels.append(back_channel)
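
As with the other losses, only y_pred receives a backward channel. For L = -(t*log(p) + (1 - t)*log(1 - p)) the derivative is dL/dp = (p - t) / (p * (1 - p)); a hedged sketch of the VJP (function name assumed):

def binary_cross_entropy_vjp(grad_output, y_pred_value, y_target_value):
    # dL/dp = (p - t) / (p * (1 - p)); assumes p stays strictly inside (0, 1).
    p, t = y_pred_value, y_target_value
    return grad_output * (p - t) / (p * (1 - p))
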
Example #10
    def __init__(self, inputs):
        super(Flatten2d,
              self).__init__(value=inputs.value.view(inputs.size()[0], -1))
        self.requires_grad = inputs.requires_grad

        if self.requires_grad:
            self.inputs_size = inputs.size()
            back_channel = Node(inputs, self.der)
            self.channels.append(back_channel)
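
The input shape is saved so the backward pass can undo the flattening. A hedged sketch of what der would do:

def flatten2d_vjp(grad_output, inputs_size):
    # Reshape the flat gradient back to the original input shape
    # (reshape also covers the non-contiguous case that view would reject).
    return grad_output.reshape(inputs_size)
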
Example #11
    def __init__(self, tensor1, axis=0):
        super(Sum, self).__init__(tensor1.value.sum(dim=axis, keepdim=True))

        self.requires_grad = tensor1.requires_grad

        if tensor1.requires_grad:
            back_channel = Node(tensor1, self.der_pos1)
            self.shape = tensor1.value.size()
            self.channels.append(back_channel)
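
Summing with keepdim=True leaves the reduced axis as size 1, so the backward pass can broadcast the gradient back to the saved shape. A hedged sketch:

def sum_vjp(grad_output, original_shape):
    # Every summed element contributes with coefficient 1, so the reduced
    # gradient is broadcast back to the original shape.
    return grad_output.expand(original_shape)
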
Example #12
    def __init__(self, tensor1, pow_val):
        super(Power, self).__init__(tensor1.value**pow_val)

        self.requires_grad = tensor1.requires_grad

        if tensor1.requires_grad:
            back_channel = Node(tensor1, self.der_pos1)
            self.tensor1 = tensor1
            self.pow_val = pow_val
            self.channels.append(back_channel)
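
For y = x**n with a fixed exponent n, dy/dx = n * x**(n - 1). A hedged sketch of the VJP behind der_pos1:

def power_vjp(grad_output, x_value, pow_val):
    # dy/dx for y = x**n is n * x**(n - 1).
    return grad_output * pow_val * (x_value ** (pow_val - 1))
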
Example #13
    def __init__(self, image_block, filters, bias, padding=0, stride=1):
        conv = F.conv2d(input=image_block.value,
                        weight=filters.value,
                        bias=bias.value,
                        stride=(stride, stride),
                        padding=(padding, padding))
        super(Conv2d, self).__init__(value=conv)

        self.requires_grad = (image_block.requires_grad
                              or filters.requires_grad or bias.requires_grad)
        self.padding = padding
        self.stride = stride
        self.image_block = image_block
        self.filters = filters
        self.bias = bias

        if image_block.requires_grad:
            back_channel = Node(image_block, self.der_image)
            self.channels.append(back_channel)
        if filters.requires_grad:
            back_channel = Node(filters, self.der_filters)
            self.channels.append(back_channel)
        if bias.requires_grad:
            back_channel = Node(bias, self.der_bias)
            self.channels.append(back_channel)
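
Up to three backward channels are registered, one per differentiable input. PyTorch ships helpers for the two convolution VJPs; a hedged sketch of what der_image, der_filters and der_bias could compute (the free functions below are illustrative, not the library's methods):

import torch
import torch.nn.grad

def conv2d_image_vjp(grad_output, image_size, filters, stride, padding):
    # Gradient w.r.t. the image block.
    return torch.nn.grad.conv2d_input(image_size, filters, grad_output,
                                      stride=(stride, stride),
                                      padding=(padding, padding))

def conv2d_filters_vjp(grad_output, image_block, filters_size, stride, padding):
    # Gradient w.r.t. the filters.
    return torch.nn.grad.conv2d_weight(image_block, filters_size, grad_output,
                                       stride=(stride, stride),
                                       padding=(padding, padding))

def conv2d_bias_vjp(grad_output):
    # The bias is added per output channel, so sum over batch and spatial dims.
    return grad_output.sum(dim=(0, 2, 3))
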