Example #1
    def __init__(self,
                 in_ch,
                 out_ch,
                 kernel_size,
                 stride=(1, 1),
                 padding='VALID',
                 dilation=(1, 1),
                 bias=True):
        super(Conv2d, self).__init__()
        self.in_ch = in_ch
        self.out_ch = out_ch
        self.kernel_size = utils.pair(kernel_size)
        self.stride = utils.pair(stride)
        assert self.stride[0] > 0 and self.stride[1] > 0,\
            'stride must be greater than 0'
        if isinstance(padding, str):
            self.padding = padding
        else:
            self.padding = utils.pair(padding)

        # a stride larger than the kernel would skip input positions entirely
        assert self.stride[0] <= self.kernel_size[0] and self.stride[1] <= self.kernel_size[1],\
            'stride must be less than or equal to kernel_size'
        # weight layout: (out_channels, in_channels, kernel_h, kernel_w)
        self.weight = t.Tensor(
            (out_ch, in_ch, self.kernel_size[0], self.kernel_size[1]),
            requires_grad=True)
        if bias:
            self.bias = t.Tensor((out_ch, 1), requires_grad=True)
        else:
            self.bias = None
        self.reset_parameters()
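
With 'VALID' padding, the output spatial size follows the standard convolution arithmetic that these assertions protect. A plain-Python sketch, independent of this framework:

    def conv_out_size(in_size, kernel_size, stride):
        # 'VALID' padding: only full kernel placements count,
        # so out = floor((in - kernel) / stride) + 1
        return (in_size - kernel_size) // stride + 1

    print(conv_out_size(28, 3, 1))  # 26
    print(conv_out_size(28, 3, 2))  # 13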
Example #2
 def __next__(self):
     # stop after one full pass over the dataset
     if self.iterated_num * self.batch_size < self.len:
         if self.start + self.batch_size > self.len:
             # final, possibly short, batch; restart from 0 next epoch
             next_start = 0
             end = self.len
         else:
             next_start = end = self.start + self.batch_size
         # stack samples along axis 0; assumes each sample's .data
         # already carries a leading batch dimension
         data = None
         labels = None
         for i in range(self.start, end):
             if data is None:
                 data = self.dataset[i][0].data
                 labels = self.dataset[i][1].data
             else:
                 data = np.concatenate((data, self.dataset[i][0].data),
                                       axis=0)
                 labels = np.concatenate((labels, self.dataset[i][1].data),
                                         axis=0)
         data = t.Tensor(data)
         labels = t.Tensor(labels)
         self.start = next_start
         self.iterated_num += 1
         return data, labels
     else:
         self.iterated_num = 0
         raise StopIteration
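
The wrap-around batching logic can be exercised on its own. A minimal NumPy sketch with hypothetical names (MiniLoader is not part of this framework; integer indices stand in for real samples):

    import numpy as np

    class MiniLoader:
        """Bare-bones batcher mirroring the __next__ logic above."""

        def __init__(self, n, batch_size):
            self.n, self.batch_size = n, batch_size
            self.start = self.count = 0

        def __iter__(self):
            return self

        def __next__(self):
            if self.count * self.batch_size >= self.n:
                self.count = 0          # reset for the next epoch
                raise StopIteration
            end = min(self.start + self.batch_size, self.n)
            batch = np.arange(self.start, end)
            self.start = 0 if end == self.n else end
            self.count += 1
            return batch

    for batch in MiniLoader(10, 4):
        print(batch)  # [0 1 2 3], then [4 5 6 7], then [8 9]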
Example #3
 def __init__(self, in_features, out_features, bias=True):
     """
     Notes
     -----
     self.weight and self.bias store parameters to train
     """
     super(Linear, self).__init__()
     self.in_features = in_features
     self.out_features = out_features
     self.weight = t.Tensor((in_features, out_features), requires_grad=True)
     if bias:
         self.bias = t.Tensor((1, out_features), requires_grad=True)
     else:
         # keep the attribute defined so later code can test it
         self.bias = None
     self.reset_parameters()
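
reset_parameters is called here but not shown in these examples. A common scheme, assumed rather than taken from this framework, is fan-in-scaled uniform initialization, sketched in NumPy:

    import numpy as np

    def reset_parameters(in_features, out_features, rng=np.random.default_rng(0)):
        # assumption: uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)];
        # the framework's actual method may differ
        bound = 1.0 / np.sqrt(in_features)
        weight = rng.uniform(-bound, bound, size=(in_features, out_features))
        bias = rng.uniform(-bound, bound, size=(1, out_features))
        return weight, bias

    w, b = reset_parameters(784, 10)
    print(w.shape, b.shape)  # (784, 10) (1, 10)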
Example #4
 def __call__(self, *args):
     # drop None placeholders up front; popping by index while iterating
     # would skip or misindex elements after the first removal
     self.raw_inputs = [arg for arg in args if arg is not None]
     for arg in self.raw_inputs:
         if isinstance(arg, t.Tensor):
             self.inputs.append(arg.data)
         else:
             self.inputs.append(arg)
     data = self.forward(*self.inputs)
     # the output tracks gradients if any input does
     requires_grad = any(
         getattr(x, 'requires_grad', False) for x in self.raw_inputs)
     grad_fns = []
     backward_names = get_backward_names(self)
     for backward_name in backward_names:
         back_name, operand = backward_name.split('_')
         operand = int(operand)
         # wrap the raw backward fn so its gradient is reduced back over
         # any broadcast axes (see backward_with_broadcast below)
         fn = backward(self, getattr(self, backward_name))
         grad_fn = GradFn(
             self.__class__.__name__ + back_name[0].upper() +
             back_name[1:] + str(operand),
             self.raw_inputs[operand], fn)
         self.raw_inputs[operand].grad_fn = grad_fn
         grad_fns.append(grad_fn)
     output_tensor = t.Tensor(data, requires_grad=requires_grad)
     output_tensor._node.register_grad_fns(grad_fns)
     return output_tensor
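
The requires_grad rule above is an any-fold over the inputs: the output tracks gradients as soon as one input does. A standalone illustration:

    class Leaf:
        def __init__(self, requires_grad=False):
            self.requires_grad = requires_grad

    inputs = [Leaf(False), 3.14, Leaf(True)]  # mix of tensors and raw values
    requires_grad = any(getattr(x, 'requires_grad', False) for x in inputs)
    print(requires_grad)  # True: one input tracks gradients, so the output must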
Example #5
 def transform(self, data):
     """
     Parameters
     ----------
     data : PIL.Image.open function returned value
         format is [height, width, channels]
     """
     data = np.array(data)
     if data.ndim == 2:
         data = data.reshape((-1, 1) + data.shape)
     if data.ndim == 3:
         data = data.reshape((-1, ) + data.shape)
         data = np.transpose(data, (0, 3, 1, 2))
     return t.Tensor(data)
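
The layout change is the standard HWC-to-NCHW transpose, easy to check with plain NumPy:

    import numpy as np

    hwc = np.zeros((32, 48, 3))              # [height, width, channels]
    nchw = hwc.reshape((-1,) + hwc.shape)    # add a batch axis: [1, 32, 48, 3]
    nchw = np.transpose(nchw, (0, 3, 1, 2))  # to [batch, channels, H, W]
    print(nchw.shape)  # (1, 3, 32, 48)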
Example #6
 def backward(self, grad=None):
     """
     Parameters
     ----------
     grad : Tensor
     """
     tensor = self.tensor
     if grad is None:
         if self.tensor.shape == ():
             # scalar output: seed the chain rule with d(out)/d(out) = 1
             grad = t.Tensor([1.0])
         else:
             raise RuntimeError(
                 'a grad tensor must be provided when the tensor is not a scalar')
     if tensor.requires_grad:
         # accumulate rather than overwrite, so a tensor used on several
         # paths sums the contribution from each one
         tensor.grad.data = tensor.grad.data + grad.data
     # walk the graph: each grad_fn maps this gradient to an input's gradient
     for grad_fn in tensor._node.grad_fns:
         if t.debug.BACKWARD_TRACE:
             print(grad_fn)
         output_grad = grad_fn(grad.data)
         grad_fn.tensor.backward(output_grad)
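
The accumulation step matters whenever a tensor feeds more than one consumer. A scalar sketch of the same rule:

    # y = x + x: each use of x contributes dy/dx = 1, and contributions sum
    grad_x = 0.0
    for upstream in (1.0, 1.0):      # one gradient per use of x
        grad_x = grad_x + upstream   # mirrors tensor.grad.data accumulation
    print(grad_x)  # 2.0, i.e. dy/dx for y = 2x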
Example #7
 def backward_with_broadcast(grad):
     # which positional input this backward belongs to: 'backward_0' -> 0
     operand = int(wrap.__name__.split('_')[1])
     # reduce the incoming gradient over broadcast axes so its shape
     # matches the original operand's
     return t.Tensor(o.broadcast(wrap(grad), operand))
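
Assuming o.broadcast performs the usual sum over broadcast axes (it is not shown in these examples), the reduction looks like this in NumPy:

    import numpy as np

    x = np.ones((1, 3))     # operand that broadcasting expanded to (4, 3)
    grad = np.ones((4, 3))  # upstream gradient arrives in the broadcast shape
    # sum over the expanded axis, keeping dims, to match x's shape again
    reduced = grad.sum(axis=0, keepdims=True)
    print(reduced.shape, reduced)  # (1, 3) [[4. 4. 4.]]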
Example #8
 def __init__(self, gamma=1.0, beta=0.0, momentum=0.1, train=True):
     super(BatchNorm2d, self).__init__()
     # learnable scale and shift applied after normalization
     self.gamma = t.Tensor(gamma, requires_grad=True)
     self.beta = t.Tensor(beta, requires_grad=True)
     # momentum for updating the running statistics used at eval time
     self.momentum = momentum
     self.training = train
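
The forward pass is not shown; the textbook BatchNorm computation these parameters plug into (an assumption about this class, not confirmed by the examples) normalizes per channel, then scales and shifts:

    import numpy as np

    x = np.random.default_rng(0).normal(2.0, 5.0, size=(8, 3, 4, 4))
    gamma, beta, eps = 1.0, 0.0, 1e-5
    # per-channel statistics over the batch and spatial axes
    mean = x.mean(axis=(0, 2, 3), keepdims=True)
    var = x.var(axis=(0, 2, 3), keepdims=True)
    y = gamma * (x - mean) / np.sqrt(var + eps) + beta
    print(y.mean(axis=(0, 2, 3)))  # ~0 per channel
    print(y.std(axis=(0, 2, 3)))   # ~1 per channel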
Example #9
def ensure_tensor(data):
    # wrap raw data in a Tensor, leaving existing Tensors untouched
    if isinstance(data, t.Tensor):
        return data
    return t.Tensor(data)