def test_relu(self):
    """ReLU must agree with the hand-computed expectation AND round-trip
    unchanged through the ONNX frontend/backend pair."""
    X = np.array([0.8, -1.2, 3.3, -3.6, -0.5, 0.5]).reshape(3, 2).astype(np.float32)
    # Expected element-wise max(x, 0) for X above.
    XT = np.array([0.8, 0, 3.3, 0, 0, 0.5]).reshape(3, 2).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(gpu_dev)
    y = autograd.ReLU()(x)[0]

    # frontend: export the computation to an ONNX model
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))

    # backend: rebuild the graph from the model and run it
    sg_ir = sonnx.prepare(model, device=gpu_dev)
    y_t = sg_ir.run([x])

    # Fix: XT was computed but never asserted — check the frontend output
    # against the known ReLU result, not just frontend/backend consistency.
    np.testing.assert_array_almost_equal(tensor.to_numpy(y), XT, decimal=5)
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)
def __init__(self, in_filters, out_filters, reps, strides=1, padding=0,
             start_with_relu=True, grow_first=True):
    """Construct an Xception-style residual block.

    Builds `reps` separable-conv stages (each ReLU -> SeparableConv2d ->
    BatchNorm2d) plus an optional 1x1 projection shortcut when the channel
    count or stride changes. `grow_first` controls whether the channel
    growth happens in the first or the last stage; `start_with_relu=False`
    drops the leading ReLU.
    """
    super(Block, self).__init__()

    # Projection shortcut when the residual path changes shape;
    # identity (None) otherwise.
    if out_filters != in_filters or strides != 1:
        self.skip = autograd.Conv2d(in_filters, out_filters, 1,
                                    stride=strides, padding=padding,
                                    bias=False)
        self.skipbn = autograd.BatchNorm2d(out_filters)
    else:
        self.skip = None

    stages = []
    channels = in_filters

    if grow_first:
        # Grow channels in the very first stage.
        stages += [
            autograd.ReLU(),
            autograd.SeparableConv2d(in_filters, out_filters, 3,
                                     stride=1, padding=1, bias=False),
            autograd.BatchNorm2d(out_filters),
        ]
        channels = out_filters

    # Middle stages keep the channel count fixed.
    for _ in range(reps - 1):
        stages += [
            autograd.ReLU(),
            autograd.SeparableConv2d(channels, channels, 3,
                                     stride=1, padding=1, bias=False),
            autograd.BatchNorm2d(channels),
        ]

    if not grow_first:
        # Grow channels in the final stage instead.
        stages += [
            autograd.ReLU(),
            autograd.SeparableConv2d(in_filters, out_filters, 3,
                                     stride=1, padding=1, bias=False),
            autograd.BatchNorm2d(out_filters),
        ]

    if start_with_relu:
        # Mirror the original code: swap a fresh ReLU in at the head.
        stages[0] = autograd.ReLU()
    else:
        stages = stages[1:]

    if strides != 1:
        stages.append(autograd.MaxPool2d(3, strides, padding + 1))

    self.layers = stages