def __init__(self):
    super().__init__()
    # Activation modules exercised between the quant/dequant boundary.
    self.sigmoid = torch.nn.Sigmoid()
    self.hardsigmoid = torch.nn.Hardsigmoid()
    self.tanh = torch.nn.Tanh()
    # Stubs marking where tensors enter and leave the quantized region.
    self.quant = QuantStub()
    self.dequant = DeQuantStub()
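# A minimal sketch of how a module like this is typically completed; the
# forward below is an assumption, not part of the original snippet. Input is
# quantized by the QuantStub, passed through the activations, and dequantized
# on the way out.
def forward(self, x):
    x = self.quant(x)      # float -> quantized
    x = self.sigmoid(x)
    x = self.hardsigmoid(x)
    x = self.tanh(x)
    x = self.dequant(x)    # quantized -> float
    return x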
def __init__(self):
    super(SubModule, self).__init__()
    # Per-module qconfig: this submodule is quantized with the default settings.
    self.qconfig = default_qconfig
    self.mod1 = torch.nn.Conv2d(3, 3, 3, bias=False).to(dtype=torch.float)
    self.mod2 = nn.ReLU()
    self.quant = QuantStub()
    self.dequant = DeQuantStub()
def __init__(self, *args: Any, **kwargs: Any) -> None:
    """
    MobileNet V3 main class

    Args:
        Inherits args from floating point MobileNetV3
    """
    super().__init__(*args, **kwargs)
    self.quant = QuantStub()
    self.dequant = DeQuantStub()
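# Quantizable wrappers of torchvision models usually route the floating point
# model's forward implementation between the two stubs. A sketch along those
# lines; the _forward_impl name is assumed to come from the float MobileNetV3
# base class and is not shown in the original snippet.
def forward(self, x):
    x = self.quant(x)
    x = self._forward_impl(x)  # inherited float forward, now running on quantized tensors
    x = self.dequant(x)
    return x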
def __init__(self):
    super(ModelWithFunctionals, self).__init__()
    # FloatFunctional wrappers make tensor ops (cat/add/mul) observable so they
    # can be swapped for their quantized counterparts during convert().
    self.mycat = nnq.FloatFunctional()
    self.myadd = nnq.FloatFunctional()
    self.mymul = nnq.FloatFunctional()
    self.myadd_relu = nnq.FloatFunctional()
    self.my_scalar_add = nnq.FloatFunctional()
    self.my_scalar_mul = nnq.FloatFunctional()
    self.quant = QuantStub()
    self.dequant = DeQuantStub()
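# Sketch of how these FloatFunctional instances would be used in forward();
# the forward itself and the tensor shapes are assumptions. Each op goes
# through its wrapper's method instead of a bare torch function, so an
# observer can be attached and the op replaced with its quantized equivalent
# at convert time.
def forward(self, x):
    x = self.quant(x)
    y = self.mycat.cat([x, x], dim=1)
    y = self.myadd.add(y, y)
    y = self.mymul.mul(y, y)
    y = self.myadd_relu.add_relu(y, y)
    y = self.my_scalar_add.add_scalar(y, 1.0)
    y = self.my_scalar_mul.mul_scalar(y, 0.5)
    return self.dequant(y)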
def __init__(self, w, b, m, v):
    super(SimpleQuantizedBatchNormRelu, self).__init__()
    self.bn = torch.nn.BatchNorm3d(4)
    self.relu = torch.nn.ReLU()
    # Load externally supplied affine parameters and running statistics.
    self.bn.weight = torch.nn.Parameter(w)
    self.bn.bias = torch.nn.Parameter(b)
    self.bn.running_mean = m
    self.bn.running_var = v
    self.q = QuantStub()
    self.dq = DeQuantStub()
def __init__(self):
    super().__init__()
    self.act = Act()
    self.quant = QuantStub()
    self.dequant = DeQuantStub()
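# End-to-end eager-mode post-training quantization flow these stub-wrapped
# modules are built for. The wrapper class name, calibration tensor shape, and
# the existence of a forward that routes quant -> act -> dequant are
# illustrative assumptions; the prepare/convert/get_default_qconfig APIs are
# the standard torch.ao.quantization entry points.
import torch
from torch.ao.quantization import get_default_qconfig, prepare, convert

model = ActWrapper().eval()                    # hypothetical class using the __init__ above
model.qconfig = get_default_qconfig("fbgemm")  # x86 server backend
prepared = prepare(model)                      # insert observers around stubs and modules
prepared(torch.randn(8, 3, 32, 32))            # calibrate with representative data
quantized = convert(prepared)                  # swap float modules for quantized versions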