def __init__(self, num_classes=10, classifier_activation='softmax'):
    """LeNet feature extractor with an optional classifier head.

    Args:
        num_classes: output size of the classifier; a value <= 0 disables
            the fully-connected head entirely.
        classifier_activation: activation for the head. Only 'softmax' is
            currently honored (TODO: accept any activation).
    """
    super(LeNetDygraph, self).__init__()
    self.num_classes = num_classes
    self.features = nn.Sequential(
        nn.Conv2d(1, 6, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Pool2D(2, 'max', 2),
        nn.Conv2d(6, 16, 5, stride=1, padding=0),
        nn.ReLU(),
        nn.Pool2D(2, 'max', 2))
    if num_classes > 0:
        # Bug fix: the final Linear was hard-coded to 10 outputs, silently
        # ignoring num_classes for any value other than the default.
        self.fc = nn.Sequential(
            nn.Linear(400, 120),
            nn.Linear(120, 84),
            nn.Linear(84, num_classes),
            nn.Softmax())  # TODO: accept any activation via classifier_activation
def __init__(self, block, layers, num_classes=1000):
    """Assemble the ResNet stem, four residual stages, and the classifier.

    Args:
        block: residual block class providing an ``expansion`` attribute.
        layers: number of blocks per stage, e.g. [3, 4, 6, 3].
        num_classes: output size of the final fully-connected layer.
    """
    self.inplanes = 64
    super(ResNet, self).__init__()
    # Stem: 7x7 stride-2 conv -> BN -> ReLU -> 3x3 stride-2 max pool.
    self.conv1 = nn.Conv2D(
        3, 64, kernel_size=7, stride=2, padding=3, bias_attr=False)
    self.bn1 = nn.BatchNorm(64)
    self.relu = nn.ReLU()
    self.maxpool = nn.Pool2D(pool_size=3, pool_stride=2, pool_padding=1)
    # Stage 1 keeps the spatial resolution (no explicit stride).
    self.layer1 = self._make_layer(block, 64, layers[0])
    # Stages 2-4 double the width and halve the spatial size (stride 2).
    stage_cfg = [(128, layers[1]), (256, layers[2]), (512, layers[3])]
    for idx, (width, depth) in enumerate(stage_cfg, start=2):
        setattr(self, 'layer%d' % idx,
                self._make_layer(block, width, depth, stride=2))
    self.avgpool = nn.Pool2D(7, pool_stride=1, pool_type='avg')
    self.fc = nn.Linear(512 * block.expansion, num_classes)
def __init__(self):
    """Build an OFA supernet: elastic LeNet-style conv stack plus linear head."""
    super(Model, self).__init__()
    # Layers instantiated inside this context are converted to their
    # elastic (searchable kernel-size / expand-ratio) counterparts.
    with supernet(kernel_size=(3, 5, 7), expand_ratio=[1, 2, 4]) as ofa_super:
        plain_layers = [
            nn.Conv2D(1, 6, 3),
            ReLU(),
            nn.Pool2D(2, 'max', 2),
            nn.Conv2D(6, 16, 5, padding=0),
            ReLU(),
            nn.Pool2D(2, 'max', 2),
            nn.Linear(784, 120),
            nn.Linear(120, 84),
            nn.Linear(84, 10),
        ]
        elastic_layers = ofa_super.convert(plain_layers)
        self.models = paddle.nn.Sequential(*elastic_layers)
def __init__(self, ni: int, nf: int = None, scale: int = 2,
             blur: bool = False, norm_type='Weight', leaky: float = None):
    """Sub-pixel (PixelShuffle) upsampling block with an optional blur step."""
    super().__init__()
    nf = ifnone(nf, ni)
    # The 1x1 conv expands channels by scale**2 so PixelShuffle can
    # rearrange them into a spatially (scale x) larger feature map.
    expanded = nf * (scale ** 2)
    self.conv = conv_layer(ni, expanded, ks=1,
                           norm_type=norm_type, use_activ=False)
    self.shuf = PixelShuffle(scale)
    # Replication pad + 2x2 stride-1 avg pool: the "blur" trick used to
    # reduce checkerboard artifacts after pixel shuffling.
    self.pad = ReplicationPad2d((1, 0, 1, 0))
    self.blur = nn.Pool2D(2, pool_stride=1, pool_type='avg')
    self.relu = relu(True, leaky=leaky)
def __init__(self, ni: int, nf: int = None, scale: int = 2,
             blur: bool = False, leaky: float = None, **kwargs):
    """Sub-pixel upsampling block built on a custom conv layer.

    Extra keyword arguments are forwarded to ``custom_conv_layer``.
    """
    super().__init__()
    nf = ifnone(nf, ni)
    # Channel expansion by scale**2 feeds the PixelShuffle rearrangement.
    self.conv = custom_conv_layer(
        ni, nf * (scale ** 2), ks=1, use_activ=False, **kwargs)
    self.shuf = PixelShuffle(scale)
    # Pad + 2x2 stride-1 avg pool: anti-checkerboard "blur" step.
    self.pad = ReplicationPad2d((1, 0, 1, 0))
    self.blur = nn.Pool2D(2, pool_stride=1, pool_type='avg')
    # LeakyReLU when a negative slope is supplied, plain ReLU otherwise.
    if leaky is not None:
        self.relu = nn.LeakyReLU(leaky)
    else:
        self.relu = nn.ReLU()
def __init__(self):
    """Fixed LeNet-style convolutional feature extractor (no classifier head)."""
    super(LeNetDygraph, self).__init__()
    # Two conv -> ReLU -> max-pool stages: 1->6 then 6->16 channels.
    feature_layers = [
        nn.Conv2d(1, 6, 3, stride=1, padding=1),
        nn.ReLU(),
        nn.Pool2D(2, 'max', 2),
        nn.Conv2d(6, 16, 5, stride=1, padding=0),
        nn.ReLU(),
        nn.Pool2D(2, 'max', 2),
    ]
    self.features = nn.Sequential(*feature_layers)