def make_layers(cfg, batch_norm=False):
    # Build the VGG feature extractor from a config list: integers are the
    # output channels of a 3x3 conv, 'M' inserts a 2x2 max-pooling stage.
    layers = []
    in_channels = 3
    for v in cfg:
        if v == 'M':
            layers += [my_MaxPool2d(kernel_size=2, stride=2)]
        else:
            conv2d = my_Conv2d(in_channels, v, kernel_size=3, padding=1)
            if batch_norm:
                layers += [conv2d, my_BatchNorm2d(v), my_ReLU(inplace=True)]
            else:
                layers += [conv2d, my_ReLU(inplace=True)]
            in_channels = v
    return nn.Sequential(*layers)
def __init__(self, features, num_classes=1000, init_weights=True):
    super(my_VGG, self).__init__(features, num_classes=num_classes,
                                 init_weights=init_weights)
    self.features = features
    # self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
    self.avgpool = my_AvgPool2d(kernel_size=1, stride=1)
    # Classifier head operating on the flattened 512 x 7 x 7 feature map.
    self.classifier = nn.Sequential(
        my_Linear(512 * 7 * 7, 4096),
        # my_Linear(36, 4096),
        my_ReLU(True),
        nn.Dropout(),
        my_Linear(4096, 4096),
        my_ReLU(True),
        nn.Dropout(),
        my_Linear(4096, num_classes),
    )
    self._layers = None
    self._verbose = True
    if init_weights:
        self._initialize_weights()
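A minimal construction sketch, assuming make_layers and my_VGG from the code above; the VGG-16 style config list and the cfg_vgg16/model names are illustrative and not part of the original source:

# Hypothetical usage: build the feature stack from a torchvision-style
# VGG-16 layout and wrap it in the custom my_VGG model.
cfg_vgg16 = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M',
             512, 512, 512, 'M', 512, 512, 512, 'M']
model = my_VGG(make_layers(cfg_vgg16, batch_norm=True), num_classes=1000)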
def __init__(self, in_channels, num_classes, conv_block=None):
    super(InceptionAux, self).__init__()
    if conv_block is None:
        conv_block = BasicConv2d
    # Auxiliary classifier: pool to 4x4, reduce to 128 channels with a 1x1
    # conv, then classify from the flattened 128 * 4 * 4 = 2048 features.
    self.avgpool = my_AdaptiveAvgPool2d((4, 4))
    self.conv = conv_block(in_channels, 128, kernel_size=1)
    self.relu = my_ReLU(inplace=True)
    self.fc1 = my_Linear(2048, 1024)
    self.fc2 = my_Linear(1024, num_classes)
    self._mode = 0
def __init__(self, in_channels, out_channels, **kwargs):
    super(BasicConv2d, self).__init__()
    # Conv (no bias) -> BatchNorm -> ReLU block; extra conv arguments such as
    # kernel_size, stride, and padding are forwarded through **kwargs.
    self.conv = my_Conv2d(in_channels, out_channels, bias=False, **kwargs)
    self.bn = my_BatchNorm2d(out_channels, eps=0.001)
    self.relu = my_ReLU(inplace=True)
    self._mode = 0
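A minimal usage sketch, assuming these modules mirror their torchvision GoogLeNet counterparts; the channel counts and the reduce_block/aux_head names below are assumptions for illustration only:

# Hypothetical 1x1 channel-reduction block: conv (no bias) -> batch norm -> ReLU
reduce_block = BasicConv2d(480, 128, kernel_size=1)
# Hypothetical auxiliary head attached to a 512-channel intermediate feature map
aux_head = InceptionAux(512, num_classes=1000)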