def __init__(self):
    """Build the Generator's layer stack: a conv encoder (8->32->64->128
    channels), 6 repeated BasicBlock(128, 128) blocks, a transposed-conv
    decoder (128->64->32), and a final conv mapping 32->3 channels.

    NOTE(review): the actual wiring lives in forward(), which is not
    visible here — the encoder/decoder reading above is inferred from the
    channel counts and should be confirmed against forward().
    """
    super(Generator, self).__init__()
    # Encoder. The 8 input channels suggest an RGB image concatenated with
    # extra condition planes — TODO confirm against the caller.
    self.conv1 = Conv2d(8, 32, kernel_size=7, stride=1)
    self.conv2 = Conv2d(32, 64, kernel_size=3, stride=2)
    self.conv3 = Conv2d(64, 128, kernel_size=3, stride=2)
    # Output projection: 32-channel decoder features -> 3-channel image.
    self.conv4 = Conv2d(32, 3, kernel_size=7, stride=1)
    # bn1..bn3 match conv1..conv3 output widths; bn4/bn5 match deconv1/deconv2
    # output widths (presumably applied after the deconvs — verify in forward()).
    self.bn1 = nn.BatchNorm2d(32, eps=0.001, track_running_stats=True)
    self.bn2 = nn.BatchNorm2d(64, eps=0.001, track_running_stats=True)
    self.bn3 = nn.BatchNorm2d(128, eps=0.001, track_running_stats=True)
    self.bn4 = nn.BatchNorm2d(64, eps=0.001, track_running_stats=True)
    self.bn5 = nn.BatchNorm2d(32, eps=0.001, track_running_stats=True)
    # Residual bottleneck: the same BasicBlock structure repeated 6 times.
    self.repeat_blocks = self._make_repeat_blocks(BasicBlock(128, 128), 6)
    # Decoder. NOTE(review): deconv1 uses padding=1 with no output_padding
    # while deconv2 uses padding=0 with output_padding=1 — asymmetric; the
    # resulting spatial sizes should be checked against conv2/conv3's.
    self.deconv1 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1)
    self.deconv2 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=0,
                                      output_padding=1)
    self.relu = nn.ReLU()
    self.tanh = nn.Tanh()
def __init__(self):
    """Five stride-2 4x4 conv stages with a shared LeakyReLU(0.2).

    BatchNorm is attached to stages 2-4 only (none on the first or last
    conv). conv2 expects 69 input channels rather than conv1's 64 outputs
    — presumably extra condition planes are concatenated in forward();
    TODO confirm against the caller.
    """
    super(PatchDiscriminator, self).__init__()

    def norm(channels):
        # Shared BatchNorm configuration for every normalized stage.
        return nn.BatchNorm2d(channels, eps=0.001, track_running_stats=True)

    self.lrelu = nn.LeakyReLU(0.2)
    self.conv1 = Conv2d(3, 64, kernel_size=4, stride=2)
    self.conv2 = Conv2d(69, 128, kernel_size=4, stride=2)
    self.bn2 = norm(128)
    self.conv3 = Conv2d(128, 256, kernel_size=4, stride=2)
    self.bn3 = norm(256)
    self.conv4 = Conv2d(256, 512, kernel_size=4, stride=2)
    self.bn4 = norm(512)
    self.conv5 = Conv2d(512, 512, kernel_size=4, stride=2)
def __init__(self, pretrainded=False, modelpath=None):
    """Five stride-2 4x4 conv stages with BatchNorm on the middle three.

    Args:
        pretrainded: (sic — name kept so keyword callers keep working)
            when truthy, load weights from ``modelpath``.
        modelpath: path handed to ``load_pretrained_params``.

    conv2 expects 69 input channels vs conv1's 64 outputs — presumably
    condition planes are concatenated in forward(); TODO confirm.
    """
    super(PatchDiscriminator, self).__init__()
    self.lrelu = nn.LeakyReLU(0.2)
    self.conv1 = Conv2d(3, 64, kernel_size=4, stride=2)
    self.conv2 = Conv2d(69, 128, kernel_size=4, stride=2)
    self.bn2 = nn.BatchNorm2d(128, eps=0.001, track_running_stats=True)
    # Middle stages share the same conv/BN shape; build them in one pass.
    # Registration order (conv3, bn3, conv4, bn4) matches the original.
    for idx, (cin, cout) in ((3, (128, 256)), (4, (256, 512))):
        setattr(self, "conv%d" % idx, Conv2d(cin, cout, kernel_size=4, stride=2))
        setattr(self, "bn%d" % idx,
                nn.BatchNorm2d(cout, eps=0.001, track_running_stats=True))
    self.conv5 = Conv2d(512, 512, kernel_size=4, stride=2)
    if pretrainded:
        self.load_pretrained_params(modelpath)
def __init__(self, pretrainded=False, modelpath=None):
    """AlexNet-style feature extractor with a 5-way age classification head.

    Args:
        pretrainded: (sic — parameter name kept for caller compatibility)
            when truthy, load weights from ``modelpath`` via
            ``load_pretrained_params``.
        modelpath: path to the pretrained parameter file; required when
            ``pretrainded`` is truthy.

    Raises:
        ValueError: if ``pretrainded`` is truthy but ``modelpath`` is None.
    """
    super(AgeAlexNet, self).__init__()
    # Explicit raise instead of the original ``assert``: asserts are
    # stripped under ``python -O``, which would silently skip validation.
    if pretrainded and modelpath is None:
        raise ValueError("pretrain model need to be specified")
    self.features = nn.Sequential(
        Conv2d(3, 96, kernel_size=11, stride=4),
        nn.ReLU(inplace=True),
        nn.MaxPool2d(kernel_size=3, stride=2),
        nn.LocalResponseNorm(2, 2e-5, 0.75),
        Conv2d(96, 256, kernel_size=5, stride=1, groups=2),
        nn.ReLU(inplace=True),
        nn.MaxPool2d(kernel_size=3, stride=2),
        nn.LocalResponseNorm(2, 2e-5, 0.75),
        Conv2d(256, 384, kernel_size=3, stride=1),
        nn.ReLU(inplace=True),
        Conv2d(384, 384, kernel_size=3, stride=1, groups=2),
        nn.ReLU(inplace=True),
        Conv2d(384, 256, kernel_size=3, stride=1, groups=2),
        nn.ReLU(inplace=True),
        nn.MaxPool2d(kernel_size=3, stride=2),
    )
    self.age_classifier = nn.Sequential(
        nn.Dropout(),
        nn.Linear(256 * 6 * 6, 4096),
        nn.ReLU(inplace=True),
        nn.Dropout(),
        nn.Linear(4096, 4096),
        nn.ReLU(inplace=True),
        nn.Linear(4096, 5),
    )
    # Truthy check (was ``is True``) for consistency with the rest of the
    # file; load before building the feature taps below, as before.
    if pretrainded:
        self.load_pretrained_params(modelpath)
    # Intermediate feature taps into ``self.features``. Submodule names
    # deliberately reuse the original Sequential indices so state_dict
    # keys keep matching any pretrained checkpoints.
    self.Conv3_feature_module = nn.Sequential()
    self.Conv4_feature_module = nn.Sequential()
    self.Conv5_feature_module = nn.Sequential()
    self.Pool5_feature_module = nn.Sequential()
    for x in range(10):
        self.Conv3_feature_module.add_module(str(x), self.features[x])
    for x in range(10, 12):
        self.Conv4_feature_module.add_module(str(x), self.features[x])
    for x in range(12, 14):
        self.Conv5_feature_module.add_module(str(x), self.features[x])
    # Single layer — the original one-iteration range(14, 15) loop inlined.
    self.Pool5_feature_module.add_module("14", self.features[14])
def __init__(self):
    """Region Proposal Network head on top of a VGG16 backbone."""
    super(RPN, self).__init__()
    self.features = VGG16(bn=False)
    self.conv1 = Conv2d(512, 512, 3, same_padding=True)
    # len(anchor_scales) * 3 anchors per location (the 3 is presumably the
    # number of aspect ratios — confirm against the anchor generator);
    # score head emits 2 values per anchor, bbox head 4.
    anchors_per_loc = len(self.anchor_scales) * 3
    self.score_conv = Conv2d(512, anchors_per_loc * 2, 1,
                             relu=False, same_padding=False)
    self.bbox_conv = Conv2d(512, anchors_per_loc * 4, 1,
                            relu=False, same_padding=False)
    # Loss placeholders, populated elsewhere.
    self.cross_entropy = None
    self.los_box = None  # NOTE(review): likely a typo for ``loss_box`` —
    # kept unchanged since other code may read this attribute name.