def __init__(self, arch, norm, pre=False):
    """Build a ResNeXt-50/ResNeSt-50 encoder with a custom pooled head.

    Args:
        arch: backbone identifier, 'ResX50' (ResNeXt-50 32x4d) or 'ResS50'
            (ResNeSt-50).
        norm: head normalization: 'GN' (GroupNorm), 'BN' (BatchNorm1d), or
            anything else for no normalization.
        pre: whether to load pretrained weights (used by the ResNeSt branch only).

    Raises:
        ValueError: if `arch` is not one of the supported identifiers.
    """
    super().__init__()
    if arch == 'ResX50':
        m = ResNet(Bottleneck, [3, 4, 6, 3], groups=32, width_per_group=4)
        print('Loaded Model RexNextssl')
    elif arch == 'ResS50':
        m = resnest50(pretrained=pre)
        print('Loaded model ResNest')
    else:
        # Previously an unrecognized arch fell through and crashed later with
        # a confusing NameError on `m`; fail fast with a clear message instead.
        raise ValueError(
            f"Unsupported arch {arch!r}: expected 'ResX50' or 'ResS50'")
    blocks = [*m.children()]
    # Encoder: all backbone stages, dropping the final avgpool and fc layers.
    self.enc = nn.Sequential(*blocks[:-2])
    C = blocks[-1].in_features  # input width of the (discarded) fc layer
    head = [
        AdaptiveConcatPool2d(),
        Flatten(),  # bs x 2*C (concat of avg- and max-pool)
        nn.Linear(2 * C, 512),
        Mish(),
    ]
    if norm == 'GN':
        head.append(nn.GroupNorm(32, 512))
        print('Group Norm')
    elif norm == 'BN':
        head.append(nn.BatchNorm1d(512))
        print('Batch Norm')
    else:
        print('No Norm')
    head.append(nn.Dropout(0.5))
    # NUM_CLASSES - 1 outputs — presumably an ordinal-regression style
    # target encoding; TODO confirm against the loss/caller.
    head.append(nn.Linear(512, NUM_CLASSES - 1))
    self.head = nn.Sequential(*head)
def __init__(self, base_resnet: ResNet, n_classes: int):
    """Wrap a ResNet as a feature extractor plus a fresh linear classifier.

    Keeps every child module except the final fc layer as `activations`,
    then flattens and maps to `n_classes` logits.
    """
    super().__init__()
    # Split off the trailing fc layer; everything before it is the trunk.
    *feature_layers, _fc = base_resnet.children()
    self.activations = nn.Sequential(*feature_layers)
    self.flat = Flatten()
    self.fc = nn.Linear(base_resnet.fc.in_features, n_classes)
def __init__(self, raw_model_dir, use_flow, logger):
    # Build a ResNet-50 backbone restored from the torchvision pretrained
    # checkpoint; optionally build a second (optical-flow) input stem.
    super(BackboneModel, self).__init__()
    self.use_flow = use_flow
    model = ResNet(Bottleneck, [3, 4, 6, 3])  # ResNet-50 block layout
    model.load_state_dict(
        model_zoo.load_url(model_urls['resnet50'], model_dir=raw_model_dir))
    logger.info('Model restored from pretrained resnet50')
    # Feature trunk: every child except the final avgpool and fc layers.
    self.feature = nn.Sequential(*list(model.children())[:-2])
    # Backbone parameters kept separately — presumably for optimizer
    # parameter groups / differential learning rates; verify against caller.
    self.base = list(self.feature.parameters())
    if self.use_flow:
        # NOTE(review): source formatting was collapsed; it looks like the
        # two-stream setup (flow stem, RGB stem, shared deeper trunk) is
        # built only when use_flow is set — confirm the original indentation.
        self.flow_branch = self.get_flow_branch(model)
        # RGB stem: the first four children (conv1/bn1/relu/maxpool).
        self.rgb_branch = nn.Sequential(model.conv1, model.bn1, model.relu,
                                        model.maxpool)
        # Shared trunk after the stems: children [4:-2] (the residual stages).
        self.fuse_branch = nn.Sequential(*list(model.children())[4:-2])
    # Feature dimensionality, read from the (unused) fc layer's input width.
    self.fea_dim = model.fc.in_features
def __init__(self, base_resnet: ResNet, n_classes: int):
    """Truncate a ResNet three children from the end and attach a bias-free
    linear classifier over 14x14 average-pooled features.

    The output channel count of the truncated trunk is inferred by probing
    the sublayers of its last residual block.
    """
    super().__init__()
    trunk = list(base_resnet.children())[:-3]
    self.activations = nn.Sequential(*trunk)
    # Probe the tail of the last residual block for its channel count.
    tail = list(self.activations[-1][-1].children())
    probe = tail[-1]
    if isinstance(probe, nn.BatchNorm2d):
        channels = probe.num_features
    elif isinstance(probe, nn.Conv2d):
        channels = probe.out_channels
    else:
        # Neither a conv nor a norm at the very end: fall back to the
        # second-to-last sublayer (assumed to expose num_features).
        channels = tail[-2].num_features
    self.gap = nn.Sequential(nn.AvgPool2d(14, 14), Flatten())
    self.fc = nn.Linear(channels, n_classes, bias=False)