def oldResnet34(blk, pretrained=False, progress=True, out_channels=1, **kwargs):
    """Build a ResNet-34-shaped network using the supplied block type.

    Args:
        blk: residual block class passed through to ``_resnet``
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
        out_channels (int): NOTE(review): accepted but never used here —
            presumably kept for signature compatibility; confirm with callers.
    """
    stage_depths = [3, 4, 6, 3]
    return _resnet('resnet34', blk, stage_depths, pretrained, progress, **kwargs)
def resnet50(pretrained=False, fpn=False, frozen=True, exceptions=None,
             classifier=False, progress=True, **kwargs):
    r"""ResNet50 model from
    `Deep Residual Learning for Image Recognition <https://arxiv.org/pdf/1512.03385.pdf>`_

    The keyword argument `norm_layer` defaults to BatchNorm2d. Alternative options
    include `FixedBatchNorm2d` and `FrozenBatchNorm2d`. The former acts always as in
    the evaluation mode without taking gradients. The latter acts as transparent
    without changing input and tracking statistics.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        fpn (bool): Whether to include FPN or not
        frozen (bool): If True, default ``norm_layer`` to FrozenBatchNorm2d
        exceptions (List[str]): keywords of layers not to freeze; defaults to
            ``['layer2', 'layer3', 'layer4']``
        classifier (bool): Whether the backbone keeps a classifier head
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    # Fix: avoid a shared mutable default argument for ``exceptions``.
    if exceptions is None:
        exceptions = ['layer2', 'layer3', 'layer4']
    arch = 'resnet50'  # was a placeholder-free f-string
    kwargs['norm_layer'] = kwargs.get(
        'norm_layer', partial(FrozenBatchNorm2d, eps=1e-5) if frozen else None)
    if fpn:
        return_layers = {'layer1': 0, 'layer2': 1, 'layer3': 2, 'layer4': 3}
    else:
        return_layers = {'layer1': 0, 'layer2': 1, 'layer3': 2, 'layer4': 3,
                         'avgpool': 4}
    # Channel counts of layer1..layer4 outputs for a Bottleneck ResNet-50.
    in_channels_stage2 = 256
    in_channels_list = [
        in_channels_stage2,
        in_channels_stage2 * 2,
        in_channels_stage2 * 4,
        in_channels_stage2 * 8,
    ]
    # NOTE: ``out_channels`` deliberately stays in kwargs (get, not pop),
    # matching the original behavior of forwarding it to ``_resnet`` too.
    out_channels = kwargs.get('out_channels', 256)
    model = _resnet(arch, Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs)
    backbone = Backbone(model, return_layers, in_channels_list, out_channels,
                        fpn=fpn, classifier=classifier)
    backbone.freeze(exceptions)
    return backbone
def __init__(self, k=1):
    """Build a ResNet-50 feature pyramid with width multiplier ``k``.

    Exposes the intermediate stages as named feature blocks so callers can
    extract activations at any depth listed in ``all_feat_names``.
    """
    super(ResNet50, self).__init__()
    model = models._resnet('resnet50', self.block, [3, 4, 6, 3],
                           width_per_group=64 * k,
                           pretrained=False, progress=True)
    # Fold conv/bn/relu into one stem block so indices line up with names.
    stem = nn.Sequential(model.conv1, model.bn1, model.relu)
    self._feature_blocks = nn.ModuleList([
        stem,
        model.maxpool,
        model.layer1,
        model.layer2,
        model.layer3,
        model.layer4,
        model.avgpool,
        Flatten(1),
    ])
    self.all_feat_names = [
        "conv1", "res1", "res2", "res3", "res4", "res5", "res5avg", "flatten",
    ]
    assert len(self.all_feat_names) == len(self._feature_blocks)
def resnext101(pretrained=False, fpn=False, frozen=True, exceptions=None,
               classifier=False, progress=True, **kwargs):
    """Build a ResNeXt-101 backbone, optionally with FPN and frozen BN.

    When ``pretrained`` is True and the configuration is 32x8d, weights come
    from the Facebook WSL hub release; otherwise the torchvision recipe is used.

    Args:
        pretrained (bool): If True, load pre-trained weights (WSL or torchvision)
        fpn (bool): Whether to include FPN or not
        frozen (bool): If True, default ``norm_layer`` to FrozenBatchNorm2d
        exceptions (List[str]): keywords of layers not to freeze; defaults to
            ``['layer2', 'layer3', 'layer4']``
        classifier (bool): Whether the backbone keeps a classifier head
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    # Fix: avoid a shared mutable default argument for ``exceptions``.
    if exceptions is None:
        exceptions = ['layer2', 'layer3', 'layer4']
    kwargs['groups'] = gs = kwargs.get('groups', 32)
    kwargs['width_per_group'] = gw = kwargs.get('width_per_group', 8)
    kwargs['norm_layer'] = kwargs.get(
        'norm_layer', partial(FrozenBatchNorm2d, eps=1e-5) if frozen else None)
    if fpn:
        return_layers = {'layer1': 0, 'layer2': 1, 'layer3': 2, 'layer4': 3}
    else:
        return_layers = {'layer1': 0, 'layer2': 1, 'layer3': 2, 'layer4': 3,
                         'avgpool': 4}
    in_channels_stage2 = 256
    in_channels_list = [
        in_channels_stage2,
        in_channels_stage2 * 2,
        in_channels_stage2 * 4,
        in_channels_stage2 * 8,
    ]
    out_channels = kwargs.get('out_channels', 256)
    if pretrained and gs == 32 and gw == 8:
        # WSL: weakly-supervised pretraining release from facebookresearch.
        WSL = torch.hub.load('facebookresearch/WSL-Images',
                             'resnext101_32x8d_wsl', **kwargs)
        backbone = Backbone(WSL, return_layers, in_channels_list, out_channels,
                            fpn=fpn, classifier=classifier)
    else:
        # torchvision recipe for arbitrary groups/width.
        arch = f"resnext101_{gs}x{gw}d"
        model = _resnet(arch, Bottleneck, [3, 4, 23, 3], pretrained, progress,
                        **kwargs)
        backbone = Backbone(model, return_layers, in_channels_list, out_channels,
                            fpn=fpn, classifier=classifier)
    backbone.freeze(exceptions)
    return backbone
def resnet18(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    depths = [2] * 4  # two BasicBlocks per stage
    return _resnet('resnet18', BasicBlock, depths, pretrained, progress, **kwargs)
def resnet34(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNet-34 model (distillation-aware basic blocks).

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    depths = [3, 4, 6, 3]
    return _resnet('resnet34', DistillerBasicBlock, depths,
                   pretrained, progress, **kwargs)
def resnet152(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNet-152 model (distillation-aware bottlenecks).

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    depths = [3, 8, 36, 3]
    return _resnet('resnet152', DistillerBottleneck, depths,
                   pretrained, progress, **kwargs)
def resnet22(pretrained=False, progress=True, **kwargs):
    r"""ResNet-22 model, a shallow variant in the spirit of
    `"Deep Residual Learning for Image Recognition"
    <https://arxiv.org/pdf/1512.03385.pdf>`_

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    depths = [2, 3, 4, 2]
    return resnet._resnet('resnet22', resnet.BasicBlock, depths,
                          pretrained, progress, **kwargs)
def wide_resnet101_2(pretrained=False, progress=True, **kwargs):
    """Constructs a Wide ResNet-101-2 model.

    The model is the same as a plain ResNet-101 except that the bottleneck
    inner width is doubled via ``width_per_group``.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    kwargs['width_per_group'] = 128  # 64 * 2: doubled bottleneck width
    return _resnet('wide_resnet101_2', Bottleneck, [3, 4, 23, 3],
                   pretrained, progress, **kwargs)
def resnext101_32x8d(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNeXt-101 32x8d model (distillation-aware bottlenecks).

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    kwargs.update(groups=32, width_per_group=8)
    return _resnet('resnext101_32x8d', DistillerBottleneck, [3, 4, 23, 3],
                   pretrained, progress, **kwargs)
def get_arch(name, num_classes, **arch_params):
    """Instantiate a model architecture by name.

    Dispatches to EfficientNet builders, a hard-coded ResNet-9 recipe, or the
    ``arch_dict`` registry (raising ``KeyError`` on an unknown name).
    """
    if 'efficientnet' in name.lower():
        return get_efficient_net(name, num_classes=num_classes, **arch_params)
    if name == 'resnet9':
        # One BasicBlock per stage; ``arch_params`` deliberately not forwarded.
        return _resnet('resnet', BasicBlock, pretrained=False, progress=None,
                       num_classes=num_classes, layers=[1, 1, 1, 1])
    return arch_dict[name](num_classes=num_classes, **arch_params)
def resnext50_32x4d(pretrained=False, progress=True, num_classes=1000, **kwargs):
    """Constructs a ResNeXt-50 32x4d model with squeeze-excitation bottlenecks.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
        num_classes (int): size of the classifier output
    """
    kwargs.update(groups=32, width_per_group=4, num_classes=num_classes)
    return _resnet('resnext50_32x4d', SE_Bottleneck, [3, 4, 6, 3],
                   pretrained, progress, **kwargs)
def __init__(self, input_dim, seq_len, num_heads, dim_feedforward,
             num_layers, dropout, num_classes, resnet_params):
    """Transformer encoder followed by a ResNet readout head.

    NOTE(review): ``seq_len`` is accepted but not used in this constructor —
    presumably consumed elsewhere (e.g. positional encoding); confirm.
    """
    super(TransformerModel, self).__init__()
    enc_layer = TransformerEncoderLayer(input_dim, num_heads,
                                        dim_feedforward, dropout)
    self.transformer_encoder = TransformerEncoder(enc_layer, num_layers)
    # ResNet classifier over encoder outputs; stage depths come from the caller.
    self.readout = _resnet('resnet', BasicBlock, pretrained=False,
                           progress=None, num_classes=num_classes,
                           **resnet_params)
def x101_32x8d(dev):
    """Build a pretrained ResNeXt-101 32x8d with frozen batch-norm, move it to
    ``dev``, switch to eval mode, print its structure, and return it."""
    from torchvision.models.resnet import _resnet
    from torchvision.models.resnet import Bottleneck
    from torchvision.ops.misc import FrozenBatchNorm2d
    groups, width = 32, 8
    arch = f"resnext101_{groups}x{width}d"
    # FrozenBatchNorm2d keeps pretrained statistics fixed for inference.
    model = _resnet(arch, Bottleneck, [3, 4, 23, 3], True, True,
                    groups=groups, width_per_group=width,
                    norm_layer=FrozenBatchNorm2d)
    model.to(dev).eval()
    print(model)
    return model
def __init__(self, pretrained=True):
    """Shallow grayscale ResNet-18 variant for 5-way sequence classification.

    NOTE(review): ``pretrained`` is accepted but the backbone is always built
    from scratch (pretrained=False below) — confirm this is intentional.
    """
    super().__init__(num_classes=5, sequence_length=8, contains_dropout=False)
    self.resnet = _resnet("resnet18", BasicBlock, [1, 1, 1, 1],
                          pretrained=False, progress=True, num_classes=1000)
    # Replace the stock 7x7 RGB stem with a single-channel 3x3, stride-1 stem.
    self.resnet.conv1 = nn.Conv2d(1, 64, kernel_size=3, stride=1,
                                  padding=1, bias=False)
    # Drop the last stage and re-head for 5 classes on 256-dim features.
    self.resnet.layer4 = nn.Identity()
    self.resnet.fc = nn.Linear(256, 5)
def macresnet_encoder(model='resnet50', pretrained=True, progress=True,
                      batch_norm=True, desc_layers=None):
    """Build a MAC descriptor encoder on top of a torchvision ResNet.

    Args:
        model (str): backbone name; currently only 'resnet50' is supported
        pretrained (bool): If True, load ImageNet-pretrained backbone weights
        progress (bool): If True, displays a download progress bar
        batch_norm (bool): use BatchNorm2d if True, else Identity (no norm)
        desc_layers (List[int]): stage indices to extract descriptors from;
            defaults to ``[2, 3]``

    Raises:
        NotImplementedError: if ``model`` has no registered configuration.
    """
    # Fix: avoid a shared mutable default argument for ``desc_layers``.
    if desc_layers is None:
        desc_layers = [2, 3]
    model_to_config = {
        'resnet50': (resnet.Bottleneck, [3, 4, 6, 3]),
    }
    if model not in model_to_config:
        raise NotImplementedError(f'MACResNet not implemented for {model}')
    block, layers = model_to_config[model]
    norm_layer = nn.BatchNorm2d if batch_norm else nn.Identity
    source_resnet = resnet._resnet(model, block, layers, pretrained, progress,
                                   norm_layer=norm_layer)
    return MACResNet(source_resnet, desc_layers)
def _create(self, name, **encoder_kwargs):
    """Instantiate a torchvision ResNet encoder by name.

    Architectures not listed in ``_basic_block_layers`` are built via their
    torchvision factory function. Listed ones need ``BasicBlockWithDilation``
    because the stock BasicBlock prohibits dilation; the first block of each
    dilated stage is then patched by hand.
    """
    if name not in _basic_block_layers:  # idiom: no need for .keys()
        factory = getattr(resnet, name)
        return factory(**encoder_kwargs)
    # Special case due to prohibited dilation in the original BasicBlock.
    pretrained = encoder_kwargs.pop('pretrained', False)
    progress = encoder_kwargs.pop('progress', True)
    model = resnet._resnet(name, BasicBlockWithDilation,
                           _basic_block_layers[name], pretrained, progress,
                           **encoder_kwargs)
    replace_stride_with_dilation = encoder_kwargs.get(
        'replace_stride_with_dilation', (False, False, False))
    assert len(replace_stride_with_dilation) == 3
    # Patch conv2 of the first block in each dilated stage; the repeated
    # per-stage branches are folded into one loop.
    stages = (model.layer2, model.layer3, model.layer4)
    for dilate, stage in zip(replace_stride_with_dilation, stages):
        if dilate:
            stage[0].conv2.padding = (2, 2)
            stage[0].conv2.dilation = (2, 2)
    return model
def resnext18_32x4d(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNeXt-18 32x4d model.

    NOTE(review): ``pretrained`` and ``progress`` are accepted but ignored —
    the call hard-codes False, False (presumably no pretrained weights exist
    for this variant); confirm intent with the original author.
    """
    kwargs.update(groups=32, width_per_group=4)
    return _resnet("resnext18_32x4d", Bottleneck, [2, 2, 2, 2],
                   False, False, **kwargs)
def wide_resnet50_5(pretrained=False, progress=True, **kwargs):
    """Constructs a Wide ResNet-50 model with 5x bottleneck width.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    kwargs['width_per_group'] = 320  # 64 * 5: quintupled bottleneck width
    return _resnet('wide_resnet50_5', Bottleneck, [3, 4, 6, 3],
                   pretrained, progress, **kwargs)
def preresnet34(pretrained=False, progress=True, **kwargs):
    """Constructs a PreResNet-34 model for the ILSVRC12 dataset.

    (Docstring fix: previously claimed "PreResNet-50", but the [3, 4, 6, 3]
    configuration with pre-activation *basic* blocks is the 34-layer variant.)

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    model = _resnet('preresnet34', PreActBasicBlock, [3, 4, 6, 3],
                    pretrained, progress, **kwargs)
    model.apply(weights_init)
    return model
def preresnet101(pretrained=False, progress=True, **kwargs):
    """Constructs a PreResNet-101 model for the ILSVRC12 dataset.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    net = _resnet('preresnet101', PreActBottleneck, [3, 4, 23, 3],
                  pretrained, progress, **kwargs)
    net.apply(weights_init)
    return net