def __init__(self, num_classes, pretrained="imagenet"):
    """Wrap a pretrained Xception backbone with an SE classification head.

    Args:
        num_classes: number of output logits.
        pretrained: weight set forwarded to ``xception`` (e.g. "imagenet").
    """
    super().__init__()
    self.net = xception(pretrained=pretrained)
    # NOTE(review): this pooling layer is attached to the wrapper, not to
    # self.net, so the backbone's own pooling is not replaced here. The head
    # below is sized 2048 * 2 (concat of avg+max), which is only correct if
    # forward() actually routes features through self.avg_pool — confirm
    # against this class's forward().
    self.avg_pool = AdaptiveConcatPool2d()
    self.net.last_linear = nn.Sequential(Flatten(), SEBlock(2048 * 2), nn.Dropout(), nn.Linear(2048 * 2, num_classes))
def __init__(self, num_classes, pretrained="imagenet"):
    """Build an InceptionResNetV2 classifier with concat pooling and an SE head.

    Args:
        num_classes: number of output logits.
        pretrained: weight set forwarded to ``inceptionresnetv2``.
    """
    super().__init__()
    self.net = inceptionresnetv2(pretrained=pretrained)
    # Swap the backbone's final pooling for concat(avg, max) pooling,
    # which doubles the 1536-channel feature vector.
    self.net.avgpool_1a = AdaptiveConcatPool2d()
    head_width = 1536 * 2
    self.net.last_linear = nn.Sequential(
        Flatten(),
        SEBlock(head_width),
        nn.Dropout(),
        nn.Linear(head_width, num_classes),
    )
def __init__(self, num_classes, encoder='efficientnet-b0', pool_type="avg"):
    """Wrap a pretrained EfficientNet with configurable pooling and an SE head.

    Args:
        num_classes: number of output logits.
        encoder: EfficientNet variant name ("efficientnet-b0" .. "-b7").
        pool_type: "concat", "avg", or "gem".

    Raises:
        ValueError: if ``pool_type`` is not one of the supported options.
        KeyError: if ``encoder`` is not a known EfficientNet variant.
    """
    super().__init__()
    # Channel count of the final feature map for each EfficientNet variant.
    n_channels_dict = {
        'efficientnet-b0': 1280, 'efficientnet-b1': 1280,
        'efficientnet-b2': 1408, 'efficientnet-b3': 1536,
        'efficientnet-b4': 1792, 'efficientnet-b5': 2048,
        'efficientnet-b6': 2304, 'efficientnet-b7': 2560,
    }
    self.net = EfficientNet.from_pretrained(encoder)
    n_channels = n_channels_dict[encoder]
    if pool_type == "concat":
        pool = AdaptiveConcatPool2d()
        out_shape = n_channels * 2  # concat(avg, max) doubles the width
    elif pool_type == "avg":
        pool = nn.AdaptiveAvgPool2d((1, 1))
        out_shape = n_channels
    elif pool_type == "gem":
        pool = GeM()
        out_shape = n_channels
    else:
        # BUG FIX: an unknown pool_type previously fell through and left
        # out_shape unbound, crashing below with a confusing NameError.
        raise ValueError("Unsupported pool_type: {!r}".format(pool_type))
    # BUG FIX: the original assigned self.net.avg_pool, which is not the
    # attribute EfficientNet's forward uses (the real one is _avg_pooling,
    # as CustomEfficientNet in this file uses), and left self.avg_pool fixed
    # to plain average pooling regardless of pool_type. Keep both in sync.
    self.avg_pool = pool
    self.net._avg_pooling = pool
    self.classifier = nn.Sequential(
        Flatten(), SEBlock(out_shape), nn.Dropout(), nn.Linear(out_shape, num_classes))
def __init__(self, num_classes, encoder="se_resnext50_32x4d", pretrained="imagenet", pool_type="concat"):
    """Generic classifier wrapper around the backbones in the ``encoders`` registry.

    Replaces the backbone's global pooling layer (the attribute name differs
    per model family) and its final linear layer with a
    Flatten -> SEBlock -> Dropout -> Linear head sized for the chosen pooling.

    Args:
        num_classes: number of output logits.
        encoder: key into the module-level ``encoders`` registry.
        pretrained: weight set forwarded to the encoder constructor.
        pool_type: "concat", "avg", or "gem".

    Raises:
        ValueError: if ``pool_type`` is not supported. (Previously an unknown
            pool_type fell through silently and crashed later with a
            NameError on ``out_shape``.)
    """
    super().__init__()
    self.net = encoders[encoder]["encoder"](pretrained=pretrained)
    pool, out_shape = self._build_pool(pool_type, encoders[encoder]["out_shape"])
    head = nn.Sequential(
        Flatten(), SEBlock(out_shape), nn.Dropout(), nn.Linear(out_shape, num_classes))
    # Pooling and final-linear attribute names differ per backbone family.
    if encoder in ["resnet34", "resnet50"]:
        self.net.avgpool = pool
        self.net.fc = head
    elif encoder == "inceptionresnetv2":
        self.net.avgpool_1a = pool
        self.net.last_linear = head
    else:
        self.net.avg_pool = pool
        self.net.last_linear = head

@staticmethod
def _build_pool(pool_type, base_shape):
    """Return ``(pooling module, flattened feature width)`` for ``pool_type``."""
    if pool_type == "concat":
        return AdaptiveConcatPool2d(), base_shape * 2  # avg + max concatenated
    if pool_type == "avg":
        return nn.AdaptiveAvgPool2d((1, 1)), base_shape
    if pool_type == "gem":
        return GeM(), base_shape
    raise ValueError("Unsupported pool_type: {!r}".format(pool_type))
def __init__(
    self,
    base="efficientnet-b0",
    pool_type="gem",
    in_ch=3,
    out_ch=1,
    pretrained=False,
):
    """EfficientNet with configurable pooling, an SE head, and N input channels.

    Args:
        base: EfficientNet variant ("efficientnet-b0" .. "-b4").
        pool_type: "concat", "avg", or "gem" ("avg" keeps the stock pooling).
        in_ch: number of input channels; must be a positive multiple of 3
            because the pretrained 3-channel stem weights are tiled across it.
        out_ch: number of output logits.
        pretrained: load ImageNet weights when True.

    Raises:
        AssertionError: if ``base`` or ``pool_type`` is unsupported.
        ValueError: if ``in_ch`` is not a multiple of 3.
    """
    super(CustomEfficientNet, self).__init__()
    assert base in {
        "efficientnet-b0",
        "efficientnet-b1",
        "efficientnet-b2",
        "efficientnet-b3",
        "efficientnet-b4",
    }
    assert pool_type in {"concat", "avg", "gem"}
    self.base = base
    self.in_ch = in_ch
    self.out_ch = out_ch
    self.pretrained = pretrained
    if pretrained:
        self.net = EfficientNet.from_pretrained(base)
    else:
        self.net = EfficientNet.from_name(base)
    out_shape = self.net._fc.in_features
    if pool_type == "concat":
        self.net._avg_pooling = AdaptiveConcatPool2d()
        out_shape *= 2  # concat(avg, max) doubles the feature width
    elif pool_type == "gem":
        self.net._avg_pooling = GeM()
    # pool_type == "avg" keeps EfficientNet's built-in average pooling.
    self.net._fc = nn.Sequential(
        Flatten(), SEBlock(out_shape), nn.Dropout(), nn.Linear(out_shape, out_ch))
    if in_ch != 3:
        old_in_ch = 3
        if in_ch % old_in_ch != 0:
            # BUG FIX: a non-multiple of 3 previously produced a stem weight
            # with the wrong channel count and only failed later, at forward
            # time, with an opaque shape error.
            raise ValueError(
                "in_ch must be a multiple of {}, got {}".format(old_in_ch, in_ch))
        old_conv = self.net._conv_stem
        # Tile the pretrained 3-channel stem weights across the new channels.
        new_weight = torch.cat([old_conv.weight] * (in_ch // old_in_ch), dim=1)
        new_conv = nn.Conv2d(
            in_channels=in_ch,
            out_channels=old_conv.out_channels,
            kernel_size=old_conv.kernel_size,
            stride=old_conv.stride,
            padding=old_conv.padding,
            # BUG FIX: nn.Conv2d's ``bias`` parameter is a bool; the original
            # passed the old bias tensor (None for EfficientNet stems), which
            # only worked by accident.
            bias=old_conv.bias is not None,
        )
        self.net._conv_stem = new_conv
        self.net._conv_stem.weight = nn.Parameter(new_weight)
def __init__(self, num_classes, pretrained=False, net_cls=models.densenet121):
    """Wrap a DenseNet backbone with an SE classification head.

    Args:
        num_classes: number of output logits.
        pretrained: load pretrained weights when True.
        net_cls: torchvision model constructor (defaults to densenet121).
    """
    super().__init__()
    self.net = net_cls(pretrained=pretrained)
    # NOTE(review): this pooling layer is attached to the wrapper, not to
    # self.net, so the backbone's built-in pooling is not replaced here. The
    # classifier below is sized 1024 * 2 (concat of avg+max), which is only
    # correct if forward() routes features through self.avg_pool — confirm
    # against this class's forward().
    self.avg_pool = AdaptiveConcatPool2d()
    self.net.classifier = nn.Sequential(Flatten(), SEBlock(1024 * 2), nn.Dropout(), nn.Linear(1024 * 2, num_classes))
def __init__(self, num_classes, pretrained=False, net_cls=models.resnet50):
    """Build a ResNet classifier whose global pooling is concat(avg, max).

    Args:
        num_classes: number of output logits.
        pretrained: load pretrained weights when True.
        net_cls: torchvision model constructor (defaults to resnet50).
    """
    super().__init__()
    self.net = create_net(net_cls, pretrained=pretrained)
    # Concat pooling doubles the 2048-channel ResNet-50 feature vector.
    self.net.avgpool = AdaptiveConcatPool2d()
    head_width = 2048 * 2
    self.net.fc = nn.Sequential(
        Flatten(),
        SEBlock(head_width),
        nn.Dropout(),
        nn.Linear(head_width, num_classes),
    )