# Example 1
class FurnitureInceptionV4_350_FC(Module):
    """InceptionV4 backbone (1000-way logits kept) followed by a new 1000 -> 128 head."""

    def __init__(self, pretrained=True):
        super(FurnitureInceptionV4_350_FC, self).__init__()

        # Backbone keeps its original 1000-class classifier; only pooling is swapped
        # for an adaptive one so larger inputs (350px) are supported.
        self.model = inceptionv4(num_classes=1000, pretrained=pretrained)
        self.model.avg_pool = AdaptiveAvgPool2d(1)
        self.final_classifier = Linear(1000, 128)
        self.dropout = Dropout(p=0.5)
        self.relu = ReLU(inplace=True)

        # Re-initialize only the newly added head.
        for module in self.final_classifier.modules():
            if not isinstance(module, Linear):
                continue
            module.weight.data.normal_(0, 0.01)
            module.bias.data.zero_()

        # Aliases over the backbone (e.g. for per-group optimizer settings):
        backbone = self.model.features
        self.stem = ModuleList([backbone[i] for i in range(3)])
        self.features = ModuleList([backbone[i] for i in range(3, len(backbone))])
        self.classifier = self.model.last_linear

    def forward(self, x):
        out = self.model.features(x)
        out = self.model.logits(out)            # 1000-way backbone logits
        out = self.dropout(self.relu(out))
        return self.final_classifier(out)       # project to 128 classes
# Example 2
class FurnitureSqueezeNetOnCrops(Module):
    """SqueezeNet 1.1 features shared across crops; per-crop conv heads summed into a 128-way classifier."""

    def __init__(self, pretrained=True, n_crops=6):
        super(FurnitureSqueezeNetOnCrops, self).__init__()
        self.features = squeezenet1_1(pretrained=pretrained).features

        heads = []
        for _ in range(n_crops):
            # The final convolution is initialized differently from the rest.
            conv = Conv2d(512, 512, kernel_size=1, bias=False)
            heads.append(
                Sequential(Dropout(p=0.5), conv, ReLU(inplace=True),
                           AdaptiveAvgPool2d(1)))
            for module in conv.modules():
                normal_(module.weight, mean=0.0, std=0.01)
                if module.bias is not None:
                    constant_(module.bias, 0.0)

        self.crop_classifiers = ModuleList(heads)
        self.final_classifier = Linear(512, 128)

        for module in self.final_classifier.modules():
            normal_(module.weight, mean=0.0, std=0.01)
            if module.bias is not None:
                constant_(module.bias, 0.0)

    def forward(self, crops):
        # crops: (batch, n_crops, C, H, W) — each crop goes through the shared
        # backbone and its own head, then the pooled vectors are summed.
        batch_size, n_crops = crops.shape[0], crops.shape[1]
        pooled = [
            self.crop_classifiers[i](self.features(crops[:, i])).view(batch_size, -1)
            for i in range(n_crops)
        ]
        return self.final_classifier(sum(pooled))
class FurnitureInceptionResNetOnFiveCrops(Module):
    """InceptionResNetV2 run on five crops; per-crop linear heads are summed before a 128-way classifier."""

    def __init__(self, pretrained=True, n_cls_layers=1024):
        super(FurnitureInceptionResNetOnFiveCrops, self).__init__()

        self.model = inceptionresnetv2(num_classes=1000, pretrained=pretrained)
        self.model.avgpool_1a = AdaptiveAvgPool2d(1)

        heads = []
        for _ in range(5):  # one linear head per crop
            head = Linear(1536, n_cls_layers)
            heads.append(head)
            for module in head.modules():
                if isinstance(module, Linear):
                    normal_(module.weight, 0, 0.01)
                    constant_(module.bias, 0.0)

        # Aliases over the backbone (e.g. for per-group optimizer settings):
        self.stem = ModuleList([
            self.model.conv2d_1a,
            self.model.conv2d_2a,
            self.model.conv2d_2b,
        ])
        self.low_features = ModuleList([
            self.model.mixed_5b, self.model.repeat, self.model.mixed_6a,
            self.model.repeat_1
        ])
        self.features = ModuleList([
            self.model.mixed_7a, self.model.repeat_2, self.model.block8,
            self.model.conv2d_7b
        ])
        self.crop_classifiers = ModuleList(heads)

        self.drop = Dropout(p=0.45)
        self.relu = ReLU(inplace=True)

        self.final_classifier = Linear(n_cls_layers, 128)
        for module in self.final_classifier.modules():
            normal_(module.weight, mean=0.0, std=0.01)
            if module.bias is not None:
                constant_(module.bias, 0.0)

    def logits(self, index, features):
        # Pool, flatten, then apply the head belonging to crop `index`.
        pooled = self.model.avgpool_1a(features)
        flat = pooled.view(pooled.size(0), -1)
        return self.crop_classifiers[index](flat)

    def forward(self, crops):
        # crops: (batch, n_crops, C, H, W); per-crop head outputs are summed.
        n_crops = crops.shape[1]
        total = self.logits(0, self.model.features(crops[:, 0]))
        for i in range(1, n_crops):
            total = total + self.logits(i, self.model.features(crops[:, i]))
        return self.final_classifier(self.drop(self.relu(total)))
class FurnitureModelOnCrops(Module):
    """Injected backbone applied to six crops; per-crop MLP heads are summed before a 128-way classifier."""

    def __init__(self, features, featuremap_output_size, n_cls_layers=512):
        super(FurnitureModelOnCrops, self).__init__()

        self.base_features = features
        self.avgpool = AdaptiveAvgPool2d(1)

        heads = []
        for _ in range(6):  # one MLP head per crop
            head = Sequential(
                ReLU(),
                Linear(featuremap_output_size, n_cls_layers),
                ReLU(),
                Dropout(p=0.4),
            )
            heads.append(head)
            for module in head.modules():
                if isinstance(module, Linear):
                    normal_(module.weight, 0, 0.01)
                    constant_(module.bias, 0.0)

        self.crop_classifiers = ModuleList(heads)

        self.final_classifier = Linear(n_cls_layers, 128)
        for module in self.final_classifier.modules():
            normal_(module.weight, mean=0.0, std=0.01)
            if module.bias is not None:
                constant_(module.bias, 0.0)

    def logits(self, index, features):
        # Pool, flatten, then apply the head belonging to crop `index`.
        pooled = self.avgpool(features)
        return self.crop_classifiers[index](pooled.view(pooled.size(0), -1))

    def forward(self, crops):
        # crops: (batch, n_crops, C, H, W); per-crop head outputs are summed.
        n_crops = crops.shape[1]
        total = self.logits(0, self.base_features(crops[:, 0]))
        for i in range(1, n_crops):
            total = total + self.logits(i, self.base_features(crops[:, i]))
        return self.final_classifier(total)