Example #1
    def __init__(self, C, num_classes, num_cells, auxiliary, genotype, stem_multiplier=3,
                 in_channels=3, emb_dims=1024, dropout=0.5, k=9):
        super(NetworkModelNet40, self).__init__()
        self._layers = num_cells
        self._auxiliary = auxiliary
        self._in_channels = in_channels
        self.drop_path_prob = 0.  # typically updated per epoch by the training script
        C_curr = stem_multiplier * C
        self.stem = nn.Sequential(
            BasicConv([in_channels, C_curr], None, 'batch', bias=False),
        )

        C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
        self.cells = nn.ModuleList()
        for i in range(self._layers):
            # dilation d grows with depth, widening each cell's k-NN neighborhood
            cell = Cell(genotype, C_prev_prev, C_prev, C_curr, k=k, d=i + 1)
            self.cells.append(cell)
            C_prev_prev, C_prev = C_prev, cell.multiplier * C_curr
            if i == 2 * self._layers // 3:
                C_to_auxiliary = C_prev  # the auxiliary head taps features two-thirds of the way through

        if auxiliary:
            self.auxiliary_head = AuxiliaryHead(C_to_auxiliary, num_classes)
        # fuse the stem output with every cell output, concatenated along the channel axis
        self.fusion_conv = BasicConv([stem_multiplier * C + C * cell.multiplier * self._layers, emb_dims],
                                     act='leakyrelu', norm='batch', bias=False)
        self.classifier = nn.Sequential(BasicConv([emb_dims * 2, 512], act='leakyrelu', norm='batch'),
                                        torch.nn.Dropout(p=dropout),
                                        BasicConv([512, 256], act='leakyrelu', norm='batch'),
                                        torch.nn.Dropout(p=dropout),
                                        BasicConv([256, num_classes], act=None, norm=None))
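For orientation, a hedged instantiation sketch follows. It assumes NetworkModelNet40 and a DARTS-style Genotype namedtuple are importable from this repository; the operation names below are placeholders, not ops from this repo's search space.

# Sketch only: toy_genotype uses placeholder op names, not a searched architecture.
from collections import namedtuple

Genotype = namedtuple('Genotype', 'normal normal_concat')
toy_genotype = Genotype(
    normal=[('op_a', 0), ('op_b', 1)],  # (operation name, input node index) per edge
    normal_concat=range(2, 3),          # intermediate nodes whose outputs are concatenated
)
model = NetworkModelNet40(C=32, num_classes=40, num_cells=3, auxiliary=False,
                          genotype=toy_genotype)
model.drop_path_prob = 0.2  # typically annealed by the training script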
Example #2
    def __init__(self, genotype, C_prev_prev, C_prev, C, k=9, d=1):
        super(Cell, self).__init__()
        self.preprocess0 = BasicConv([C_prev_prev, C], 'relu', 'batch', bias=False)
        self.preprocess1 = BasicConv([C_prev, C], 'relu', 'batch', bias=False)
        self.dilated_knn_graph = DilatedKnn2d(k=k, dilation=d)

        op_names, indices = zip(*genotype.normal)  # split (op name, input index) pairs into parallel tuples
        concat = genotype.normal_concat
        self._compile(C, op_names, indices, concat)
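The zip(*genotype.normal) line splits the genotype's (operation, input index) pairs into two parallel tuples before _compile wires them up. A self-contained illustration of that idiom, with placeholder op names:

from collections import namedtuple

Genotype = namedtuple('Genotype', 'normal normal_concat')
g = Genotype(normal=[('op_a', 0), ('op_b', 1), ('op_c', 0)], normal_concat=range(2, 4))
op_names, indices = zip(*g.normal)
print(op_names)  # ('op_a', 'op_b', 'op_c')
print(indices)   # (0, 1, 0)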
Example #3
    def __init__(self, steps, multiplier, C_prev_prev, C_prev, C, k=9, d=1):
        super(Cell, self).__init__()
        self.preprocess0 = BasicConv([C_prev_prev, C], 'relu', 'batch', bias=False)
        self.preprocess1 = BasicConv([C_prev, C], 'relu', 'batch', bias=False)
        self._steps = steps
        self._multiplier = multiplier
        self.dilated_knn_graph = DilatedKnn2d(k=k, dilation=d)
        self._ops = nn.ModuleList()
        self._bns = nn.ModuleList()

        for i in range(self._steps):
            # node i receives candidate edges from the two cell inputs plus all earlier nodes
            for j in range(2 + i):
                stride = 1  # no spatial reduction inside these cells
                op = MixedOp(C, stride)
                self._ops.append(op)
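The nested loop allocates one MixedOp per edge of the cell's DAG: intermediate node i can draw from the two preprocessed inputs plus the i earlier nodes, giving 2 + i edges. A quick check of the resulting module count:

steps = 4  # the default used by Network in Example #4
num_edges = sum(2 + i for i in range(steps))
print(num_edges)  # 14 -> len(self._ops) for a 4-step cell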
Example #4
    def __init__(self, C, num_classes, num_cells, criterion,
                 steps=4, multiplier=4, stem_multiplier=3,
                 in_channels=3, emb_dims=1024, dropout=0.5, k=9):
        super(Network, self).__init__()
        self._C = C
        self._num_classes = num_classes
        self._layers = num_cells
        self._criterion = criterion
        self._steps = steps
        self._multiplier = multiplier
        self._stem_multiplier = stem_multiplier
        self._in_channels = in_channels
        self._emb_dims = emb_dims
        self._dropout = dropout
        self._k = k
        C_curr = stem_multiplier * C  # e.g. 3 * 32 = 96 stem output channels
        self.stem = nn.Sequential(
            BasicConv([in_channels, C_curr], None, 'batch', bias=False),
        )

        C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
        self.cells = nn.ModuleList()
        for i in range(self._layers):
            # dilation d grows with depth, widening each cell's k-NN neighborhood
            cell = Cell(steps, multiplier, C_prev_prev, C_prev, C_curr, k=k, d=i + 1)
            self.cells.append(cell)
            C_prev_prev, C_prev = C_prev, multiplier * C_curr
        # fuse the stem output with every cell output, concatenated along the channel axis
        self.fusion_conv = BasicConv([stem_multiplier * C + C * multiplier * self._layers, emb_dims],
                                     act='leakyrelu', norm='batch', bias=False)
        self.classifier = nn.Sequential(BasicConv([emb_dims*2, 512], act='leakyrelu', norm='batch'),
                                        torch.nn.Dropout(p=dropout),
                                        BasicConv([512, 256], act='leakyrelu', norm='batch'),
                                        torch.nn.Dropout(p=dropout),
                                        BasicConv([256, num_classes], act=None, norm=None))

        self._initialize_alphas()  # creates self.alphas_normal, the per-edge architecture parameters

        # one entry per edge: -1 means no operation selected yet; True means the edge is still a candidate
        self.normal_selected_idxs = torch.tensor(len(self.alphas_normal) * [-1], requires_grad=False, dtype=torch.int)
        self.normal_candidate_flags = torch.tensor(len(self.alphas_normal) * [True],
                                                   requires_grad=False, dtype=torch.bool)
Example #5
    def __init__(self, C, num_classes):
        super(AuxiliaryHead, self).__init__()
        # conv stack C -> 128 -> 768, followed by a linear classifier
        self.features = nn.Sequential(
            BasicConv([C, 128, 768], 'relu', 'batch', bias=False))
        self.classifier = nn.Linear(768, num_classes)
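A minimal instantiation sketch. The C argument must equal the C_to_auxiliary computed in Example #1 (512 below is a placeholder), and the forward pass, not shown here, is assumed to reduce the 768-channel features to a vector before the linear layer:

head = AuxiliaryHead(C=512, num_classes=40)  # 512 stands in for C_to_auxiliary
print(head.classifier)  # Linear(in_features=768, out_features=40, bias=True)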