Code example #1
File: PN.py  Project: lwj2018/islr-few-shot
    def forward(self, support, queries, mode='train'):
        if mode == 'train':
            way = self.train_way
            query = self.query
        else:
            way = self.test_way
            query = self.query_val
        # Concatenate
        x = torch.cat([support, queries], 0)
        # Embed all samples
        embeddings = self.baseModel(x)

        # Samples are ordered by the NShotWrapper class with all support
        # samples first, then all query samples (interleaved by class within
        # each block; see the reshape/permute in example #4)
        support = embeddings[:self.shot * way]
        queries = embeddings[self.shot * way:]
        prototypes = compute_prototypes(support, way, self.shot)
        # Calculate (negative) squared distances between all queries and all prototypes
        # Output shape: (q_queries * k_way, k_way) = (num_queries, k_way)
        distances = euclidean_metric(queries, prototypes)

        # Calculate log p_{phi}(y = k | x) as a softmax over the metric scores
        y_pred = distances.log_softmax(dim=1)
        label = create_nshot_task_label(way, query).cuda()
        return y_pred, label
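The helpers called throughout these examples (euclidean_metric, compute_prototypes, create_nshot_task_label) are not shown in the snippets. The sketch below is an assumed reconstruction, consistent with how the callers use the results, not the repository's actual code. The key assumptions: euclidean_metric returns negative squared distances, which is why log_softmax above is applied without negation (the nearest prototype gets the highest log-probability), and samples are ordered shot-major, matching the reshape(self.shot, way, -1) calls in examples #3 and #4.

import torch


def euclidean_metric(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    """Assumed definition: negative squared Euclidean distance between every
    row of `a` (n, d) and every row of `b` (m, d), returned as (n, m) scores.
    Returning the *negative* distance makes softmax/log_softmax favour the
    nearest prototype, matching how the callers use the result."""
    n, m = a.shape[0], b.shape[0]
    a = a.unsqueeze(1).expand(n, m, -1)
    b = b.unsqueeze(0).expand(n, m, -1)
    return -((a - b) ** 2).sum(dim=2)


def compute_prototypes(support: torch.Tensor, way: int, shot: int) -> torch.Tensor:
    """Assumed definition: average the `shot` embeddings of each class into a
    single prototype, using the shot-major ordering these examples rely on.
    Output shape: (way, embedding_dim)."""
    return support.reshape(shot, way, -1).mean(dim=0)


def create_nshot_task_label(way: int, query: int) -> torch.Tensor:
    """Assumed definition: labels for class-interleaved queries, the same
    pattern example #3 builds inline with torch.arange(way).repeat(query)."""
    return torch.arange(way).repeat(query)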
Code example #2
    def gfsl_test(self, support, queries):
        # Embed the raw query samples (`support` is assumed to be
        # pre-embedded, since it is not passed through baseModel here)
        queries = self.baseModel(queries)
        # Fully conditional embedding of the support set (bidirectional LSTM
        # with a skip connection), then of the queries conditioned on it
        support, _, _ = self.g(support.unsqueeze(1))
        support = support.squeeze(1)
        queries = self.f(support, queries)
        # Support-query scores, L1-normalized per query, turned into
        # softmax attention over the support set
        distances = euclidean_metric(queries, support)
        distances = F.normalize(distances, 1, dim=1)
        attention = distances.softmax(dim=1)
        return attention
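A brief usage sketch for the gfsl_test above: each attention row sums to 1 over the support set, so predictions can be read off by weighting one-hot support labels (as example #4 does via matching_net_predictions) or, when each support row represents a single class, simply via argmax. All names here (model, support_set, query_batch) are illustrative, not from the repository.

# Illustrative call; assumes one support row per class.
attention = model.gfsl_test(support_set, query_batch)
pred = attention.argmax(dim=1)  # index of the most-attended support class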
Code example #3
File: GCR_expand.py  Project: lwj2018/islr-few-shot
    def forward(self, data_shot, data_query, lab, mode='train'):
        if mode == 'train':
            way = self.train_way
            query = self.query
        else:
            way = self.test_way
            query = self.query_val

        p = self.shot * way
        # Global class ids of the episode's classes, one per class
        # (labels are shot-major, so row 0 covers every class once)
        gt = lab[:p].reshape(self.shot, way)[0, :]
        proto = self.baseModel(data_shot)
        proto = proto.reshape(self.shot, way, -1)

        proto_final = self.induction(proto)

        # shape of global_new is: total_class(100) x z_dim(512)
        # shape of proto_new is: way(20 or 5) x z_dim(512)
        global_new, proto_new = self.registrator(support_set=torch.cat(
            [self.global_base, self.global_novel]),
                                                 query_set=proto_final)
        # shape of the dist_metric is: way x total_class
        logits2 = euclidean_metric(self.extra2(proto_new),
                                   self.extra2(global_new))

        # L1-normalize the way x total_class similarity scores
        similarity = F.normalize(logits2, 1, -1)
        # similarity = logits2
        feature = torch.matmul(
            similarity, torch.cat([self.global_base, self.global_novel]))
        # shape of data_query is: (query x way) x ...
        # shape of feature is: way x f_dim(1600)
        # so the shape of result is (query x way) x way
        q_proto = self.baseModel(data_query)
        logits = euclidean_metric(self.extra1(q_proto), self.extra1(feature))
        label = torch.arange(way).repeat(query).long().cuda()

        gt3 = gt.repeat(query)
        # logits3 = euclidean_metric(proto.reshape(self.shot*way,-1),torch.cat([self.global_base,self.global_novel]))
        logits3 = euclidean_metric(
            self.extra1(q_proto.reshape(query * way, -1)),
            self.extra1(torch.cat([self.global_base, self.global_novel])))

        return logits, label, logits2, gt, logits3, gt3
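The three (logits, label) pairs returned above suggest a multi-head classification objective: episode classification, prototype-to-global registration, and query-vs-global classification. The following training step is only an illustration under that assumption; the actual loss weighting in the repository may differ, and `model`, `data_shot`, `data_query`, `lab` are placeholder names.

import torch.nn.functional as F

logits, label, logits2, gt, logits3, gt3 = model(data_shot, data_query, lab)
# Hypothetical unweighted sum of the three cross-entropy heads
loss = (F.cross_entropy(logits, label)
        + F.cross_entropy(logits2, gt)
        + F.cross_entropy(logits3, gt3))
loss.backward()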
Code example #4
    def forward(self, support, queries, mode='train'):
        if mode == 'train':
            way = self.train_way
            query = self.query
        else:
            way = self.test_way
            query = self.query_val
        # Concatenate
        x = torch.cat([support, queries], 0)
        # Embed all samples
        embeddings = self.baseModel(x)

        # Samples are ordered by the NShotWrapper class interleaved by class:
        # the first `way` embeddings are shot 0 of each class, the next `way`
        # are shot 1, and so on; the reshape/permute below regroups them
        # class-major (all shots of class 0 first, then class 1, ...)
        support = embeddings[:self.shot * way]
        queries = embeddings[self.shot * way:]
        support = support.reshape(self.shot, way, -1)
        support = support.permute(1, 0, 2).reshape(way * self.shot, -1)
        queries = queries.reshape(query, way, -1)
        queries = queries.permute(1, 0, 2).reshape(way * query, -1)

        # LSTM requires input of shape (seq_len, batch, input_size). `support` is of
        # shape (k_way * n_shot, embedding_dim) and we want the LSTM to treat the
        # support set as a sequence so add a single dimension to transform support set
        # to the shape (k_way * n_shot, 1, embedding_dim) and then remove the batch dimension
        # afterwards

        # Calculate the fully conditional embedding, g, for support set samples as described
        # in appendix A.2 of the paper. g takes the form of a bidirectional LSTM with a
        # skip connection from inputs to outputs
        support, _, _ = self.g(support.unsqueeze(1))
        support = support.squeeze(1)

        # Calculate the fully conditional embedding, f, for the query set samples as described
        # in appendix A.1 of the paper.
        queries = self.f(support, queries)

        # Calculate distances between all queries and all support embeddings
        # Output shape: (q_queries * k_way, k_way * n_shot) = (num_queries, num_support)
        distances = euclidean_metric(queries, support)

        # Calculate "attention" as softmax over support-query distances
        distances = F.normalize(distances, 1, dim=1)
        attention = distances.softmax(dim=1)

        # Calculate predictions as in equation (1) from Matching Networks
        # y_hat = \sum_{i=1}^{k} a(x_hat, x_i) y_i
        y_pred = matching_net_predictions(attention, self.shot, way, query)
        # Clamp to avoid log(0) when a class receives zero attention mass
        y_pred = y_pred.clamp_min(1e-8).log()
        label = create_nshot_task_label_t(way, query).cuda()
        return y_pred, label
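matching_net_predictions is not shown in the snippet. Below is an assumed implementation of equation (1) from Matching Networks, consistent with the class-major support ordering produced by the permute above; treat it as a sketch, not the repository's code. (create_nshot_task_label_t is likewise not shown; it is presumably the class-major counterpart of create_nshot_task_label.)

def matching_net_predictions(attention: torch.Tensor, n: int, k: int,
                             q: int) -> torch.Tensor:
    """y_hat = sum_i a(x_hat, x_i) * y_i over the support set.
    `attention` has shape (q * k, k * n); output is (q * k, k)."""
    # One-hot support labels in class-major order: class 0's n shots first
    y = torch.arange(k, device=attention.device).repeat_interleave(n)
    y_onehot = torch.zeros(k * n, k, device=attention.device)
    y_onehot.scatter_(1, y.unsqueeze(-1), 1)
    return attention @ y_onehot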
Code example #5
File: PN.py  Project: lwj2018/islr-few-shot
    def gfsl_test(self, support, queries):
        # `support` is assumed to be pre-embedded (e.g. class prototypes);
        # only the raw queries go through the backbone here
        queries = self.baseModel(queries)
        distances = euclidean_metric(queries, support)
        return distances
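A short illustrative use of this PN-variant gfsl_test: unlike the forward pass in example #1, the support set is taken as given, so a natural caller passes a precomputed prototype matrix, for instance one row per base and novel class as in example #3's global_base/global_novel registry. All names below are assumptions, not repository code.

# Assumed attributes and names, for illustration only
prototypes = torch.cat([model.global_base, model.global_novel])  # (num_classes, z_dim)
scores = model.gfsl_test(prototypes, query_batch)
# With euclidean_metric returning negative distances (see the sketch after
# example #1), the nearest prototype has the highest score
pred = scores.argmax(dim=1)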