def forward(self, x):
    """Run the backbone, append the p6/p7 extra pyramid levels, and
    flatten the time dimension of every feature map into the batch.

    :param x: sequence tensor fed to the backbone
    :return: list of feature maps, each with time folded into batch
    """
    feats = self.bb(x)
    extra6 = self.p6(feats[-1])
    extra7 = self.p7(extra6)
    pyramid = feats + [extra6, extra7]
    return [time_to_batch(level)[0] for level in pyramid]
Beispiel #2
0
    def _embeddings_loss(self, emb, ids, cls_targets, batchsize):
        """
        In time, promote cosine similarity between targets that share an id.

        Anchors with the same (per-sequence) id are pulled together with a
        ``1 - cos`` penalty; anchors with different ids are pushed apart with
        a hinge ``max(0, cos - margin)``.

        :param emb: predicted embeddings [TN, #anchors, D]
        :param ids: target track ids [TN, #anchors]
        :param cls_targets: classification targets; entries > 0 mark positive anchors
        :param batchsize: number of sequences N (time T is folded into the TN axis)
        :return: scalar mean pairwise loss over positive anchors
        """
        # Keep only the positive anchors.
        pos = (cls_targets > 0).view(-1)
        emb = emb.view(-1, emb.size(-1))[pos]

        # Ids are only unique within a sequence: offset each batch item so the
        # same numeric id in different sequences is never matched.
        ids = batch_to_time(ids, batchsize)
        offsets = 100 * torch.arange(batchsize)[None, :, None].to(ids.device)
        ids = ids + offsets
        ids = time_to_batch(ids)[0]
        ids = ids.view(-1)[pos]

        #TODO:
        #1. do this per frame not globally (benchmark this)
        #2. weight id with iou loss = iou * id * (1-cos_mat) + (1-id) * max(0, cos_mat - margin)
        # We do cos(x1, x2) between every vectors
        # When they have same id = we use loss as 1 - cos (penalize for being far)
        # When they have not same id = max(0, cos - margin) (penalize for not being close)
        y = (ids[:, None] == ids[None, :])
        norms = torch.norm(emb, dim=1)
        # Clamp the denominator so a zero-norm embedding yields 0 cosine
        # instead of NaN (NaN would poison the mean below).
        norm_matrix = (norms[:, None] * norms[None, :]).clamp(min=1e-12)
        dot_matrix = torch.mm(emb, emb.t())
        cos_matrix = dot_matrix / norm_matrix

        margin = 0.5
        loss = torch.where(y, 1 - cos_matrix, F.relu(cos_matrix - margin))

        return loss.mean()
    def forward(self, x):
        """Apply conv1, then run each conv2 stage sequentially, collecting
        every intermediate output with time flattened into batch.

        :param x: input sequence tensor
        :return: list of flattened feature maps, one per conv2 stage
        """
        feat = self.conv1(x)
        collected = []
        for stage in self.conv2:
            feat = stage(feat)
            flat = time_to_batch(feat)[0]
            collected.append(flat)

        return collected
    def forward(self, x):
        """Run the conv1..conv6 chain and return the last four feature maps
        (x3..x6) with the time dimension flattened into the batch.

        :param x: input sequence tensor
        :return: list of four flattened feature maps
        """
        x2 = self.conv1(x)
        x3 = self.conv3(x2)
        x4 = self.conv4(x3)
        x5 = self.conv5(x4)
        x6 = self.conv6(x5)

        # Note: the initial `sources = list()` the original carried was dead
        # code (immediately overwritten) and has been removed.
        sources = [time_to_batch(item)[0] for item in [x3, x4, x5, x6]]

        return sources
Beispiel #5
0
 def upsample(self, x, feat_shape):
     """Nearest-neighbour resize of a time-major tensor to `feat_shape`,
     flattening time into batch for the interpolation and restoring it after.

     :param x: time-major input tensor
     :param feat_shape: target spatial size for F.interpolate
     :return: resized tensor with the original time layout
     """
     flat, batch = time_to_batch(x)
     resized = F.interpolate(flat, size=feat_shape, mode='nearest')
     return batch_to_time(resized, batch)
Beispiel #6
0
 def forward_parallel(self, x):
     """Apply the wrapped module to all timesteps at once by folding the
     time dimension into the batch, then restoring it.

     :param x: tensor whose dim 0 is time and dim 1 is batch
     :return: module output with the original (time, batch) layout
     """
     # Only the batch size is needed to undo the fold; the original also
     # bound the (unused) time length, which has been removed.
     n = x.size(1)
     x = time_to_batch(x)[0]
     x = self.module(x)
     x = batch_to_time(x, n)
     return x
Beispiel #7
0
def sequence_upsample(x, y):
    """Resize sequence `x` (nearest-neighbour) to the spatial size of `y`.

    Time is folded into batch for the interpolation and restored afterwards.

    :param x: time-major tensor to resize
    :param y: tensor whose last two dims give the target spatial size
    :return: resized tensor with the original time layout
    """
    flat, batch = time_to_batch(x)
    target = y.shape[-2:]
    resized = F.interpolate(flat, size=target, mode='nearest')
    return batch_to_time(resized, batch)
 def forward(self, x):
     """Run conv1 then conv2 and flatten time into batch for every output.

     :param x: input sequence tensor
     :return: list of flattened feature maps
     """
     features = self.conv2(self.conv1(x))
     return [time_to_batch(feat)[0] for feat in features]
 def forward(self, x):
     """Backbone then neck, flattening time into batch for each output map.

     :param x: input sequence tensor
     :return: list of flattened feature maps from the neck
     """
     neck_outs = self.neck(self.bb(x))
     flattened = [time_to_batch(level)[0] for level in neck_outs]
     return flattened
Beispiel #10
0
    def forward(self, x):
        """Run conv1/conv2, keep the last `self.levels` maps, flatten time
        into batch, and return them in reversed (coarse-to-fine) order.

        :param x: input sequence tensor
        :return: reversed list of flattened feature maps
        """
        stem = self.conv1(x)
        kept = self.conv2(stem)[-self.levels:]
        flattened = [time_to_batch(level)[0] for level in kept]
        return list(reversed(flattened))