Example no. 1
def wbc(bboxes, scores, thr=0.2):
    # bboxes and scores must be tensors
    if isinstance(bboxes, list):
        device = bboxes[0].device
        bboxes = torch.stack(bboxes).to("cpu")
    else:
        device = bboxes.device
        bboxes = bboxes.to("cpu")

    if isinstance(scores, list):
        scores = torch.stack(scores).to("cpu")
    else:
        scores = scores.to("cpu")

    # change the order from xywh to xyxy
    bboxes = np.asarray(change_box_order(bboxes, order="xywh2xyxy"))
    scores = np.asarray(scores)

    # perform (my version of) weighted box clustering
    wbc_bboxes, wbc_scores = weighted_box_clustering(bboxes, scores, thr=thr)

    # # filter out overlapping boxes
    # wbc_bboxes, wbc_scores = rm_overlapping_boxes(torch.tensor(wbc_bboxes, dtype=torch.float32),
    #                                               torch.tensor(wbc_scores, dtype=torch.float32))

    # convert the boxes back to the "xywh" format
    wbc_bboxes = change_box_order(torch.tensor(wbc_bboxes, dtype=torch.float32),
                                  order="xyxy2xywh").to(device)

    wbc_scores = torch.tensor(wbc_scores, dtype=torch.float32).to(device)

    return wbc_bboxes, wbc_scores
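The helper `change_box_order` used throughout these examples is not shown. A minimal sketch of what it presumably does, assuming (N, 4) tensors and a center-based "xywh" convention (the repo's real helper may differ):

import torch

def change_box_order(boxes, order):
    # boxes: tensor of shape (N, 4); order: "xywh2xyxy" or "xyxy2xywh"
    a = boxes[:, :2]
    b = boxes[:, 2:]
    if order == "xywh2xyxy":
        # center/size -> top-left and bottom-right corners
        return torch.cat([a - b / 2, a + b / 2], dim=1)
    if order == "xyxy2xywh":
        # corners -> center/size
        return torch.cat([(a + b) / 2, b - a], dim=1)
    raise ValueError("unknown order: {}".format(order))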
Example no. 2
def calc_detection_hits(gt_bbox, crop_bbox, score_bbox, iou_thr=0.2, score_thr=0.):
    # bboxes and scores must be tensors
    if isinstance(crop_bbox, list):
        crop_bbox = torch.stack(crop_bbox)

    if isinstance(score_bbox, list):
        score_bbox = torch.stack(score_bbox)

    # sort according to score and limit according to score_thr
    scores, indices = torch.sort(score_bbox, descending=True)
    boxes = crop_bbox[indices]

    boxes = boxes[scores > score_thr]
    scores = scores[scores > score_thr]

    # calculate iou
    iou_matrix = box_iou(change_box_order(gt_bbox, order='xywh2xyxy'),
                         change_box_order(boxes, order="xywh2xyxy"))

    # create label list, denoting at max one detection per lesion
    labels = torch.zeros_like(scores)
    iou_hits = torch.zeros_like(scores)
    for i in range(iou_matrix.shape[0]):
        hits = (iou_matrix[i] > iou_thr).nonzero()
        if len(hits) > 0:
            labels[hits[0].cpu()] = 1.
            iou_hits[hits[0].cpu()] = iou_matrix[i, hits[0].cpu()]

    return labels, scores, iou_hits
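`box_iou` is likewise external. A minimal sketch of a pairwise IoU for "xyxy" boxes (the repo's version apparently also accepts an order argument, as seen in later examples):

import torch

def box_iou(boxes1, boxes2):
    # boxes1: (N, 4), boxes2: (M, 4), both xyxy; returns an (N, M) IoU matrix
    area1 = (boxes1[:, 2] - boxes1[:, 0]) * (boxes1[:, 3] - boxes1[:, 1])
    area2 = (boxes2[:, 2] - boxes2[:, 0]) * (boxes2[:, 3] - boxes2[:, 1])
    lt = torch.max(boxes1[:, None, :2], boxes2[None, :, :2])  # intersection top-left
    rb = torch.min(boxes1[:, None, 2:], boxes2[None, :, 2:])  # intersection bottom-right
    wh = (rb - lt).clamp(min=0)                               # zero for disjoint boxes
    inter = wh[:, :, 0] * wh[:, :, 1]
    return inter / (area1[:, None] + area2[None, :] - inter)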
Example no. 3
    def forward(self, data, loc_preds, loc_targets, cls_preds, cls_targets):
        batch_size = loc_preds.shape[0]
        anchors = Anchors()
        labels = []
        preds = []

        for i in range(batch_size):

            pred_boxes, pred_labels, pred_score = anchors.generateBoxesFromAnchors(
                loc_preds[i],
                cls_preds[i],
                tuple(data[i].shape[1:]),
                cls_tresh=0.05)

            #start = time.time()
            target_boxes, target_labels = anchors.restoreBoxesFromAnchors(
                loc_targets[i], cls_targets[i], tuple(data[i].shape[1:]))
            #end = time.time()
            #print(end-start)

            if pred_boxes is None and target_boxes is None:
                continue

            if pred_boxes is None:
                preds.append(torch.zeros_like(target_labels))
                labels.append(target_labels)
                continue

            if target_boxes is None:
                preds.append(pred_labels)
                labels.append(torch.zeros_like(pred_labels))
                continue

            pred_boxes = change_box_order(pred_boxes, order='xywh2xyxy')
            target_boxes = change_box_order(target_boxes, order='xywh2xyxy')

            iou_matrix = box_iou(target_boxes, pred_boxes)
            iou_matrix = iou_matrix > self.iou_thr

            box_labels = torch.clamp(torch.sum(iou_matrix, 0), 0, 1)

            preds.append(pred_score)
            labels.append(box_labels)

        labels = torch.tensor([item for sublist in labels for item in sublist])\
            .type(torch.float32)
        preds = torch.tensor([item for sublist in preds for item in sublist])\
            .type(torch.float32)

        if not any(labels):
            return float_to_tensor(0.5)
        elif all(labels):
            return float_to_tensor(1)
        elif labels.dim() == 0:
            return float_to_tensor(0)
        else:
            return float_to_tensor(roc_auc_score(labels, preds))
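The edge cases above guard `roc_auc_score`, which is undefined when only one class is present. A quick smoke test of the common path (toy values, not from the original code base):

import torch
from sklearn.metrics import roc_auc_score

labels = torch.tensor([1., 0., 1., 0.])
preds = torch.tensor([0.9, 0.4, 0.6, 0.3])
print(roc_auc_score(labels, preds))  # 1.0: every positive outranks every negative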
Example no. 4
def gt_overlap(image_bbox, crop_bbox, iou_thr=0.2):
    # convert bbox format from [x, y, w, h] to [x_1, y_1, x_2, y_2] for IoU
    # calculation
    image_bbox = change_box_order(image_bbox, order='xywh2xyxy')
    crop_bbox = change_box_order(crop_bbox, order="xywh2xyxy")

    # determine overlap with ground truth bboxes
    iou_matrix = box_iou(image_bbox, crop_bbox)
    iou_matrix = iou_matrix > iou_thr

    # assign each detected bbox a label: 1 if its overlap with the ground
    # truth is higher than the given threshold, 0 otherwise
    box_class = torch.clamp(torch.sum(iou_matrix, 0), 0, 1)

    return box_class
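A usage sketch with hand-picked toy boxes in "xywh" form, assuming `change_box_order` and `box_iou` behave as sketched above:

import torch

gt = torch.tensor([[50., 50., 20., 20.], [200., 200., 30., 30.]])
det = torch.tensor([[52., 49., 20., 20.],     # covers the first lesion
                    [120., 120., 10., 10.],   # covers nothing
                    [198., 202., 28., 28.]])  # covers the second lesion
print(gt_overlap(gt, det))  # expected: tensor([1, 0, 1])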
Example no. 5
def nms(bboxes, scores, labels=None, thr=0.2):
    # merge overlapping bounding boxes

    # bboxes and scores must be tensors
    if isinstance(bboxes, list):
        bboxes = torch.stack(bboxes)

    if isinstance(scores, list):
        scores = torch.stack(scores)

    # sort the scores in descending order, adjust bboxes accordingly
    scores, indices = torch.sort(scores, descending=True)
    bboxes = bboxes[indices]
    if labels is not None:
        if isinstance(labels, list):
            labels = torch.tensor(labels)

        labels = labels[indices]

    # change the order from xywh to xyxy
    bboxes = change_box_order(bboxes, order="xywh2xyxy")

    # limit the number of bboxes to at most 300
    limit = min(len(scores), 300)
    scores = scores[:limit]
    bboxes = bboxes[:limit]
    if labels is not None:
        labels = labels[:limit]

    # perform an image-wise NMS
    keep_ids = box_nms(bboxes, scores, threshold=thr)
    bboxes = bboxes[keep_ids]
    scores = scores[keep_ids]
    if labels is not None:
        labels = labels[keep_ids]
    else:
        labels = []

    # filter out overlapping bboxes
    #bboxes, scores, labels = rm_overlapping_boxes(bboxes, scores, labels=labels)

    # convert in xywh form again
    bboxes = change_box_order(bboxes, order="xyxy2xywh")

    return bboxes, scores, labels
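A usage sketch, assuming `box_nms` behaves like torchvision's NMS (returns the indices of the boxes to keep); the lower-scoring near-duplicate should be suppressed:

import torch

boxes = [torch.tensor([50., 50., 20., 20.]),
         torch.tensor([51., 50., 20., 20.]),    # near-duplicate of the first box
         torch.tensor([120., 120., 10., 10.])]
scores = [torch.tensor(0.9), torch.tensor(0.8), torch.tensor(0.7)]

kept_boxes, kept_scores, _ = nms(boxes, scores, thr=0.2)
print(kept_boxes.shape)  # expected: torch.Size([2, 4])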
Example no. 6
def calc_tp_fn_fp(gt_bbox, crop_bbox, score_bbox, iou_thr=0.2,
                  confidence_values=np.arange(0.5, 1, 0.05)):
    # bboxes and scores must be tensors
    if isinstance(crop_bbox, list):
        crop_bbox = torch.stack(crop_bbox)

    if isinstance(score_bbox, list):
        score_bbox = torch.stack(score_bbox)

    tp_list = []
    fp_list = []
    fn_list = []
    iou_list = []  # collected for inspection but not part of the return value
    gt_bbox = change_box_order(gt_bbox, order='xywh2xyxy')

    for j in confidence_values:
        current_bbox = crop_bbox[score_bbox > j]

        if len(current_bbox) == 0:
            tp_list.append(torch.tensor(0, device=current_bbox.device))
            fp_list.append(torch.tensor(0, device=current_bbox.device))
            fn_list.append(torch.tensor(gt_bbox.shape[0],
                                        device=current_bbox.device))
            continue
            # break

        iou_matrix = box_iou(gt_bbox,
                             change_box_order(current_bbox,
                                              order="xywh2xyxy"))
        hits = iou_matrix > iou_thr
        iou_values = iou_matrix[hits]
        iou_list.append(iou_values)

        # true positives are the lesions that are recognized
        # count only one detected box per lesion as positive
        tp = torch.clamp(torch.sum(hits, 1), 0, 1).sum()
        tp_list.append(tp)

        # false negatives are the lesions that are NOT recognized
        fn = gt_bbox.shape[0] - tp
        fn_list.append(fn)

        # number of false positives
        fp = (current_bbox.shape[0] - tp).type(torch.float32)
        fp_list.append(fp)

    return tp_list, fp_list, fn_list
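A hedged sketch of how the returned counts could feed a per-threshold sensitivity; the names below are illustrative, not taken from the original code:

import torch

gt = torch.tensor([[50., 50., 20., 20.]])    # one lesion, xywh
det = torch.tensor([[52., 49., 20., 20.]])   # one detection close to it
det_scores = torch.tensor([0.8])

tps, fps, fns = calc_tp_fn_fp(gt, det, det_scores)
# sensitivity per confidence threshold, guarding against empty ground truth
sens = [float(tp) / max(float(tp + fn), 1.) for tp, fn in zip(tps, fns)]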
Example no. 7
def rm_overlapping_boxes(bboxes, scores, labels=None, order="xyxy"):
    if order == "xywh":
        bboxes = change_box_order(bboxes, order="xywh2xyxy")

    scores, indices = torch.sort(scores, descending=True)
    bboxes = bboxes[indices]
    if labels is not None:
        labels = labels[indices]
        result_labels = [labels[0]]

    # filter out overlapping bboxes
    result_bboxes = [bboxes[0]]
    result_scores = [scores[0]]

    for i in range(1, len(bboxes)):
        ignore = False
        for j in range(len(result_bboxes)):
            # if i == j:
            #     continue

            if box_overlap(bboxes[i], result_bboxes[j]):
                ignore = True
                break
        if not ignore:
            result_bboxes.append(bboxes[i])
            result_scores.append(scores[i])
            if labels is not None:
                result_labels.append(labels[i])

    result_bboxes = torch.stack(result_bboxes)
    result_scores = torch.stack(result_scores)

    if order == "xywh":
        result_bboxes = change_box_order(result_bboxes, order="xyxy2xywh")

    if labels is not None:
        return result_bboxes, result_scores, torch.stack(result_labels)
    else:
        return result_bboxes, result_scores, None
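The `box_overlap` helper is not shown. A hedged reconstruction, assuming it reports whether two "xyxy" boxes intersect at all:

def box_overlap(box_a, box_b):
    # True if the two xyxy boxes share any area (hypothetical reconstruction)
    return bool(box_a[0] < box_b[2] and box_b[0] < box_a[2] and
                box_a[1] < box_b[3] and box_b[1] < box_a[3])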
Example no. 8
def eval(dataset, model_path, plot=False):
    # device
    device = 'cuda'

    # load model
    checkpoint = torch.load(model_path)
    model = RetinaNet(**checkpoint['init_kwargs']).eval()
    model.load_state_dict(checkpoint['state_dict'])
    model.to(device)

    # hyperparams
    crop_size = [600, 600]
    overlapped_boxes = 0.5
    confidence_values = np.arange(0.5, 1, 0.05)
    tpr_list = []
    fppi_list = []

    with torch.no_grad():
        for i in tqdm(range(len(dataset))):
            torch.cuda.empty_cache()

            # get image data
            test_data = dataset[i]

            # crop background
            test_data = inbreast_utils.segment_breast(test_data)
            image_bbox = utils.bounding_box(dataset[i]["seg"])

            # generate crops
            crop_list, corner_list = inbreast_utils.create_crops(test_data)

            # define list for predicted bboxes in crops
            crop_bbox = []
            score_bbox = []

            # plot the image with the according bboxes
            if plot:
                # plot image
                fig, ax = plt.subplots(1, figsize=(15, 10))

                ax.imshow(test_data["data"][0, :, :], cmap='Greys_r')

                # show bboxes as saved in data (in red with center)
                for l in range(len(image_bbox)):
                    pos = tuple(image_bbox[l][0:2])
                    plt.plot(pos[0], pos[1], 'r.')
                    width = image_bbox[l][2]
                    height = image_bbox[l][3]
                    pos = (pos[0] - np.floor(width / 2),
                           pos[1] - np.floor(height / 2))

                    # Create a Rectangle patch
                    rect = patches.Rectangle(pos,
                                             width,
                                             height,
                                             linewidth=1,
                                             edgecolor='r',
                                             facecolor='none')
                    ax.add_patch(rect)

            # iterate over crops
            for j in tqdm(range(0, len(crop_list))):
                torch.cuda.empty_cache()
                test_image = torch.Tensor(crop_list[j]['data']).to(device)
                test_bbox = utils.bounding_box(crop_list[j]['seg'])

                # predict anchors and labels for the crops using the loaded model
                anchor_preds, cls_preds = model(test_image.unsqueeze(0))

                # convert the predicted anchors to bboxes
                anchors = Anchors()
                boxes, labels, score = anchors.generateBoxesFromAnchors(
                    anchor_preds[0].to('cpu'),
                    cls_preds[0].to('cpu'),
                    tuple(test_image.shape[1:]),
                    cls_tresh=0.05)

                # correct the predicted bboxes
                for k in range(len(boxes)):
                    center_corrected = boxes[k][0:2] + \
                                       torch.Tensor(corner_list[j])
                    crop_bbox.append(
                        torch.cat((center_corrected, boxes[k][2:])))
                    score_bbox.append(score[k])

            # merge overlapping bounding boxes
            crop_bbox, score_bbox = merge(crop_bbox, score_bbox)

            # calculate the FROC metric (TPR vs. FPPI)
            tpr_int = []
            fppi_int = []
            image_bbox = change_box_order(torch.Tensor(image_bbox),
                                          order='xywh2xyxy').to('cpu')
            iou_thr = 0.2
            for j in confidence_values:
                current_bbox = crop_bbox[score_bbox > j]

                if len(current_bbox) == 0:
                    tpr_int.append(torch.Tensor([0]))
                    fppi_int.append(torch.Tensor([0]))
                    continue
                    #break

                iou_matrix = box_iou(
                    image_bbox,
                    change_box_order(current_bbox, order="xywh2xyxy"))
                iou_matrix = iou_matrix > iou_thr

                # true positives are the lesions that are recognized; every
                # match above the IoU threshold counts here, which is why the
                # TPR is clamped to [0, 1] below
                tp = iou_matrix.sum()

                # false negatives are the lesions that are NOT recognized
                fn = image_bbox.shape[0] - tp

                # true positive rate
                tpr = tp.type(torch.float32) / (tp + fn).type(torch.float32)
                tpr = torch.clamp(tpr, 0, 1)

                # number of false positives per image
                fp = (current_bbox.shape[0] - tp).type(torch.float32)

                tpr_int.append(tpr)
                fppi_int.append(fp)
            tpr_list.append(tpr_int)
            fppi_list.append(fppi_int)

            if plot:
                # show the predicted bboxes (in blue)
                print("Number of detected bboxes: {0}".format(len(crop_bbox)))
                keep = score_bbox > 0.5
                crop_bbox = crop_bbox[keep]
                score_bbox = score_bbox[keep]
                for j in range(len(crop_bbox)):
                    width = crop_bbox[j][2]
                    height = crop_bbox[j][3]
                    pos = (crop_bbox[j][0] - torch.floor(width / 2),
                           crop_bbox[j][1] - torch.floor(height / 2))

                    # Create a Rectangle patch
                    rect = patches.Rectangle(pos,
                                             width,
                                             height,
                                             linewidth=1,
                                             edgecolor='b',
                                             facecolor='none')
                    ax.add_patch(rect)
                    ax.annotate("{:.2f}".format(score_bbox[j]),
                                pos,
                                fontsize=6,
                                xytext=(pos[0] + 10, pos[1] - 10))

                    print("BBox params: {0}, score: {1}".format(
                        crop_bbox[j], score_bbox[j]))
                plt.show()

            #     fig.savefig("../plots/" + "_".join(model_path.split("/")[5:8]) + ".png")

        # calculate FROC over all test images
        tpr_list = np.asarray(tpr_list)
        tpr = np.sum(tpr_list, axis=0) / tpr_list.shape[0]

        fppi_list = np.asarray(fppi_list)
        fppi = np.sum(fppi_list, axis=0) / fppi_list.shape[0]

    # plt.figure(1)
    # plt.ylim(0, 1.1)
    # plt.xlabel("False Positives per Image (FPPI)")
    # plt.ylabel("True Positive Rate (TPR)")
    # plt.title("Free Response Operating Characteristic (FROC)")
    # plt.plot(np.asarray(fppi), np.asarray(tpr), "rx-")
    # plt.show()

    return tpr, fppi
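A hedged usage sketch; `test_dataset` and the checkpoint path are placeholders, not names from the original code base. The returned averages give the FROC curve that the commented-out block above would plot:

import matplotlib.pyplot as plt

tpr, fppi = eval(test_dataset, "./checkpoints/retinanet.pth", plot=False)

plt.plot(fppi, tpr, "rx-")
plt.xlabel("False Positives per Image (FPPI)")
plt.ylabel("True Positive Rate (TPR)")
plt.title("Free Response Operating Characteristic (FROC)")
plt.show()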
Example no. 9
def merge_jung(bboxes, scores, merge_thr=0.2):
    def iteration(annotation_list):
        total_set_list = []
        merged_list = []
        for annotation in annotation_list:
            if len(total_set_list) == 0:
                total_set_list.append([annotation])
            else:
                isAppended = False
                for box_set in total_set_list:
                    for saved_annotation in box_set:
                        iou = box_iou(saved_annotation[0].view(1, -1),
                                      annotation[0].view(1, -1),
                                      "xyxy")
                        isCovered = isInside(saved_annotation[0],
                                             annotation[0])
                        #if iou > merge_thr or isCovered:
                        if iou > merge_thr:
                            box_set.append(annotation)
                            isAppended = True
                            break

                    if isAppended:
                        break

                if not isAppended:
                    total_set_list.append([annotation])

        for box_set in total_set_list:
            max_area = 0
            max_value = 0
            max_info = None
            for saved_annotation in box_set:
                area = (saved_annotation[0][3] - saved_annotation[0][1]) * \
                       (saved_annotation[0][2] - saved_annotation[0][0])

                if area > max_area:
                    max_area = area
                    max_info = saved_annotation
                if float(saved_annotation[1]) > max_value:
                    max_value = float(saved_annotation[1])

            # keep the largest box of each set together with its highest score
            merged_list.append((max_info[0], max_value))
        return merged_list

    # bboxes and scores must be tensors
    if isinstance(bboxes, list):
        bboxes = torch.stack(bboxes)

    if isinstance(scores, list):
        #scores = np.asarray(torch.stack(scores))
        scores = torch.stack(scores)

    # change the order from xywh to xyxy
    bboxes = change_box_order(bboxes, order="xywh2xyxy")

    # save boxes and scores in required format
    raw_list = [[bboxes[i], scores[i]] for i in range(len(bboxes))]

    # remove overlapping boxes (first run)
    final_list = iteration(raw_list)

    # remove remaining overlapping boxes till no box can be merged anymore
    change = True
    while change:
        length = len(final_list)
        final_list = iteration(final_list)
        if length == len(final_list):
            change = False

    bboxes = [final_list[i][0] for i in range(len(final_list))]
    bboxes = torch.stack(bboxes)

    scores = [final_list[i][1] for i in range(len(final_list))]
    scores = torch.tensor(scores, dtype=torch.float32)

    # filter out overlapping bboxes
    #bboxes, scores = rm_overlapping_boxes(bboxes, scores)

    # change box order
    bboxes = change_box_order(bboxes, order="xyxy2xywh")

    # sort the scores in descending order, adjust bboxes accordingly
    scores, indices = torch.sort(scores, descending=True)
    bboxes = bboxes[indices]

    return bboxes, scores
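`isInside` is another external helper (also used in the next example). A hedged reconstruction, assuming it checks whether one "xyxy" box lies entirely within another:

def isInside(outer, inner):
    # True if `inner` lies completely within `outer`, both in xyxy form
    # (hypothetical reconstruction of the helper's behavior)
    return bool(inner[0] >= outer[0] and inner[1] >= outer[1] and
                inner[2] <= outer[2] and inner[3] <= outer[3])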
Example no. 10
def my_merging_2(bboxes, scores, crop_center_factor, heatmap_factor, thr=0.2):
    # bboxes and scores must be tensors
    if isinstance(bboxes, list):
        device = bboxes[0].device
        bboxes = torch.stack(bboxes).to("cpu")
    else:
        device = bboxes.device
        bboxes = bboxes.to("cpu")

    if isinstance(scores, list):
        scores = torch.stack(scores).to("cpu")
    else:
        # keep scores on the CPU alongside the boxes, mirroring the bbox branch
        scores = scores.to("cpu")

    # sort the scores in descending order, adjust bboxes accordingly
    scores, indices = torch.sort(scores, descending=True)
    bboxes = bboxes[indices]

    # change the order from xywh to xyxy
    bboxes = change_box_order(bboxes, order="xywh2xyxy")

    box_set_list = []
    for i in range(len(bboxes)):
        if len(box_set_list) == 0:
            box_set_list.append([[bboxes[i], scores[i]]])
        else:
            isAppended = False
            for box_set in box_set_list:
                for saved_box in box_set:
                    iou = box_iou(saved_box[0].view(1, -1),
                                  bboxes[i].view(1, -1),
                                  "xyxy")
                    isCovered = isInside(saved_box[0],
                                         bboxes[i])
                    if iou > thr or isCovered:
                        # if iou > 0.2:
                        box_set.append([bboxes[i], scores[i]])
                        isAppended = True
                        break

                if isAppended:
                    break

            if not isAppended:
                box_set_list.append([[bboxes[i], scores[i]]])

    merged_box_list = []
    merged_score_list = []
    for i in range(len(box_set_list)):
        pos = [box_set_list[i][j][0] for j in range(len(box_set_list[i]))]
        scores = [box_set_list[i][j][1] for j in range(len(box_set_list[i]))]

        pos = torch.stack(pos)
        scores = torch.stack(scores)

        xx1 = torch.max(pos[0, 0], pos[:,0])
        yy1 = torch.max(pos[0, 1], pos[:,1])
        xx2 = torch.min(pos[0, 2], pos[:,2])
        yy2 = torch.min(pos[0, 3], pos[:,3])

        w = torch.max(torch.Tensor([0]), xx2 - xx1 + 1)
        h = torch.max(torch.Tensor([0]), yy2 - yy1 + 1)
        inter = w * h

        areas = (pos[:,2] - pos[:,0] + 1) * (pos[:,3] - pos[:,1] + 1)

        # overlap between the currently highest scoring box and all boxes in the set.
        iou = inter / (areas[0] + areas - inter)

        match_weights = iou
        match_scores = scores * match_weights

        avg_score = torch.sum(match_scores) / torch.sum(match_weights)
        merged_score_list.append(avg_score)

        avg_pos = torch.tensor([torch.sum(pos[:,0] * match_scores) / torch.sum(match_scores),
                                torch.sum(pos[:, 1] * match_scores) / torch.sum(match_scores),
                                torch.sum(pos[:, 2] * match_scores) / torch.sum(match_scores),
                                torch.sum(pos[:, 3] * match_scores) / torch.sum(match_scores)])
        merged_box_list.append(avg_pos)


    # convert the boxes back to the "xywh" format and to tensors
    keep_bboxes = change_box_order(torch.stack(merged_box_list),
                                   order="xyxy2xywh").to(device)

    keep_scores = torch.tensor(merged_score_list, dtype=torch.float32).to(device)

    return keep_bboxes, keep_scores
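A toy run, assuming the repo's `box_iou` and `isInside` helpers; `crop_center_factor` and `heatmap_factor` are accepted but unused by this variant, so `None` passes through. The first two boxes form one cluster, the third stays alone:

import torch

boxes = [torch.tensor([50., 50., 20., 20.]),
         torch.tensor([51., 50., 20., 20.]),    # clusters with the first box
         torch.tensor([120., 120., 10., 10.])]
scores = [torch.tensor(0.9), torch.tensor(0.8), torch.tensor(0.3)]

merged_boxes, merged_scores = my_merging_2(boxes, scores, None, None)
print(merged_boxes.shape)  # expected: torch.Size([2, 4]), one box per cluster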
Example no. 11
def my_merging(bboxes, scores, crop_center_factor, heatmap_factor, thr=0.2):
    # bboxes and scores must be tensors
    if isinstance(bboxes, list):
        device = bboxes[0].device
        bboxes = torch.stack(bboxes).to("cpu")
    else:
        device = bboxes.device
        bboxes = bboxes.to("cpu")

    if isinstance(scores, list):
        scores = np.asarray(torch.stack(scores).to("cpu"))

    if isinstance(crop_center_factor, list):
        crop_center_factor = np.asarray(crop_center_factor)

    if isinstance(heatmap_factor, list):
        heatmap_factor = np.asarray(heatmap_factor)

    # change the order from xywh to xyxy
    bboxes = np.asarray(change_box_order(bboxes, order="xywh2xyxy"))

    # order holds the indices sorted by descending score, e.g. order[1] = 24
    # means the box at index 24 has rank 1
    order = scores.argsort()[::-1]

    # limit the number of bboxes to at most 300
    limit = min(len(order), 300)
    order = order[:limit]
    bboxes = bboxes[order]
    scores = scores[order]
    crop_center_factor = crop_center_factor[order]
    heatmap_factor = heatmap_factor[order]
    order = scores.argsort()[::-1]

    # define list for bboxes to keep
    keep_scores = []
    keep_bboxes = []

    # separate coordinates
    x1 = bboxes[:, 0]
    y1 = bboxes[:, 1]
    x2 = bboxes[:, 2]
    y2 = bboxes[:, 3]

    # calculate the area of each box
    areas = (y2 - y1 + 1) * (x2 - x1 + 1)

    while order.size > 0:
        i = order[0]  # highest scoring element
        xx1 = np.maximum(x1[i], x1[order])
        yy1 = np.maximum(y1[i], y1[order])
        xx2 = np.minimum(x2[i], x2[order])
        yy2 = np.minimum(y2[i], y2[order])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h

        # overlap between the currently highest scoring box and all remaining boxes.
        iou = inter / (areas[i] + areas[order] - inter)

        # get all the predictions that match the current box to build one cluster.
        #criterium = (iou > thr) | (inter / areas[order] > 0.5)
        criterium = (iou > thr)
        matches = np.argwhere(criterium)

        match_iou = iou[matches]
        match_areas = areas[order[matches]]
        match_scores = scores[order[matches]]
        match_ccf = crop_center_factor[order[matches]]
        match_hf = heatmap_factor[order[matches]]
        #print(match_scores)

        # weight all scores in cluster by patch factors, and size.
        match_score_weights =  match_iou * match_ccf # / match_hf # * match_areas
        #match_score_weights =  np.ones_like(match_scores)
        #match_score_weights = match_iou
        match_scores *= match_score_weights

        # compute weighted average score for the cluster
        avg_score = np.sum(match_scores) / np.sum(match_score_weights)
        #avg_score = np.sum(match_scores) / len(match_scores)

        # compute weighted average of coordinates for the cluster. now only take existing
        # predictions into account.
        avg_coords = [
            np.sum(x1[order[matches]] * match_scores) / np.sum(match_scores),
            np.sum(y1[order[matches]] * match_scores) / np.sum(match_scores),
            np.sum(x2[order[matches]] * match_scores) / np.sum(match_scores),
            np.sum(y2[order[matches]] * match_scores) / np.sum(match_scores)]

        # some clusters might have very low scores due to many missing predictions;
        # filter them out with a conservative threshold to speed up evaluation.
        if avg_score > 0.05:
            keep_scores.append(avg_score)
            keep_bboxes.append(avg_coords)

        # get index of all elements that were not matched and discard all others.
        inds = np.where(~criterium)[0]
        order = order[inds]

    # convert the boxes back to the "xywh" format and to tensors
    keep_bboxes = change_box_order(torch.tensor(keep_bboxes,
                                                dtype=torch.float32),
                                   order="xyxy2xywh").to(device)

    keep_scores = torch.tensor(keep_scores, dtype=torch.float32).to(device)

    return keep_bboxes, keep_scores
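A toy run with neutral weighting factors (values are illustrative): the first two boxes collapse into one weighted cluster box, the third survives on its own:

import torch

boxes = [torch.tensor([50., 50., 20., 20.]),
         torch.tensor([51., 50., 20., 20.]),    # clusters with the first box
         torch.tensor([120., 120., 10., 10.])]
scores = [torch.tensor(0.9), torch.tensor(0.8), torch.tensor(0.3)]
ccf = [1.0, 1.0, 1.0]   # neutral crop-center factors
hf = [1.0, 1.0, 1.0]    # heatmap factors (currently not part of the weights)

merged_boxes, merged_scores = my_merging(boxes, scores, ccf, hf)
print(merged_boxes.shape)  # expected: torch.Size([2, 4])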