def check(model_name, fold, checkpoint):
    model_info = MODELS[model_name]
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    model = torch.load(checkpoint, map_location=device)
    model = model.to(device)
    model.eval()

    dataset_valid = NihDataset(fold=fold, img_size=model_info.img_size, is_training=False)

    dataloader_valid = DataLoader(dataset_valid,
                                  num_workers=1,
                                  batch_size=1,
                                  shuffle=False)

    data_iter = tqdm(enumerate(dataloader_valid), total=len(dataloader_valid))
    for iter_num, data in data_iter:
        labels = data['categories'].to(device).float()

        outputs = model(data['img'].to(device).float())

        outputs = outputs.cpu().detach().numpy()

        print(outputs, labels)

        plt.cla()
        plt.imshow(data['img'][0, 0].cpu().detach().numpy())
        plt.show()
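A minimal invocation sketch for check(); the model key and checkpoint path below are placeholders, not values from the original code.

# Hypothetical usage; 'some_model' must be a key in MODELS and the path must point to a saved checkpoint.
if __name__ == '__main__':
    check(model_name='some_model',
          fold=0,
          checkpoint='checkpoints/some_model_fold_0/some_model_010.pt')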
Example #2
def generate_predictions(model_name, run, fold, from_epoch=0, to_epoch=100):
    run_str = '' if run is None or run == '' else f'_{run}'
    predictions_dir = f'../output/oof2/{model_name}{run_str}_fold_{fold}'
    os.makedirs(predictions_dir, exist_ok=True)

    model_info = MODELS[model_name]
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    for epoch_num in range(from_epoch, to_epoch):
        prediction_fn = f'{predictions_dir}/{epoch_num:03}.pkl'
        if os.path.exists(prediction_fn):
            continue
        print('epoch', epoch_num)
        checkpoint = f'checkpoints/{model_name}{run_str}_fold_{fold}/{model_name}_{epoch_num:03}.pt'
        print('load', checkpoint)
        try:
            model = torch.load(checkpoint, map_location=device)
        except FileNotFoundError:
            break
        model = model.to(device)
        model.eval()

        dataset_valid = DetectionDataset(fold=fold,
                                         img_size=model_info.img_size,
                                         is_training=False,
                                         images={})

        dataloader_valid = DataLoader(
            dataset_valid,
            num_workers=2,
            batch_size=1,
            shuffle=False,
            collate_fn=pytorch_retinanet.dataloader.collater2d)

        oof = collections.defaultdict(list)

        # iterate the dataset directly (batch size 1) and collate each sample by hand
        for iter_num, data in tqdm(enumerate(dataset_valid),
                                   total=len(dataset_valid)):
            data = pytorch_retinanet.dataloader.collater2d([data])
            img = data['img'].to(device).float()
            nms_scores, global_classification, transformed_anchors = \
                model(img, return_loss=False, return_boxes=True)

            nms_scores = nms_scores.cpu().detach().numpy()
            global_classification = global_classification.cpu().detach().numpy()
            transformed_anchors = transformed_anchors.cpu().detach().numpy()

            oof['gt_boxes'].append(data['annot'].cpu().detach().numpy())
            oof['gt_category'].append(data['category'].cpu().detach().numpy())

            oof['boxes'].append(transformed_anchors)
            oof['scores'].append(nms_scores)
            oof['category'].append(global_classification)

        with open(prediction_fn, 'wb') as f:
            pickle.dump(oof, f)
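A sketch of reading one of the saved per-epoch OOF pickles back; the path below is illustrative and mirrors the predictions_dir scheme above.

# Hypothetical inspection of a saved OOF file; the path is a placeholder.
import pickle

with open('../output/oof2/some_model_fold_0/010.pkl', 'rb') as f:
    oof = pickle.load(f)
print(len(oof['scores']), 'validation images')
print(oof['boxes'][0].shape, oof['scores'][0].shape)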
Example #3
def load_model(checkpoint: str) -> nn.Module:
    """
    Helper to load model weihts
    """
    print(f"Loading model from: {checkpoint}")
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # load the full serialized model onto the selected device
    model = torch.load(checkpoint, map_location=device)
    model = model.to(device)
    model.eval()
    # model = torch.nn.DataParallel(model).cuda()
    return model
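A brief usage sketch; the checkpoint path is a placeholder and the forward-pass signature depends on the model class stored in the checkpoint.

# Hypothetical usage; the path is a placeholder.
model = load_model('checkpoints/some_model/some_model_010.pt')
n_params = sum(p.numel() for p in model.parameters())
print(f'loaded model with {n_params} parameters on', next(model.parameters()).device)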
Example #4
def check(model_name, fold, checkpoint):
    model_info = MODELS[model_name]
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    model = torch.load(checkpoint, map_location=device)
    model = model.to(device)
    model.eval()

    dataset_valid = DetectionDataset(fold=fold,
                                     img_size=model_info.img_size,
                                     is_training=False,
                                     images={})

    dataloader_valid = DataLoader(
        dataset_valid,
        num_workers=1,
        batch_size=1,
        shuffle=False,
        collate_fn=pytorch_retinanet.dataloader.collater2d)

    data_iter = tqdm(enumerate(dataloader_valid), total=len(dataloader_valid))
    for iter_num, data in data_iter:
        classification_loss, regression_loss, global_classification_loss, nms_scores, nms_class, transformed_anchors = \
            model([data['img'].to(device).float(), data['annot'].to(device).float(), data['category'].to(device)],
                  return_loss=True, return_boxes=True)

        nms_scores = nms_scores.cpu().detach().numpy()
        nms_class = nms_class.cpu().detach().numpy()
        transformed_anchors = transformed_anchors.cpu().detach().numpy()

        print(nms_scores, transformed_anchors.shape)
        print('cls loss:', float(classification_loss), 'global cls loss:',
              global_classification_loss, ' reg loss:', float(regression_loss))
        print('cat:', data['category'].numpy()[0], np.exp(nms_class[0]),
              dataset_valid.categories[data['category'][0]])

        plt.cla()
        plt.imshow(data['img'][0, 0].cpu().detach().numpy())

        gt = data['annot'].cpu().detach().numpy()[0]
        for i in range(gt.shape[0]):
            if np.all(np.isfinite(gt[i])):
                p0 = gt[i, 0:2]
                p1 = gt[i, 2:4]
                plt.gca().add_patch(
                    plt.Rectangle(p0,
                                  width=(p1 - p0)[0],
                                  height=(p1 - p0)[1],
                                  fill=False,
                                  edgecolor='b',
                                  linewidth=2))

        for i in range(len(nms_scores)):
            nms_score = nms_scores[i]
            if nms_score < 0.1:
                break
            # print(transformed_anchors[i, :])

            p0 = transformed_anchors[i, 0:2]
            p1 = transformed_anchors[i, 2:4]

            color = 'g'
            if nms_score < 0.4:
                color = 'y'
            if nms_score < 0.25:
                color = 'r'

            # print(p0, p1)
            plt.gca().add_patch(
                plt.Rectangle(p0,
                              width=(p1 - p0)[0],
                              height=(p1 - p0)[1],
                              fill=False,
                              edgecolor=color,
                              linewidth=2))
            plt.gca().text(
                p0[0], p0[1], f'{nms_score:.3f}',
                color=color)  # , bbox={'facecolor': color, 'alpha': 0.5})
        plt.show()

        print(nms_scores)
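Since check() overlays predicted and ground-truth boxes, a small helper like the sketch below (not part of the original code) can quantify their overlap; boxes are assumed to be in the [x0, y0, x1, y1] format used above.

# Hypothetical helper, not from the original code: IoU for [x0, y0, x1, y1] boxes.
import numpy as np

def box_iou(a: np.ndarray, b: np.ndarray) -> float:
    x0, y0 = max(a[0], b[0]), max(a[1], b[1])
    x1, y1 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, x1 - x0) * max(0.0, y1 - y0)
    union = (a[2] - a[0]) * (a[3] - a[1]) + (b[2] - b[0]) * (b[3] - b[1]) - inter
    return inter / union if union > 0 else 0.0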
Example #5
def generate_predictions(
    model_name: str,
    fold: int,
    debug: bool,
    weights_dir: str,
    from_epoch: int = 0,
    to_epoch: int = 10,
    save_oof: bool = True,
    run: str = None,
):
    """
    Loads model weights the epoch checkpoints, 
    calculates oof predictions for and saves them to pickle
    
    Args: 
        model_name : string name from the models configs listed in models.py file
        fold       : evaluation fold number, 0-3
        debug      : if True, runs debugging on few images 
        weights_dir: directory qith model weigths
        from_epoch : the first epoch for predicitions generation 
        to_epoch   : the last epoch for predicitions generation 
        save_oof   : boolean flag weathe rto save precitions
        run        : string name to be added in the experinet name
    """
    predictions_dir = f"{RESULTS_DIR}/test1/{model_name}_fold_{fold}"
    os.makedirs(predictions_dir, exist_ok=True)

    model_info = MODELS[model_name]
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print("device: ", device)

    for epoch_num in range(from_epoch, to_epoch):
        prediction_fn = f"{predictions_dir}/{epoch_num:03}.pkl"
        if os.path.exists(prediction_fn):
            continue
        print("epoch", epoch_num)
        # load model checkpoint
        checkpoint = (
            f"{weights_dir}/{model_name}_fold_{fold}/{model_name}_{epoch_num:03}.pt"
        )
        print("load", checkpoint)
        try:
            model = torch.load(checkpoint, map_location=device)
        except FileNotFoundError:
            break
        model = model.to(device)
        model.eval()
        # load data
        dataset_valid = DatasetValid(
            is_training=False,
            meta_file="stage_1_test_meta.csv",
            debug=debug,
            img_size=512,
        )
        dataloader_valid = DataLoader(
            dataset_valid,
            num_workers=2,
            batch_size=4,
            shuffle=False,
            collate_fn=pytorch_retinanet.dataloader.collater2d,
        )

        oof = collections.defaultdict(list)
        for iter_num, data in tqdm(enumerate(dataset_valid),
                                   total=len(dataset_valid)):
            data = pytorch_retinanet.dataloader.collater2d([data])
            img = data["img"].to(device).float()
            nms_scores, global_classification, transformed_anchors = model(
                img, return_loss=False, return_boxes=True)
            # model outputs to numpy
            nms_scores = nms_scores.cpu().detach().numpy()
            global_classification = global_classification.cpu().detach().numpy()
            transformed_anchors = transformed_anchors.cpu().detach().numpy()
            # out-of-fold predictions
            oof["gt_boxes"].append(data["annot"].cpu().detach().numpy())
            oof["gt_category"].append(data["category"].cpu().detach().numpy())
            oof["boxes"].append(transformed_anchors)
            oof["scores"].append(nms_scores)
            oof["category"].append(global_classification)
        # save epoch predictions
        if save_oof:
            with open(prediction_fn, "wb") as f:
                pickle.dump(oof, f)
Example #6
def test_model(model_name: str, fold: int, debug: bool, checkpoint: str,
               pics_dir: str):
    """
    Loads model weights from the checkpoint, plots ground truth and predictions
    
    Args: 
        model_name : string name from the models configs listed in models.py file
        fold       : evaluation fold number, 0-3
        debug      : if True, runs debugging on few images 
        checkpoint : directory with weights (if avaialable) 
        pics_dir   : directory for saving prediction images 
       
    """
    model_info = MODELS[model_name]
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # load the full serialized model onto the selected device
    model = torch.load(checkpoint, map_location=device)
    model = model.to(device)
    model.eval()
    # load data
    dataset_valid = DetectionDataset(fold=fold,
                                     img_size=model_info.img_size,
                                     is_training=False,
                                     debug=debug)
    dataloader_valid = DataLoader(
        dataset_valid,
        num_workers=1,
        batch_size=1,
        shuffle=False,
        collate_fn=pytorch_retinanet.dataloader.collater2d,
    )

    data_iter = tqdm(enumerate(dataloader_valid), total=len(dataloader_valid))
    for iter_num, data in data_iter:
        (
            classification_loss,
            regression_loss,
            global_classification_loss,
            nms_scores,
            nms_class,
            transformed_anchors,
        ) = model(
            [
                data["img"].to(device).float(),
                data["annot"].to(device).float(),
                data["category"].cuda(),
            ],
            return_loss=True,
            return_boxes=True,
        )

        nms_scores = nms_scores.cpu().detach().numpy()
        nms_class = nms_class.cpu().detach().numpy()
        transformed_anchors = transformed_anchors.cpu().detach().numpy()
        print("nms_scores {}, transformed_anchors.shape {}".format(
            nms_scores, transformed_anchors.shape))
        print(
            "cls loss:",
            float(classification_loss),
            "global cls loss:",
            global_classification_loss,
            " reg loss:",
            float(regression_loss),
        )
        print(
            "category:",
            data["category"].numpy()[0],
            np.exp(nms_class[0]),
            dataset_valid.categories[data["category"][0]],
        )

        # plot data and ground truth
        plt.figure(iter_num, figsize=(6, 6))
        plt.cla()
        plt.imshow(data["img"][0, 0].cpu().detach().numpy(),
                   cmap=plt.cm.gist_gray)
        plt.axis("off")
        gt = data["annot"].cpu().detach().numpy()[0]
        for i in range(gt.shape[0]):
            if np.all(np.isfinite(gt[i])):
                p0 = gt[i, 0:2]
                p1 = gt[i, 2:4]
                plt.gca().add_patch(
                    plt.Rectangle(
                        p0,
                        width=(p1 - p0)[0],
                        height=(p1 - p0)[1],
                        fill=False,
                        edgecolor="b",
                        linewidth=2,
                    ))
        # add predicted boxes to the plot
        for i in range(len(nms_scores)):
            nms_score = nms_scores[i]
            if nms_score < 0.1:
                break
            p0 = transformed_anchors[i, 0:2]
            p1 = transformed_anchors[i, 2:4]
            color = "r"
            if nms_score < 0.3:
                color = "y"
            if nms_score < 0.25:
                color = "g"
            plt.gca().add_patch(
                plt.Rectangle(
                    p0,
                    width=(p1 - p0)[0],
                    height=(p1 - p0)[1],
                    fill=False,
                    edgecolor=color,
                    linewidth=2,
                ))
            plt.gca().text(p0[0], p0[1], f"{nms_score:.3f}", color=color)
        # save the figure before plt.show(), which may clear the current figure
        os.makedirs(pics_dir, exist_ok=True)
        plt.savefig(
            f"{pics_dir}/predict_{iter_num}.eps",
            dpi=300,
            bbox_inches="tight",
            pad_inches=0,
        )
        plt.savefig(
            f"{pics_dir}/predict_{iter_num}.png",
            dpi=300,
            bbox_inches="tight",
            pad_inches=0,
        )
        plt.show()
        plt.close()
        print(nms_scores)
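A hypothetical command-line wrapper around test_model(); the flag names and defaults are assumptions, not part of the original code.

# Hypothetical CLI wrapper; flag names and defaults are assumptions.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", required=True, help="key from MODELS in models.py")
    parser.add_argument("--fold", type=int, default=0)
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--checkpoint", required=True, help="path to the .pt checkpoint")
    parser.add_argument("--pics_dir", default="prediction_pics")
    args = parser.parse_args()

    test_model(args.model_name, args.fold, args.debug, args.checkpoint, args.pics_dir)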