Example no. 1
def train(epochs: int) -> None:
    train_dataset = TrainDataset(
        cfg.input_size,
        object_count_range=cfg.object_count_range,
        object_size_range=cfg.object_size_range,
        num_samples=1024,
    )
    test_dataset = TrainDataset(
        cfg.input_size,
        object_count_range=cfg.object_count_range,
        object_size_range=cfg.object_size_range,
        num_samples=256,
    )
    backbone = EfficientNetBackbone(1,
                                    out_channels=cfg.channels,
                                    pretrained=True)
    model = CenterNetV1(
        channels=cfg.channels,
        backbone=backbone,
        out_idx=cfg.out_idx,
        box_depth=cfg.box_depth,
        anchors=Anchors(size=cfg.anchor_size),
    )
    mkmaps = MkCornerMaps()
    criterion = Criterion(
        box_weight=cfg.box_weight,
        heatmap_weight=cfg.heatmap_weight,
        mkmaps=mkmaps,
    )
    train_loader = DataLoader(train_dataset,
                              collate_fn=collate_fn,
                              batch_size=cfg.batch_size,
                              shuffle=True)
    test_loader = DataLoader(test_dataset,
                             collate_fn=collate_fn,
                             batch_size=cfg.batch_size * 2,
                             shuffle=True)
    optimizer = torch.optim.AdamW(model.parameters(), lr=cfg.lr)
    visualize = Visualize(cfg.out_dir, "test", limit=2)

    model_loader = ModelLoader(
        out_dir=cfg.out_dir,
        key=cfg.metric[0],
        best_watcher=BestWatcher(mode=cfg.metric[1]),
    )
    to_boxes = ToBoxes(threshold=cfg.to_boxes_threshold, use_peak=cfg.use_peak)
    get_score = MeanPrecition()
    trainer = Trainer(
        model=model,
        train_loader=train_loader,
        test_loader=test_loader,
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        criterion=criterion,
        device="cuda",
        get_score=get_score,
        to_boxes=to_boxes,
    )
    trainer(epochs)
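
The Criterion built above combines a heatmap term and a box-regression term through the two weights passed to it. As a purely illustrative sketch of that idea in plain PyTorch (not this project's actual Criterion implementation), a weighted two-term detection loss could look like:

import torch
import torch.nn.functional as F

def weighted_detection_loss(
    pred_hm: torch.Tensor, gt_hm: torch.Tensor,
    pred_box: torch.Tensor, gt_box: torch.Tensor,
    heatmap_weight: float = 1.0, box_weight: float = 1.0,
) -> torch.Tensor:
    # heatmap term: predicted center heatmap (probabilities) vs. target map
    hm_loss = F.binary_cross_entropy(pred_hm, gt_hm)
    # box term: L1 distance between predicted and target box offsets
    box_loss = F.l1_loss(pred_box, gt_box)
    return heatmap_weight * hm_loss + box_weight * box_loss
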
def test_hfliptta() -> None:
    to_boxes = ToBoxes(threshold=0.1)
    fn = HFlipTTA(to_boxes)
    images = ImageBatch(torch.zeros((1, 3, 64, 64)))
    images[:, :, 0, 0] = torch.ones((1, 3))

    channels = 32
    backbone = ResNetBackbone("resnet34", out_channels=channels)
    model = CenterNetV1(channels=channels, backbone=backbone, out_idx=6)
    fn(model, images)
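
HFlipTTA runs the detector on both the original and the horizontally mirrored batch and maps the mirrored predictions back into the original frame. A minimal sketch of that idea for normalized YOLO boxes (cx, cy, w, h), with `model_predict` as a hypothetical stand-in for the real model call; the repo's HFlipTTA may merge the two passes differently:

import torch

def hflip_tta(model_predict, images: torch.Tensor) -> torch.Tensor:
    boxes = model_predict(images)                   # (N, 4) as cx, cy, w, h in [0, 1]
    flipped = model_predict(images.flip(dims=[3]))  # predictions on the mirrored batch
    flipped[:, 0] = 1.0 - flipped[:, 0]             # mirror cx back to the original frame
    return torch.cat([boxes, flipped], dim=0)       # merge both passes (e.g. before NMS)
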
Example no. 3
def evaluate(limit: int = 100) -> None:
    backbone = EfficientNetBackbone(config.effdet_id, out_channels=config.channels)
    model = CenterNetV1(
        channels=config.channels,
        backbone=backbone,
        out_idx=config.out_idx,
        fpn_depth=config.fpn_depth,
        hm_depth=config.hm_depth,
        box_depth=config.box_depth,
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    box_merge = BoxMerge(
        iou_threshold=config.iou_threshold, confidence_threshold=config.final_threshold
    )
    dataset = Subset(
        WheatDataset(
            annot_file=config.annot_file,
            image_dir=config.train_image_dir,
            max_size=config.max_size,
            mode="test",
        ),
        list(range(limit)),
    )
    to_boxes = ToBoxes(threshold=config.confidence_threshold, use_peak=config.use_peak)
    data_loader = DataLoader(
        dataset=dataset,
        collate_fn=_collate_fn,
        batch_size=config.batch_size * 2,
        shuffle=False,
    )
    predictor = Predictor(
        model=model,
        loader=data_loader,
        model_loader=model_loader,
        device=config.device,
        box_merge=box_merge,
        to_boxes=to_boxes,
    )
    boxes_list, confs_list, ids = predictor()
    gt_boxes_list = [dataset[i][2] for i in range(len(dataset))]
    get_score = MeanPrecition()
    score = np.mean(
        [get_score(x, y.to(x.device)) for x, y in zip(boxes_list, gt_boxes_list)]
    )
    print(score)
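
MeanPrecition scores each image by matching predicted boxes against ground truth, and pairwise IoU is the usual building block for that matching. A self-contained sketch of the computation for xyxy boxes, purely for illustration (the class used above has its own implementation):

import torch

def pairwise_iou(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    area_a = (a[:, 2] - a[:, 0]) * (a[:, 3] - a[:, 1])
    area_b = (b[:, 2] - b[:, 0]) * (b[:, 3] - b[:, 1])
    lt = torch.max(a[:, None, :2], b[None, :, :2])  # top-left of each intersection
    rb = torch.min(a[:, None, 2:], b[None, :, 2:])  # bottom-right of each intersection
    wh = (rb - lt).clamp(min=0)
    inter = wh[..., 0] * wh[..., 1]                 # (N, M) intersection areas
    return inter / (area_a[:, None] + area_b[None, :] - inter)
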
def test_mkcornermaps(h: int, w: int, cy: int, cx: int, dy: float, dx: float) -> None:
    in_boxes = YoloBoxes(torch.tensor([[0.201, 0.402, 0.1, 0.3]]))
    to_boxes = ToBoxes(threshold=0.1)
    mkmaps = MkCornerMaps()
    hm = mkmaps([in_boxes], (h, w), (h * 10, w * 10))
    assert hm.shape == (1, 1, h, w)
    mk_anchors = Anchors()
    anchormap = mk_anchors(hm)
    diffmaps = BoxMaps(torch.zeros((1, *anchormap.shape)))
    diffmaps = in_boxes.view(1, 4, 1, 1).expand_as(diffmaps) - anchormap

    out_box_batch, out_conf_batch = to_boxes((anchormap, diffmaps, hm))
    out_boxes = out_box_batch[0]
    for box in out_boxes:
        assert F.l1_loss(box, in_boxes[0]) < 1e-8
    plot = DetectionPlot(w=w, h=h)
    plot.with_image((hm[0, 0] + 1e-4).log())
    plot.with_yolo_boxes(out_boxes, color="red")
    plot.with_yolo_boxes(in_boxes, color="blue")
    plot.save(f"store/test-corner.png")
def test_mkmaps(h: int, w: int, cy: int, cx: int, dy: float, dx: float) -> None:
    in_boxes = YoloBoxes(torch.tensor([[0.201, 0.402, 0.1, 0.3]]))
    to_boxes = ToBoxes(threshold=0.1)
    mkmaps = MkGaussianMaps(sigma=2.0)
    hm = mkmaps([in_boxes], (h, w), (h * 10, w * 10))
    assert (torch.nonzero(hm.eq(1), as_tuple=False)[0, 2:] - torch.tensor([[cy, cx]])).sum() == 0  # type: ignore
    assert hm.shape == (1, 1, h, w)
    mk_anchors = Anchors()
    anchormap = mk_anchors(hm)
    diffmaps = BoxMaps(torch.zeros((1, *anchormap.shape)))
    diffmaps = in_boxes.view(1, 4, 1, 1).expand_as(diffmaps) - anchormap

    out_box_batch, out_conf_batch = to_boxes((anchormap, diffmaps, hm))
    out_boxes = out_box_batch[0]
    for box in out_boxes:
        assert F.l1_loss(box, in_boxes[0]) < 1e-8
    plot = DetectionPlot(w=w, h=h)
    plot.with_image((hm[0, 0] + 1e-4).log())
    plot.with_yolo_boxes(in_boxes, color="blue")
    plot.with_yolo_boxes(out_boxes, color="red")
    plot.save(f"store/test-heatmapv1.png")
Example no. 6
def predict() -> Tuple[List[YoloBoxes], List[Confidences], List[ImageId]]:
    backbone = EfficientNetBackbone(config.effdet_id, out_channels=config.channels)
    model = CenterNetV1(
        channels=config.channels,
        backbone=backbone,
        out_idx=config.out_idx,
        fpn_depth=config.fpn_depth,
        hm_depth=config.hm_depth,
        box_depth=config.box_depth,
    )
    dataset = PredictionDataset(
        image_dir=config.test_image_dir, max_size=config.max_size,
    )

    data_loader = DataLoader(
        dataset=dataset,
        collate_fn=prediction_collate_fn,
        batch_size=config.batch_size,
        shuffle=False,
    )
    box_merge = BoxMerge(
        iou_threshold=config.iou_threshold, confidence_threshold=config.final_threshold
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    to_boxes = ToBoxes(threshold=config.confidence_threshold, use_peak=config.use_peak)
    predictor = Predictor(
        model=model,
        loader=data_loader,
        model_loader=model_loader,
        device=config.device,
        box_merge=box_merge,
        to_boxes=to_boxes,
    )
    return predictor()
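
The predictor returns normalized YOLO boxes per image; downstream consumers typically want pixel xyxy coordinates. A hedged conversion sketch, where the 1024x1024 default is an assumption about the source images rather than a value taken from config:

import torch

def yolo_to_xyxy(boxes: torch.Tensor, width: int = 1024, height: int = 1024) -> torch.Tensor:
    cx, cy, w, h = boxes.unbind(dim=-1)
    x0 = (cx - w / 2) * width
    y0 = (cy - h / 2) * height
    x1 = (cx + w / 2) * width
    y1 = (cy + h / 2) * height
    return torch.stack([x0, y0, x1, y1], dim=-1)
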
Example no. 7
def train(epochs: int) -> None:
    train_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="train",
    )
    test_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="test",
    )
    fold_keys = [x[2].shape[0] // 30 for x in test_dataset.rows]
    train_idx, test_idx = list(kfold(n_splits=config.n_splits,
                                     keys=fold_keys))[config.fold_idx]

    train_loader = DataLoader(
        Subset(train_dataset, train_idx),
        batch_size=config.batch_size,
        drop_last=True,
        shuffle=True,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    test_loader = DataLoader(
        Subset(test_dataset, test_idx),
        batch_size=config.batch_size,
        drop_last=False,
        shuffle=False,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    backbone = EfficientNetBackbone(config.effdet_id,
                                    out_channels=config.channels,
                                    pretrained=config.pretrained)
    model = CenterNetV1(
        channels=config.channels,
        backbone=backbone,
        out_idx=config.out_idx,
        fpn_depth=config.fpn_depth,
        hm_depth=config.hm_depth,
        box_depth=config.box_depth,
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    box_merge = BoxMerge(iou_threshold=config.iou_threshold,
                         confidence_threshold=config.final_threshold)
    criterion = Criterion(
        heatmap_weight=config.heatmap_weight,
        box_weight=config.box_weight,
        mk_maps=MkMaps(
            sigma=config.sigma,
            mode=config.map_mode,
        ),
    )

    visualize = Visualize(config.out_dir,
                          "centernet",
                          limit=5,
                          show_probs=True)
    optimizer = torch.optim.AdamW(
        model.parameters(),
        lr=config.lr,
    )
    to_boxes = ToBoxes(
        threshold=config.confidence_threshold,
        use_peak=config.use_peak,
    )
    Trainer(
        model=model,
        train_loader=train_loader,
        test_loader=test_loader,
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        device=config.device,
        criterion=criterion,
        get_score=MeanPrecition(),
        to_boxes=to_boxes,
        box_merge=box_merge,
    )(epochs)
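
The fold_keys above bucket each image by its box count (// 30), so the split is stratified on object density. As an illustration of that idea using scikit-learn (the repo's own kfold helper may be implemented differently):

from sklearn.model_selection import StratifiedKFold

def stratified_folds(keys, n_splits: int = 5):
    skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=0)
    # StratifiedKFold needs only the per-sample labels (the buckets) to balance each fold
    return list(skf.split(keys, keys))

# usage sketch:
# train_idx, test_idx = stratified_folds(fold_keys, n_splits=config.n_splits)[config.fold_idx]
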