Example #1
def train(epochs: int) -> None:
    train_dataset = TrainDataset(
        cfg.input_size,
        object_count_range=cfg.object_count_range,
        object_size_range=cfg.object_size_range,
        num_samples=1024,
    )
    test_dataset = TrainDataset(
        cfg.input_size,
        object_count_range=cfg.object_count_range,
        object_size_range=cfg.object_size_range,
        num_samples=256,
    )
    backbone = EfficientNetBackbone(1,
                                    out_channels=cfg.channels,
                                    pretrained=True)
    model = CenterNetV1(
        channels=cfg.channels,
        backbone=backbone,
        out_idx=cfg.out_idx,
        box_depth=cfg.box_depth,
        anchors=Anchors(size=cfg.anchor_size),
    )
    mkmaps = MkCornerMaps()
    criterion = Criterion(
        box_weight=cfg.box_weight,
        heatmap_weight=cfg.heatmap_weight,
        mkmaps=mkmaps,
    )
    train_loader = DataLoader(train_dataset,
                              collate_fn=collate_fn,
                              batch_size=cfg.batch_size,
                              shuffle=True)
    test_loader = DataLoader(test_dataset,
                             collate_fn=collate_fn,
                             batch_size=cfg.batch_size * 2,
                             shuffle=True)
    optimizer = torch.optim.AdamW(model.parameters(), lr=cfg.lr)
    visualize = Visualize(cfg.out_dir, "test", limit=2)

    model_loader = ModelLoader(
        out_dir=cfg.out_dir,
        key=cfg.metric[0],
        best_watcher=BestWatcher(mode=cfg.metric[1]),
    )
    to_boxes = ToBoxes(threshold=cfg.to_boxes_threshold, use_peak=cfg.use_peak)
    get_score = MeanPrecition()
    trainer = Trainer(
        model=model,
        train_loader=train_loader,
        test_loader=test_loader,
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        criterion=criterion,
        device="cuda",
        get_score=get_score,
        to_boxes=to_boxes,
    )
    trainer(epochs)
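
The function reads everything from a module-level cfg object that the listing does not show. Below is a minimal sketch of such a config, using only the attribute names that appear above; the values and the use of SimpleNamespace are placeholders, not the original configuration.

from types import SimpleNamespace

# Hypothetical values; only the attribute names are taken from the example above.
cfg = SimpleNamespace(
    input_size=512,
    object_count_range=(1, 20),
    object_size_range=(32, 64),
    channels=64,
    out_idx=4,
    box_depth=1,
    anchor_size=2,
    box_weight=1.0,
    heatmap_weight=1.0,
    batch_size=8,
    lr=1e-3,
    out_dir="/store/centernetv1",
    metric=("score", "max"),
    to_boxes_threshold=0.3,
    use_peak=True,
)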
Example #2
def train(epochs: int) -> None:
    train_dataset = TrainDataset(
        config.input_size,
        object_count_range=config.object_count_range,
        object_size_range=config.object_size_range,
        num_samples=1024,
    )
    test_dataset = TrainDataset(
        config.input_size,
        object_count_range=config.object_count_range,
        object_size_range=config.object_size_range,
        num_samples=256,
    )
    backbone = EfficientNetBackbone(2,
                                    out_channels=config.channels,
                                    pretrained=True)
    anchors = Anchors(size=config.anchor_size, ratios=config.anchor_ratios)
    model = EfficientDet(num_classes=1,
                         channels=config.channels,
                         backbone=backbone,
                         anchors=anchors)
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    criterion = Criterion()
    optimizer = torch.optim.Adam(model.parameters(), lr=config.lr)
    visualize = Visualize("/store/efficientdet", "test", limit=2)
    get_score = MeanPrecition()
    to_boxes = ToBoxes(confidence_threshold=config.confidence_threshold)
    trainer = Trainer(
        model,
        DataLoader(
            train_dataset,
            collate_fn=collate_fn,
            batch_size=config.batch_size,
            shuffle=True,
        ),
        DataLoader(
            test_dataset,
            collate_fn=collate_fn,
            batch_size=config.batch_size * 2,
            shuffle=True,
        ),
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        criterion=criterion,
        get_score=get_score,
        device="cuda",
        to_boxes=to_boxes,
    )
    trainer(epochs)
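
The listings define train but never show how it is called. A minimal command-line wrapper like the one below would drive it; this is an assumption, not part of the original module.

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--epochs", type=int, default=100)
    args = parser.parse_args()
    train(args.epochs)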
Example #3
def evaluate(limit: int = 100) -> None:
    backbone = EfficientNetBackbone(config.effdet_id,
                                    out_channels=config.channels)
    anchors = Anchors(
        size=config.anchor_size,
        ratios=config.anchor_ratios,
        scales=config.anchor_scales,
    )
    model = EfficientDet(
        num_classes=1,
        channels=config.channels,
        backbone=backbone,
        anchors=anchors,
        out_ids=config.out_ids,
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    box_merge = BoxMerge(iou_threshold=config.iou_threshold,
                         confidence_threshold=config.final_threshold)
    dataset = Subset(
        WheatDataset(
            annot_file=config.annot_file,
            image_dir=config.train_image_dir,
            max_size=config.max_size,
            mode="test",
        ),
        list(range(limit)),
    )
    to_boxes = ToBoxes(confidence_threshold=config.confidence_threshold)
    data_loader = DataLoader(
        dataset=dataset,
        collate_fn=_collate_fn,
        batch_size=config.batch_size * 2,
        shuffle=False,
    )
    predictor = Predictor(
        model=model,
        loader=data_loader,
        model_loader=model_loader,
        device=config.device,
        box_merge=box_merge,
        to_boxes=to_boxes,
    )
    boxes_list, confs_list, ids = predictor()
    gt_boxes_list = [dataset[i][2] for i in range(len(dataset))]
    get_score = MeanPrecition()
    score = np.mean([
        get_score(x, y.to(x.device)) for x, y in zip(boxes_list, gt_boxes_list)
    ])
    print(score)
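
Because the test loader is built with shuffle=False, boxes_list comes back in dataset order and can be zipped positionally with gt_boxes_list, which is what the score computation above relies on. With the surrounding config module loaded, a quick run is simply:

evaluate(limit=50)  # scores the first 50 annotated images and prints the mean precision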
Example #4
def predict() -> Tuple[List[YoloBoxes], List[Confidences], List[ImageId]]:
    backbone = EfficientNetBackbone(config.effdet_id, out_channels=config.channels)
    model = CenterNetV1(
        channels=config.channels,
        backbone=backbone,
        out_idx=config.out_idx,
        fpn_depth=config.fpn_depth,
        hm_depth=config.hm_depth,
        box_depth=config.box_depth,
    )
    dataset = PredictionDataset(
        image_dir=config.test_image_dir, max_size=config.max_size,
    )

    data_loader = DataLoader(
        dataset=dataset,
        collate_fn=prediction_collate_fn,
        batch_size=config.batch_size,
        shuffle=False,
    )
    box_merge = BoxMerge(
        iou_threshold=config.iou_threshold, confidence_threshold=config.final_threshold
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    to_boxes = ToBoxes(threshold=config.confidence_threshold, use_peak=config.use_peak)
    predictor = Predictor(
        model=model,
        loader=data_loader,
        model_loader=model_loader,
        device=config.device,
        box_merge=box_merge,
        to_boxes=to_boxes,
    )
    return predictor()
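
The returned lists are aligned per image (the loader uses shuffle=False), so they can be zipped with the image ids directly. A minimal consumer, assuming YoloBoxes and Confidences are tensor-like as the y.to(x.device) call in Example #3 suggests, might look like:

boxes_list, confs_list, ids = predict()
for image_id, boxes, confs in zip(ids, boxes_list, confs_list):
    # One entry per test image: predicted boxes and their confidence scores.
    print(image_id, len(boxes), len(confs))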
Example #5
def train(epochs: int) -> None:
    train_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="train",
    )
    test_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="test",
    )
    fold_keys = [x[2].shape[0] // 30 for x in test_dataset.rows]
    train_idx, test_idx = list(kfold(n_splits=config.n_splits, keys=fold_keys))[
        config.fold_idx
    ]

    train_loader = DataLoader(
        Subset(train_dataset, train_idx),
        batch_size=config.batch_size,
        drop_last=True,
        shuffle=True,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    test_loader = DataLoader(
        Subset(test_dataset, test_idx),
        batch_size=config.batch_size,
        drop_last=False,
        shuffle=False,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    backbone = EfficientNetBackbone(
        config.effdet_id, out_channels=config.channels, pretrained=config.pretrained
    )
    anchors = Anchors(
        size=config.anchor_size,
        ratios=config.anchor_ratios,
        scales=config.anchor_scales,
    )
    model = EfficientDet(
        num_classes=1,
        channels=config.channels,
        backbone=backbone,
        anchors=anchors,
        out_ids=config.out_ids,
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    box_merge = BoxMerge(
        iou_threshold=config.iou_threshold, confidence_threshold=config.final_threshold
    )
    criterion = Criterion(
        label_weight=config.label_weight,
        pos_loss=PosLoss(iou_threshold=config.pos_threshold),
        size_loss=SizeLoss(iou_threshold=config.size_threshold),
        label_loss=LabelLoss(iou_thresholds=config.label_thresholds),
    )
    visualize = Visualize(config.out_dir, "test", limit=5, show_probs=True)
    optimizer = torch.optim.AdamW(model.parameters(), lr=config.lr)
    to_boxes = ToBoxes(confidence_threshold=config.confidence_threshold)
    Trainer(
        model=model,
        train_loader=train_loader,
        test_loader=test_loader,
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        device=config.device,
        criterion=criterion,
        get_score=MeanPrecition(),
        to_boxes=to_boxes,
        box_merge=box_merge,
    )(epochs)
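
The fold_keys expression buckets each image by its box count (integer division by 30) so the split is stratified by object density. The original kfold helper is not shown; a sketch consistent with the call signature used above, built on scikit-learn's StratifiedKFold, could look like this (an assumption, not the original implementation):

from typing import Iterator, List, Tuple

from sklearn.model_selection import StratifiedKFold


def kfold(n_splits: int, keys: List[int]) -> Iterator[Tuple[List[int], List[int]]]:
    # Stratify on the bucketed box counts so each fold sees a similar density mix.
    skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=0)
    for train_idx, test_idx in skf.split(keys, keys):
        yield list(train_idx), list(test_idx)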
Example #6
def train(epochs: int) -> None:
    train_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="train",
    )
    test_dataset = WheatDataset(
        image_dir=config.train_image_dir,
        annot_file=config.annot_file,
        max_size=config.max_size,
        mode="test",
    )
    fold_keys = [x[2].shape[0] // 30 for x in test_dataset.rows]
    train_idx, test_idx = list(kfold(n_splits=config.n_splits,
                                     keys=fold_keys))[config.fold_idx]

    train_loader = DataLoader(
        Subset(train_dataset, train_idx),
        batch_size=config.batch_size,
        drop_last=True,
        shuffle=True,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    test_loader = DataLoader(
        Subset(test_dataset, test_idx),
        batch_size=config.batch_size,
        drop_last=False,
        shuffle=False,
        collate_fn=collate_fn,
        num_workers=config.num_workers,
    )
    backbone = EfficientNetBackbone(config.effdet_id,
                                    out_channels=config.channels,
                                    pretrained=config.pretrained)
    model = CenterNetV1(
        channels=config.channels,
        backbone=backbone,
        out_idx=config.out_idx,
        fpn_depth=config.fpn_depth,
        hm_depth=config.hm_depth,
        box_depth=config.box_depth,
    )
    model_loader = ModelLoader(
        out_dir=config.out_dir,
        key=config.metric[0],
        best_watcher=BestWatcher(mode=config.metric[1]),
    )
    box_merge = BoxMerge(iou_threshold=config.iou_threshold,
                         confidence_threshold=config.final_threshold)
    criterion = Criterion(
        heatmap_weight=config.heatmap_weight,
        box_weight=config.box_weight,
        mk_maps=MkMaps(
            sigma=config.sigma,
            mode=config.map_mode,
        ),
    )

    visualize = Visualize(config.out_dir,
                          "centernet",
                          limit=5,
                          show_probs=True)
    optimizer = torch.optim.AdamW(
        model.parameters(),
        lr=config.lr,
    )
    to_boxes = ToBoxes(
        threshold=config.confidence_threshold,
        use_peak=config.use_peak,
    )
    Trainer(
        model=model,
        train_loader=train_loader,
        test_loader=test_loader,
        model_loader=model_loader,
        optimizer=optimizer,
        visualize=visualize,
        device=config.device,
        criterion=criterion,
        get_score=MeanPrecition(),
        to_boxes=to_boxes,
        box_merge=box_merge,
    )(epochs)
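
In this variant the criterion builds heatmap targets via MkMaps(sigma=..., mode=...), and ToBoxes(use_peak=...) decodes heatmap peaks back into boxes. Conceptually, a per-object heatmap target is a 2D Gaussian on the output grid; the sketch below only illustrates that idea and is not the library's MkMaps implementation.

import torch


def gaussian_heatmap(h: int, w: int, cx: float, cy: float, sigma: float) -> torch.Tensor:
    # Dense 2D Gaussian peaking at the object center (cx, cy), in grid coordinates.
    ys = torch.arange(h, dtype=torch.float32).view(h, 1)
    xs = torch.arange(w, dtype=torch.float32).view(1, w)
    return torch.exp(-((xs - cx) ** 2 + (ys - cy) ** 2) / (2 * sigma ** 2))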
Example #7
out_idx = 4
channels = 128
metric: tuple[str, WatchMode] = ("score", "max")
backbone_id = 3

cls_depth = 1
out_dir = f"/store/kp-{backbone_id}-{cls_depth}"

backbone = EfficientNetBackbone(
    backbone_id, out_channels=channels, pretrained=True
)
net = Net(
    backbone=backbone,
    num_classes=num_classes,
    channels=channels,
    out_idx=out_idx,
    cls_depth=cls_depth,
)
to_points = ToPoints(threshold=confidence_threshold)
mkmaps = MkPointMaps(
    num_classes=num_classes,
    sigma=sigma,
)
hmloss = HMLoss()

model_loader = ModelLoader(
    out_dir=out_dir,
    key=metric[0],
    best_watcher=BestWatcher(mode=metric[1]),
)
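
This fragment also relies on num_classes, confidence_threshold, and sigma being defined earlier in the same module; they are not shown here. Purely illustrative placeholders such as the following make it self-contained:

# Placeholder values; the original module defines these elsewhere.
num_classes = 1
confidence_threshold = 0.3
sigma = 4.0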