Example #1
    def __init__(self, hparams: Dict[str, Any],
                 weight_path: Union[Path, str]) -> None:
        super().__init__()
        self.hparams = hparams

        # Build the network from its config entry.
        self.model = object_from_dict(self.hparams["model"])

        # Checkpoint keys carry a "model." prefix; strip it while loading.
        corrections: Dict[str, str] = {"model.": ""}
        checkpoint = load_checkpoint(file_path=weight_path,
                                     rename_in_layers=corrections)
        self.model.load_state_dict(checkpoint["state_dict"])
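
The snippet above loads weights through load_checkpoint with a key-renaming map. A minimal sketch of that renaming idea, assuming a plain torch checkpoint with a "state_dict" entry whose keys carry a "model." prefix; the helper name and the checkpoint path are placeholders, not part of the original code:

from typing import Dict

import torch


def rename_state_dict_keys(state_dict: Dict[str, torch.Tensor],
                           corrections: Dict[str, str]) -> Dict[str, torch.Tensor]:
    """Replace substrings in state-dict keys, e.g. strip a leading "model." prefix."""
    renamed = {}
    for key, value in state_dict.items():
        for old, new in corrections.items():
            key = key.replace(old, new)
        renamed[key] = value
    return renamed


# Usage sketch; "retinaface.ckpt" is a placeholder path.
checkpoint = torch.load("retinaface.ckpt", map_location="cpu")
checkpoint["state_dict"] = rename_state_dict_keys(checkpoint["state_dict"],
                                                  {"model.": ""})
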
Example #2
    def __init__(self, hparams: Dict[str, Any]) -> None:
        super().__init__()
        self.hparams = hparams

        # Anchor (prior) boxes are generated for the configured image size.
        self.prior_box = object_from_dict(
            self.hparams["prior_box"], image_size=self.hparams["image_size"])
        self.model = object_from_dict(self.hparams["model"])

        # Optionally warm-start from a checkpoint, stripping the "model." prefix.
        corrections: Dict[str, str] = {"model.": ""}

        if "weights" in self.hparams:
            checkpoint = load_checkpoint(file_path=self.hparams["weights"],
                                         rename_in_layers=corrections)
            self.model.load_state_dict(checkpoint["state_dict"])

        # Convert BatchNorm layers to synchronized BatchNorm for multi-GPU training.
        if hparams["sync_bn"]:
            self.model = apex.parallel.convert_syncbn_model(self.model)

        self.loss_weights = self.hparams["loss_weights"]

        self.loss = object_from_dict(self.hparams["loss"],
                                     priors=self.prior_box)
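
Every example builds objects from config entries with object_from_dict. A possible minimal implementation of that pattern, assuming the config dict names the class under a "type" key (a sketch; the real helper may differ):

import pydoc
from typing import Any, Dict


def object_from_dict_sketch(config: Dict[str, Any], **extra_kwargs: Any) -> Any:
    """Instantiate the class named in config["type"], passing the remaining keys as kwargs."""
    kwargs = {key: value for key, value in config.items() if key != "type"}
    kwargs.update(extra_kwargs)
    cls = pydoc.locate(config["type"])
    if cls is None:
        raise ValueError(f"Could not locate {config['type']}")
    return cls(**kwargs)


# Usage sketch with an illustrative config entry.
optimizer_config = {"type": "torch.optim.SGD", "lr": 0.001, "momentum": 0.9}
# optimizer = object_from_dict_sketch(optimizer_config, params=model.parameters())
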
Example #3
def main():
    args = get_args()
    # torch.distributed.init_process_group(backend="nccl", rank=args.local_rank)

    # Load the experiment config.
    with open(args.config_path) as f:
        hparams = yaml.load(f, Loader=yaml.SafeLoader)

    # Weights come from the checkpoint below, so skip downloading pretrained ones.
    hparams["model"]["pretrained"] = False

    hparams.update({
        "json_path": args.output_path,
        "visualize": args.visualize,
        "confidence_threshold": args.confidence_threshold,
        "nms_threshold": args.nms_threshold,
        "keep_top_k": args.keep_top_k,
        "local_rank": args.local_rank,
        "prior_box": object_from_dict(hparams["prior_box"],
                                      image_size=[args.max_size, args.max_size]),
        "fp16": args.fp16,
    })

    if args.visualize:
        output_vis_path = args.output_path / "viz"
        output_vis_path.mkdir(parents=True, exist_ok=True)
        hparams["output_vis_path"] = output_vis_path

    output_label_path = args.output_path / "labels"
    output_label_path.mkdir(parents=True, exist_ok=True)
    hparams["output_label_path"] = output_label_path

    device = torch.device("cuda")
    # device = torch.device("cuda", args.local_rank)

    model = object_from_dict(hparams["model"])
    model = model.to(device)

    if args.fp16:
        model = model.half()

    # Checkpoint keys carry a "model." prefix; strip it while loading.
    corrections: Dict[str, str] = {"model.": ""}
    checkpoint = load_checkpoint(file_path=args.weight_path,
                                 rename_in_layers=corrections)
    model.load_state_dict(checkpoint["state_dict"])

    # model = torch.nn.parallel.DistributedDataParallel(
    #     model, device_ids=[args.local_rank], output_device=args.local_rank
    # )

    # Collect every file under the input directory, recursively.
    file_paths = sorted([x for x in args.input_path.rglob("*") if x.is_file()])

    dataset = InferenceDataset(file_paths,
                               max_size=args.max_size,
                               transform=from_dict(hparams["test_aug"]))

    # sampler = DistributedSampler(dataset, shuffle=False)

    dataloader = torch.utils.data.DataLoader(
        dataset,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        pin_memory=True,
        shuffle=False,
        drop_last=False,
        # sampler=sampler,
    )

    predict(dataloader, model, hparams, device)
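
The test-time augmentations in Example #3 are rebuilt from a serialized dict with from_dict. Assuming that helper is albumentations' from_dict (an assumption, not confirmed by the snippet), the hparams["test_aug"] entry would be the kind of dict produced by to_dict, as in this round-trip sketch:

import albumentations as A
from albumentations.core.serialization import from_dict, to_dict

# Serialize a transform and restore it; the serialized dict is the shape that a
# "test_aug" config entry would need to have under the assumption above.
transform = A.Compose([A.Normalize(p=1)])
serialized = to_dict(transform)   # plain dict, safe to store in a YAML config
restored = from_dict(serialized)  # equivalent Compose object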