Example #1
import os
import pickle
import time
from pathlib import Path

import numpy as np
import torch
from tqdm import tqdm

# Project helpers (setup_logger, load_config_file, the *_builder modules,
# merge_second_batch, example_convert_to_torch, restore, Logger, read_calib,
# read_pc_from_bin, read_image, _worker_init_fn) are assumed to come from
# the surrounding det3 package and are not shown here.
def main(tag, cfg_path, is_val: bool, rawdata_dir, vis_dir):
    root_dir = Path(__file__).parent
    log_dir = Path(vis_dir)
    log_dir.mkdir(parents=True, exist_ok=True)
    setup_logger(log_dir)
    cfg = load_config_file(cfg_path=cfg_path, log_dir=log_dir, backup=False)

    # build net
    voxelizer = voxelizer_builder.build(voxelizer_cfg=cfg.Voxelizer)
    anchor_generator = anchor_generator_builder.build(anchor_generator_cfg=cfg.AnchorGenerator)
    box_coder = box_coder_builder.build(box_coder_cfg=cfg.BoxCoder)
    similarity_calculator = similarity_calculator_builder.build(
        similarity_calculator_cfg=cfg.SimilarityCalculator)
    target_assigner = target_assigner_builder.build(target_assigner_cfg=cfg.TargetAssigner,
                                                    box_coder=box_coder,
                                                    anchor_generators=[anchor_generator],
                                                    region_similarity_calculators=[similarity_calculator])
    net = second_builder.build(cfg=cfg.Net, voxelizer=voxelizer, target_assigner=target_assigner).cuda()
    # build dataloader
    val_data = dataloader_builder.build(cfg.Net, cfg.ValDataLoader,
                                        voxelizer, target_assigner,
                                        training=False)
    val_dataloader = torch.utils.data.DataLoader(
        val_data,
        batch_size=cfg.TrainDataLoader["batch_size"],  # note: reuses train-loader batch settings
        shuffle=False,
        num_workers=cfg.TrainDataLoader["num_workers"],
        pin_memory=False,
        collate_fn=merge_second_batch,
        worker_init_fn=_worker_init_fn,
        drop_last=False)
    if cfg.WeightManager["restore"] is not None:
        restore(cfg.WeightManager["restore"], net)
    logger = Logger()
    t = time.time()
    net.eval()
    if is_val:
        detections = []
        result_path_step = Path(vis_dir)
        result_path_step.mkdir(parents=True, exist_ok=True)
        logger.log_txt("#################################")
        logger.log_txt("# EVAL")
        logger.log_txt("#################################")
        with torch.no_grad():
            for val_example in tqdm(val_dataloader):
                val_example = example_convert_to_torch(val_example, torch.float32)
                detection = net(val_example)
                detections += detection
        result_dict = val_data.dataset.evaluation(detections, str(result_path_step))
        for k, v in result_dict["results"].items():
            logger.log_txt("Evaluation {}".format(k))
            logger.log_txt(v)
        logger.log_metrics(result_dict["detail"], -1)
        detections = val_data.dataset.convert_detection_to_kitti_annos(detections)
        with open(result_path_step / "result.pkl", 'wb') as f:
            pickle.dump(detections, f)
    else:
        detections = []
        # load raw data
        data_dir = rawdata_dir
        os.makedirs(vis_dir, exist_ok=True)
        pc_dir = os.path.join(data_dir, "velodyne_points", "data")
        img2_dir = os.path.join(data_dir, "image_02", "data")
        calib_dir = os.path.join(data_dir, "calib")
        calib = read_calib(calib_dir)
        idx_list = os.listdir(pc_dir)
        idx_list = [idx.split(".")[0] for idx in idx_list]
        idx_list.sort(key=int)
        # for item in all data
        with torch.no_grad():
            for idx in tqdm(idx_list):
                # getitem
                pc = read_pc_from_bin(os.path.join(pc_dir, idx+".bin"))
                img = read_image(os.path.join(img2_dir, idx+".png"))
                input_dict = {
                    "lidar": {
                        "type": "lidar",
                        "points": pc,
                    },
                    "metadata": {
                        "image_idx": int(idx),
                        "image_shape": None,
                    },
                    "calib": None,
                    "cam": {}
                }
                calib_dict = {
                    'rect': calib.R0_rect,
                    'Trv2c': calib.Tr_velo_to_cam,
                    'P2': np.concatenate([calib.P2, np.array([[0, 0, 0, 1]])], axis=0),
                }
                input_dict['calib'] = calib_dict
                example = val_data.dataset._prep_func(input_dict)
                if "image_idx" in input_dict["metadata"]:
                    example["metadata"] = input_dict["metadata"]
                if "anchors_mask" in example:
                    example["anchors_mask"] = example["anchors_mask"].astype(np.uint8)
                example = merge_second_batch([example])
                val_example = example_convert_to_torch(example, torch.float32)
                detection = net(val_example)
                detections += detection
        detections = val_data.dataset.convert_detection_to_kitti_annos(detections)
        # save results
        result_path_step = Path(vis_dir)
        with open(result_path_step / "result.pkl", 'wb') as f:
            pickle.dump(detections, f)
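
For reference, a minimal command-line wrapper around Example #1's main() might look like the sketch below; the flag names and defaults are hypothetical assumptions, since the source does not show how main() is invoked.

import argparse

if __name__ == "__main__":
    # Hypothetical CLI wrapper; every flag name here is an assumption,
    # not taken from the original project.
    parser = argparse.ArgumentParser()
    parser.add_argument("--tag", required=True, help="experiment tag")
    parser.add_argument("--cfg-path", required=True, help="path to the config file")
    parser.add_argument("--is-val", action="store_true",
                        help="run dataset evaluation instead of raw-data inference")
    parser.add_argument("--rawdata-dir", default=None,
                        help="KITTI raw-data directory (only used without --is-val)")
    parser.add_argument("--vis-dir", required=True, help="output directory")
    args = parser.parse_args()
    main(args.tag, args.cfg_path, args.is_val, args.rawdata_dir, args.vis_dir)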
Example #2
# (imports: same as Example #1)
def main(tag, cfg_path):
    root_dir = Path(__file__).parent
    log_dir = root_dir / "logs" / tag
    saved_weights_dir = root_dir / "saved_weights" / tag
    log_dir.mkdir(parents=True, exist_ok=True)
    saved_weights_dir.mkdir(parents=True, exist_ok=True)
    setup_logger(log_dir)
    cfg = load_config_file(cfg_path=cfg_path, log_dir=log_dir)

    # build net
    voxelizer = voxelizer_builder.build(voxelizer_cfg=cfg.Voxelizer)
    box_coder = box_coder_builder.build(box_coder_cfg=cfg.BoxCoder)
    # the single-class setup (explicit anchor generator, similarity
    # calculator, and per-class target assigner) is replaced here by the
    # multiclass target assigner built below
    target_assigner = target_assigner_builder.build_multiclass(
        target_assigner_cfg=cfg.TargetAssigner, box_coder=box_coder)
    net = second_builder.build(cfg=cfg.Net,
                               voxelizer=voxelizer,
                               target_assigner=target_assigner).cuda()
    # build dataloader
    train_data = dataloader_builder.build(cfg.Net,
                                          cfg.TrainDataLoader,
                                          voxelizer,
                                          target_assigner,
                                          training=True)
    train_dataloader = torch.utils.data.DataLoader(
        train_data,
        batch_size=cfg.TrainDataLoader["batch_size"],
        shuffle=True,
        num_workers=cfg.TrainDataLoader["num_workers"],
        pin_memory=False,
        collate_fn=merge_second_batch,
        worker_init_fn=_worker_init_fn,
        drop_last=False)
    val_data = dataloader_builder.build(cfg.Net,
                                        cfg.ValDataLoader,
                                        voxelizer,
                                        target_assigner,
                                        training=False)
    val_dataloader = torch.utils.data.DataLoader(
        val_data,
        batch_size=cfg.TrainDataLoader["batch_size"],
        shuffle=False,
        num_workers=cfg.TrainDataLoader["num_workers"],
        pin_memory=False,
        collate_fn=merge_second_batch,
        worker_init_fn=_worker_init_fn,
        drop_last=False)
    # build optimizer
    optimizer, lr_scheduler = optimizer_builder.build(
        optimizer_cfg=cfg.Optimizer, lr_scheduler_cfg=cfg.LRScheduler, net=net)
    # build evaluater (stubbed out; validation runs inline below)
    evaluater = None
    if cfg.WeightManager["restore"] is not None:
        restore(cfg.WeightManager["restore"], net)
    logger = Logger()
    start_step = net.get_global_step()
    total_step = cfg.Optimizer["steps"]
    disp_itv = cfg.Task["disp_itv"]
    save_itv = cfg.Task["save_itv"]
    optimizer.zero_grad()
    step_times = []
    step = start_step
    t = time.time()
    while step < total_step:
        for example in train_dataloader:
            lr_scheduler.step(net.get_global_step())
            example_torch = example_convert_to_torch(example, torch.float32)
            batch_size = example["anchors"].shape[0]
            ret_dict = net(example_torch)
            cls_preds = ret_dict["cls_preds"]
            loss = ret_dict["loss"].mean()
            cls_loss_reduced = ret_dict["cls_loss_reduced"].mean()
            loc_loss_reduced = ret_dict["loc_loss_reduced"].mean()
            cls_pos_loss = ret_dict["cls_pos_loss"].mean()
            cls_neg_loss = ret_dict["cls_neg_loss"].mean()
            loc_loss = ret_dict["loc_loss"]
            cls_loss = ret_dict["cls_loss"]
            cared = ret_dict["cared"]
            labels = example_torch["labels"]
            loss.backward()
            torch.nn.utils.clip_grad_norm_(net.parameters(), 10.0)  # cap gradient norm to stabilize training
            optimizer.step()
            optimizer.zero_grad()
            net.update_global_step()
            step_time = (time.time() - t)
            step_times.append(step_time)
            t = time.time()
            # anchor label statistics for the first sample in the batch (diagnostics)
            num_pos = int((labels > 0)[0].float().sum().cpu().numpy())
            num_neg = int((labels == 0)[0].float().sum().cpu().numpy())
            if step % disp_itv == 0 and step != 0:
                print(
                    step,
                    f"loss: {loss.item():.4f}, cls_pos_loss: {cls_pos_loss.item():.4f}, "
                    f"cls_neg_loss: {cls_neg_loss.item():.4f}, loc_loss: {loc_loss.mean().item():.4f}"
                )
                logger.log_tsbd_scalor("train/loss", loss,
                                       net.get_global_step())
            if step % save_itv == 0 and step != 0:
                save_models(saved_weights_dir, [net, optimizer],
                            net.get_global_step(),
                            max_to_keep=float('inf'))
                net.eval()
                detections = []
                result_path_step = log_dir / f"step_{net.get_global_step()}"
                result_path_step.mkdir(parents=True, exist_ok=True)
                logger.log_txt("#################################" + str(step))
                logger.log_txt("# VAL" + str(step))
                logger.log_txt("#################################" + str(step))
                with torch.no_grad():  # match the eval path: no autograd during validation
                    for val_example in tqdm(val_dataloader):
                        val_example = example_convert_to_torch(
                            val_example, torch.float32)
                        detections += net(val_example)
                if cfg.ValDataLoader["Dataset"]["name"] == "MyKittiDataset":
                    result_dict = val_data.dataset.evaluation(
                        detections,
                        label_dir=os.path.join(val_data.dataset.root_path,
                                               "training", "label_2"),
                        output_dir=str(result_path_step))
                elif cfg.ValDataLoader["Dataset"]["name"] == "KittiDataset":
                    result_dict = val_data.dataset.evaluation(
                        detections, str(result_path_step))
                else:
                    raise ValueError("unsupported validation dataset: " +
                                     cfg.ValDataLoader["Dataset"]["name"])
                logger.log_metrics(result_dict["detail"], step)
                with open(result_path_step / "result.pkl", 'wb') as f:
                    pickle.dump(detections, f)
                net.train()
            step += 1
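
Both examples hand a _worker_init_fn to torch.utils.data.DataLoader without defining it. A plausible sketch, assuming the usual purpose of such a hook, reseeds NumPy in every worker so that forked workers do not produce identical random augmentations:

import time

import numpy as np

def _worker_init_fn(worker_id):
    # Reseed NumPy per worker: forked DataLoader workers inherit the same
    # RNG state, so without this every worker would draw identical random
    # numbers. (Sketch of an assumed implementation; the source omits it.)
    time_seed = np.array(time.time(), dtype=np.int32)
    np.random.seed(time_seed + worker_id)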
Example #3
# NOTE: this snippet begins mid-function; the opening of this dataloader
# build() and of the dataset constructor call is not shown in the source.
        root_path=dataloader_cfg["Dataset"]["kitti_root_path"],
        class_names=class_names,
        prep_func=prep_func,
        num_point_features=num_point_features)
    dataset = DatasetWrapper(dataset)
    return dataset


if __name__ == "__main__":
    from det3.methods.second.builder import (voxelizer_builder,
                                             box_coder_builder,
                                             similarity_calculator_builder,
                                             anchor_generator_builder,
                                             target_assigner_builder)
    cfg = load_module("methods/second/configs/config.py", name="cfg")
    voxelizer = voxelizer_builder.build(voxelizer_cfg=cfg.Voxelizer)
    anchor_generator = anchor_generator_builder.build(
        anchor_generator_cfg=cfg.AnchorGenerator)
    box_coder = box_coder_builder.build(box_coder_cfg=cfg.BoxCoder)
    similarity_calculator = similarity_calculator_builder.build(
        similarity_calculator_cfg=cfg.SimilarityCalculator)
    target_assigner = target_assigner_builder.build(
        target_assigner_cfg=cfg.TargetAssigner,
        box_coder=box_coder,
        anchor_generators=[anchor_generator],
        region_similarity_calculators=[similarity_calculator])
    build(cfg.Net,
          cfg.TrainDataLoader,
          voxelizer,
          target_assigner,
          training=True)
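
Example #3 returns the dataset wrapped in a DatasetWrapper, and Examples #1 and #2 later reach through it via val_data.dataset. The class itself is not shown; a minimal sketch consistent with that usage, assuming it merely adapts the object to the torch Dataset interface, is:

import torch.utils.data

class DatasetWrapper(torch.utils.data.Dataset):
    # Minimal sketch (assumption): expose an object implementing
    # __getitem__/__len__ through torch.utils.data.Dataset. The attribute
    # stays public because callers access val_data.dataset directly.
    def __init__(self, dataset):
        self.dataset = dataset

    def __getitem__(self, idx):
        return self.dataset[idx]

    def __len__(self):
        return len(self.dataset)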