Example #1
    def val_dataloader(self):
        # create the chopped validation dataset

        rasterizer = build_rasterizer(cfg, dm)
        eval_cfg = cfg["valid_data_loader"]
        num_frames_to_chop = 100
        eval_base_path = create_chopped_dataset(
            dm.require(eval_cfg["key"]),
            cfg["raster_params"]["filter_agents_threshold"],
            num_frames_to_chop, cfg["model_params"]["future_num_frames"],
            MIN_FUTURE_STEPS)
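        # the chopped directory holds the truncated zarr plus the agents mask
        # (mask.npz) and the ground-truth trajectories (gt.csv) used below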

        eval_zarr_path = str(
            Path(eval_base_path) / Path(dm.require(eval_cfg["key"])).name)
        eval_mask_path = str(Path(eval_base_path) / "mask.npz")
        eval_gt_path = str(Path(eval_base_path) / "gt.csv")
        self.eval_gt_path = eval_gt_path

        eval_zarr = ChunkedDataset(eval_zarr_path).open(cache_size_bytes=int(10e9))
        eval_mask = np.load(eval_mask_path)["arr_0"]

        eval_dataset = AgentDataset(cfg,
                                    eval_zarr,
                                    rasterizer,
                                    agents_mask=eval_mask)
        eval_dataloader = DataLoader(eval_dataset,
                                     shuffle=False,
                                     batch_size=eval_cfg["batch_size"],
                                     num_workers=8)

        return eval_dataloader
Example #2
def build_chopped_dataset():
    dm = get_dm()

    eval_cfg = cfg["val_data_loader"]

    MIN_FUTURE_STEPS = 10
    num_frames_to_chop = [80, 130, 180]
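    # build one chopped validation set per cut-off frame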
    for chop_frame in num_frames_to_chop:
        eval_base_path = create_chopped_dataset(
            dm.require(eval_cfg["key"]),
            cfg["raster_params"]["filter_agents_threshold"], chop_frame,
            cfg["model_params"]["future_num_frames"], MIN_FUTURE_STEPS)
        print(eval_base_path)
Example #3
def main():
    dm = LocalDataManager(None)
    rasterizer = build_rasterizer(cfg, dm)

    num_frames_to_chop = 10
    eval_base_path = create_chopped_dataset(
        dm.require("scenes/validate.zarr"),
        cfg["raster_params"]["filter_agents_threshold"],
        num_frames_to_chop,
        cfg["model_params"]["future_num_frames"],
        MIN_FUTURE_STEPS,
    )

    print("Path:", eval_base_path)
Example #4
def generate_eval_dataset(cfg, dm, rasterizer):
    eval_cfg = cfg["test_data_loader"]
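    # copy the zarr somewhere writable first; create_chopped_dataset writes its
    # output directory next to the source zarr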
    eval_dir = shutil.copytree(dm.require(eval_cfg["key"]), '/tmp/lyft/test.zarr')
    num_frames_to_chop = 50
    eval_base_path = create_chopped_dataset(
        eval_dir, cfg["raster_params"]["filter_agents_threshold"],
        num_frames_to_chop, cfg["model_params"]["future_num_frames"],
        MIN_FUTURE_STEPS)

    eval_zarr_path = str(Path(eval_base_path) / "test.zarr")
    eval_mask_path = str(Path(eval_base_path) / "mask.npz")
    eval_gt_path = str(Path(eval_base_path) / "gt.csv")

    eval_zarr = ChunkedDataset(eval_zarr_path).open()
    eval_mask = np.load(eval_mask_path)["arr_0"]
    # ===== INIT DATASET AND LOAD MASK
    eval_dataset = AgentDataset(cfg, eval_zarr, rasterizer, agents_mask=eval_mask)
    eval_dataloader = DataLoader(eval_dataset, shuffle=eval_cfg["shuffle"], batch_size=eval_cfg["batch_size"], 
                                num_workers=eval_cfg["num_workers"])
    eval_dataset_ego = EgoDataset(cfg, eval_zarr, rasterizer)

    return eval_dataset, eval_dataloader, eval_dataset_ego, eval_gt_path
Example #5
            train_dataset,
            np.arange(cfg["train_data_loader"]["batch_size"] * 40))
    train_loader = DataLoader(train_dataset,
                              shuffle=train_cfg["shuffle"],
                              batch_size=train_cfg["batch_size"],
                              num_workers=train_cfg["num_workers"])
    print(train_agent_dataset)

    # GENERATE AND LOAD CHOPPED DATASET
    print('=' * 10 + 'Loading Validation' + '=' * 10)
    valid_cfg = cfg["valid_data_loader"]
    valid_path = "scenes/sample.zarr" if debug else valid_cfg["key"]
    num_frames_to_chop = 100
    MIN_FUTURE_STEPS = 10
    valid_base_path = create_chopped_dataset(
        dm.require(valid_path),
        cfg["raster_params"]["filter_agents_threshold"], num_frames_to_chop,
        cfg["model_params"]["future_num_frames"], MIN_FUTURE_STEPS)
    valid_zarr_path = str(
        Path(valid_base_path) / Path(dm.require(valid_path)).name)
    valid_mask_path = str(Path(valid_base_path) / "mask.npz")
    valid_gt_path = str(Path(valid_base_path) / "gt.csv")
    valid_zarr = ChunkedDataset(valid_zarr_path).open()
    valid_mask = np.load(valid_mask_path)["arr_0"]
    # ===== INIT DATASET AND LOAD MASK
    valid_agent_dataset = AgentDataset(cfg,
                                       valid_zarr,
                                       rasterizer,
                                       agents_mask=valid_mask)
    valid_dataset = TransformDataset(valid_agent_dataset, transform)
    valid_loader = DataLoader(valid_dataset,
                              shuffle=valid_cfg["shuffle"],
Example #6
# Load data and model
lyft_data_module = models.LyftDataModule(data_path, config_path)

model = models.resnet_baseline.load_from_checkpoint(
    "/home/elias.nehme1/Documents/lyft-motion-prediction/epoch=0-v1.ckpt")

# Generating a chopped dataset
generate_chopped_dataset = False

if generate_chopped_dataset:
    num_frames_to_chop = 100
    eval_base_path = create_chopped_dataset(
        lyft_data_module.dm.require(lyft_data_module.val_cfg["key"]),
        lyft_data_module.cfg["raster_params"]["filter_agents_threshold"],
        num_frames_to_chop,
        lyft_data_module.cfg["model_params"]["future_num_frames"],
        MIN_FUTURE_STEPS,
    )

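# reuse a dataset that was chopped in an earlier run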
eval_base_path = "/home/elias.nehme1/Documents/lyft-motion-prediction/data/scenes/validate_chopped_100"

val_zarr_path = str(
    Path(eval_base_path) /
    Path(lyft_data_module.dm.require(lyft_data_module.val_cfg["key"])).name)
val_mask_path = str(Path(eval_base_path) / "mask.npz")
val_gt_path = str(Path(eval_base_path) / "gt.csv")

val_zarr = ChunkedDataset(val_zarr_path).open()
val_mask = np.load(val_mask_path)["arr_0"]
Example #7
        data = next(tr_it)
    except StopIteration:
        tr_it = iter(evaluation_dataloader)
        data = next(tr_it)
    model.eval()
    torch.set_grad_enabled(False)
    validate_loss, _ = forward(data, model, device, criterion)

    losses_evaluation.append(validate_loss.item())
    progress_bar.set_description(f"loss: {validate_loss.item()} loss(avg): {np.mean(losses_evaluation)}")

# ===== GENERATE AND LOAD CHOPPED DATASET
num_frames_to_chop = 50
eval_cfg = training_cfg["val_data_loader"]
eval_base_path = create_chopped_dataset(dm.require(eval_cfg["key"]),
                                        training_cfg["raster_params"]["filter_agents_threshold"],
                                        num_frames_to_chop, training_cfg["model_params"]["future_num_frames"],
                                        MIN_FUTURE_STEPS)

eval_zarr_path = str(Path(eval_base_path) / Path(dm.require(eval_cfg["key"])).name)
eval_mask_path = str(Path(eval_base_path) / "mask.npz")
eval_gt_path = str(Path(eval_base_path) / "gt.csv")

eval_zarr = ChunkedDataset(eval_zarr_path).open()
eval_mask = np.load(eval_mask_path)["arr_0"]
# ===== INIT DATASET AND LOAD MASK
eval_dataset = AgentDataset(training_cfg, eval_zarr, rasterizer, agents_mask=eval_mask)
eval_dataloader = DataLoader(eval_dataset, shuffle=eval_cfg["shuffle"], batch_size=eval_cfg["batch_size"],
                             num_workers=eval_cfg["num_workers"])

model.eval()
torch.set_grad_enabled(False)
Example #8
def evaluate(cfg, model, dm, rasterizer, first_time, iters, eval_dataloader, eval_gt_path):
    if first_time:
        num_frames_to_chop = 100
        print("min_future_steps: ",MIN_FUTURE_STEPS)
        eval_cfg = cfg["val_data_loader"]
        eval_base_path = create_chopped_dataset(
            dm.require(eval_cfg["key"]), cfg["raster_params"]["filter_agents_threshold"],
            num_frames_to_chop, cfg["model_params"]["future_num_frames"], MIN_FUTURE_STEPS)
        eval_zarr_path = str(Path(eval_base_path) / Path(dm.require(eval_cfg["key"])).name)
        eval_mask_path = str(Path(eval_base_path) / "mask.npz")
        eval_gt_path = str(Path(eval_base_path) / "gt.csv")
        eval_zarr = ChunkedDataset(eval_zarr_path).open()
        eval_mask = np.load(eval_mask_path)["arr_0"]
        eval_dataset = AgentDataset(cfg, eval_zarr, rasterizer, agents_mask=eval_mask)
        eval_dataloader = DataLoader(eval_dataset, shuffle=eval_cfg["shuffle"],
                                     batch_size=eval_cfg["batch_size"],
                                     num_workers=eval_cfg["num_workers"])
        print(eval_dataset)
        first_time = False

    model.eval()
    torch.set_grad_enabled(False)

    future_coords_offsets_pd = []
    timestamps = []
    confidences_list = []
    agent_ids = []
    progress_bar = tqdm(eval_dataloader)
    for data in progress_bar:
        _, preds, confidences = forward(data, model)
        
        # convert agent coordinates into world offsets
        preds = preds.cpu().numpy()
        world_from_agents = data["world_from_agent"].numpy()
        centroids = data["centroid"].numpy()
        coords_offset = []
        
        for idx in range(len(preds)):
            for mode in range(3):
                preds[idx, mode, :, :] = transform_points(preds[idx, mode, :, :], world_from_agents[idx]) - centroids[idx][:2]
        
        future_coords_offsets_pd.append(preds.copy())
        confidences_list.append(confidences.cpu().numpy().copy())
        timestamps.append(data["timestamp"].numpy().copy())
        agent_ids.append(data["track_id"].numpy().copy())
    
    model.train()
    torch.set_grad_enabled(True)

    pred_path = os.path.join(cfg["save_path"], f"pred_{iters}.csv")

    write_pred_csv(pred_path,
                   timestamps=np.concatenate(timestamps),
                   track_ids=np.concatenate(agent_ids),
                   coords=np.concatenate(future_coords_offsets_pd),
                   confs=np.concatenate(confidences_list))

    metrics = compute_metrics_csv(eval_gt_path, pred_path, [neg_multi_log_likelihood, time_displace])
    for metric_name, metric_mean in metrics.items():
        print(metric_name, metric_mean)

    return first_time, eval_dataloader, eval_gt_path
Example #9
def evaluate(model, device, data_path):

    # set env variable for data
    os.environ["L5KIT_DATA_FOLDER"] = data_path
    dm = LocalDataManager(None)

    cfg = model.cfg

    # ===== INIT DATASET
    test_cfg = cfg["test_data_loader"]

    # Rasterizer
    rasterizer = build_rasterizer(cfg, dm)

    # Test dataset/dataloader
    test_zarr = ChunkedDataset(dm.require(test_cfg["key"])).open()
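    # agents mask shipped with the test split; it selects the agents to predict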
    test_mask = np.load(f"{data_path}/scenes/mask.npz")["arr_0"]
    test_dataset = AgentDataset(cfg,
                                test_zarr,
                                rasterizer,
                                agents_mask=test_mask)
    test_dataloader = DataLoader(test_dataset,
                                 shuffle=test_cfg["shuffle"],
                                 batch_size=test_cfg["batch_size"],
                                 num_workers=test_cfg["num_workers"])
    print(test_dataloader)

    # ==== EVAL LOOP
    model.eval()
    torch.set_grad_enabled(False)
    criterion = nn.MSELoss(reduction="none")

    # store information for evaluation
    future_coords_offsets_pd = []
    timestamps = []

    agent_ids = []
    progress_bar = tqdm(test_dataloader)
    for data in progress_bar:
        _, outputs, _ = model.forward(data, device, criterion)
        future_coords_offsets_pd.append(outputs.cpu().numpy().copy())
        timestamps.append(data["timestamp"].numpy().copy())
        agent_ids.append(data["track_id"].numpy().copy())

    # ==== Save Results
    pred_path = "./submission.csv"
    write_pred_csv(pred_path,
                   timestamps=np.concatenate(timestamps),
                   track_ids=np.concatenate(agent_ids),
                   coords=np.concatenate(future_coords_offsets_pd))

    # ===== GENERATE AND LOAD CHOPPED DATASET
    num_frames_to_chop = 56
    test_cfg = cfg["test_data_loader"]
    test_base_path = create_chopped_dataset(
        zarr_path=dm.require(test_cfg["key"]),
        th_agent_prob=cfg["raster_params"]["filter_agents_threshold"],
        num_frames_to_copy=num_frames_to_chop,
        num_frames_gt=cfg["model_params"]["future_num_frames"],
        min_frame_future=MIN_FUTURE_STEPS)

    eval_zarr_path = str(
        Path(test_base_path) / Path(dm.require(test_cfg["key"])).name)
    print(eval_zarr_path)
    test_mask_path = str(Path(test_base_path) / "mask.npz")
    test_gt_path = str(Path(test_base_path) / "gt.csv")

    test_zarr = ChunkedDataset(eval_zarr_path).open()
    test_mask = np.load(test_mask_path)["arr_0"]

    # ===== INIT DATASET AND LOAD MASK
    test_dataset = AgentDataset(cfg,
                                test_zarr,
                                rasterizer,
                                agents_mask=test_mask)
    test_dataloader = DataLoader(test_dataset,
                                 shuffle=test_cfg["shuffle"],
                                 batch_size=test_cfg["batch_size"],
                                 num_workers=test_cfg["num_workers"])
    print(test_dataset)

    # ==== Perform Evaluation
    print(test_gt_path)
    metrics = compute_metrics_csv(test_gt_path, pred_path,
                                  [neg_multi_log_likelihood, time_displace])
    for metric_name, metric_mean in metrics.items():
        print(metric_name, metric_mean)
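
All of the snippets above lean on the same l5kit entry points. For reference, this is the minimal import block they assume (a sketch written against l5kit ~1.x; module paths may differ in other versions):

import os
import shutil
from pathlib import Path

import numpy as np
import torch
from torch import nn
from torch.utils.data import DataLoader
from tqdm import tqdm

from l5kit.data import ChunkedDataset, LocalDataManager
from l5kit.dataset import AgentDataset, EgoDataset
from l5kit.evaluation import compute_metrics_csv, create_chopped_dataset, write_pred_csv
from l5kit.evaluation.chop_dataset import MIN_FUTURE_STEPS
from l5kit.evaluation.metrics import neg_multi_log_likelihood, time_displace
from l5kit.geometry import transform_points
from l5kit.rasterization import build_rasterizer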