def main(model_config,
         dataset_type,
         save_outputs,
         output_dir,
         data_config,
         seed,
         small_run,
         entry,
         device):
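    """Evaluate a model on the validation or test split.

    If `entry` is None the whole chosen split is evaluated
    (`save_outputs` and `output_dir` are forwarded to the evaluation
    routine); otherwise only the single dataset entry named by `entry`
    is run.
    """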
    # Load the model
    model = make_model(**model_config)
    # model.sid_obj.to(device)
    # print(model)
    model.to(device)

    # Load the data
    _, val, test = load_data()
    dataset = test if dataset_type == "test" else val

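    # Seed all sources of randomness so the evaluation is reproducible.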
    init_randomness(seed)
    if entry is None:
        print("Evaluating the model on {} ({})".format(data_config["data_name"],
                                                       dataset_type))
        evaluate_model_on_dataset(model, dataset, small_run, device, save_outputs, output_dir)
    else:
        print("Evaluating {}".format(entry))
        evaluate_model_on_data_entry(model, dataset, entry, device)
Example no. 2
    # print(config)
    # print(spad_config)
    del data_config["data_name"]
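    # Build the DORN + Sinkhorn-optimization model with the hyperparameters
    # used in this example.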
    model = DORN_sinkhorn_opt(
        sgd_iters=400,
        sinkhorn_iters=40,
        sigma=.5,
        lam=1e-2,
        kde_eps=1e-4,
        sinkhorn_eps=1e-4,
        remove_dc=spad_config["dc_count"] > 0.,
        use_intensity=spad_config["use_intensity"],
        use_squared_falloff=spad_config["use_squared_falloff"],
        lr=1e3)
    model.to(device)
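    # Load the data splits; only the test split is used here.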
    _, _, test = load_data(**data_config, spad_config=spad_config)

    dataloader = DataLoader(test, shuffle=True)
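    # Time the data-preparation step: seeding, fetching one scene by ID,
    # and moving its tensors to the device.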
    start = perf_counter()
    init_randomness(95290421)
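    # Fetch a single scene by its ID and add a batch dimension to each
    # tensor before moving it to the device.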
    input_ = test.get_item_by_id("kitchen_0002/1121")
    for key in [
            "rgb", "rgb_orig", "rawdepth", "spad", "mask", "rawdepth_orig",
            "mask_orig"
    ]:
        input_[key] = input_[key].unsqueeze(0).to(device)
    data_load_time = perf_counter() - start
    print("dataloader: {}".format(data_load_time))
    # print(input_["entry"])
    # print(model.hints_extractor[0].weight)
Example no. 3
    spad_config = spad_cfg()
    # print(config)
    # print(spad_config)
    del data_config["data_name"]
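    # Build the DORN-with-hints model from frozen, pretrained NYUv2 weights.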
    model = DORN_nyu_hints(
            in_channels=3,
            in_height=257,
            in_width=353,
            sid_bins=68,
            offset=data_config["offset"],
            min_depth=data_config["min_depth"],
            max_depth=data_config["max_depth"],
            alpha=data_config["alpha"],
            beta=data_config["beta"],
            frozen=True,
            pretrained=True,
            state_dict_file=os.path.join("models", "torch_params_nyuv2_BGR.pth.tar"),
            hints_len=68,
            spad_weight=1.
    )
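    # Load the data splits; only the training split is used here.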
    train, _, _ = load_data(**data_config, spad_config=spad_config)

    dataloader = DataLoader(train)
    start = perf_counter()
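    # Pull the first batch and measure how long loading it takes.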
    input_ = next(iter(dataloader))
    data_load_time = perf_counter() - start
    print("dataloader: {}".format(data_load_time))
    # print(input_["entry"])
    # print(model.hints_extractor[0].weight)
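    # Compute the loss for this batch on the CPU and print it.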
    loss, output = model.get_loss(input_, "cpu")
    print(loss)