def test_dataset_by_frame_ids():
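    """Visually compare model predictions with the ground-truth annotations
    for a fixed set of COCO frame ids (press any key to advance)."""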
    frame_ids = {
        724, 1532, 5037, 5992, 6040, 6723, 7088, 7386, 7977, 8762, 9769, 9891
    }
    if platform.system() == "Linux":
        datalake_path = r"/data1/workspaces/aiftimie/tms/tms_data"
    else:
        datalake_path = r"D:\tms_data"
    download_data_if_not_exists(datalake_path)
    coco_dset = get_dataset(datalake_path)

    g_tdp_fdp_1: Iterable[Tuple[FrameDatapoint, TargetDatapoint]]
    g_tdp_fdp_2: Iterable[Tuple[FrameDatapoint, TargetDatapoint]]
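    # tee the (frame, target) generator so targets and frames can be consumed
    # by two independent pipelines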
    g_tdp_fdp_1, g_tdp_fdp_2 = tee(
        gen_cocoitem2datapoints(coco_dset, frame_ids))
    g_tdp = gen_cocoitem2targetdp(g_tdp_fdp_1)
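    # three independent copies of the frame stream: one for inference,
    # two for drawing predictions and targets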
    g_fdp_1, g_fdp_2, g_fdp_3 = tee(gen_cocoitem2framedp(g_tdp_fdp_2), 3)

    model = create_model_efficient(
        model_creation_func=partial(create_model, max_operating_res=800))
    g_pred = compute(g_fdp_1,
                     model,
                     batch_size=5,
                     filter_classes=model_class_names)
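    # show predictions next to the ground truth; any key advances to the next frame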
    for fdp_pred, fdp_gt in zip(plot_detections(g_fdp_2, g_pred),
                                plot_targets(g_fdp_3, g_tdp)):
        cv2.imshow("image_pred", fdp_pred.image)
        cv2.imshow("image_gt", fdp_gt.image)
        cv2.waitKey(0)


def test_target_pred_iter_to_pandas(mock_some_obj_some_method):
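    """Run the detector over the dataset and convert the prediction and
    target iterators to pandas dataframes, saving them as CSV on Linux."""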
    mock_some_obj_some_method.return_value = 100

    if platform.system() == "Linux":
        datalake_path = r"/data1/workspaces/aiftimie/tms/tms_data"
    else:
        datalake_path = r"D:\tms_data"
    download_data_if_not_exists(datalake_path)
    coco_dset = get_dataset(datalake_path)

    g_tdp_fdp_1: Iterable[Tuple[FrameDatapoint, TargetDatapoint]]
    g_tdp_fdp_2: Iterable[Tuple[FrameDatapoint, TargetDatapoint]]
    g_tdp_fdp_1, g_tdp_fdp_2 = tee(gen_cocoitem2datapoints(coco_dset))
    g_tdp = gen_cocoitem2targetdp(g_tdp_fdp_1)
    g_fdp = gen_cocoitem2framedp(g_tdp_fdp_2)

    model = create_model_efficient(
        model_creation_func=partial(create_model, max_operating_res=800))
    g_pred = compute(g_fdp,
                     model,
                     batch_size=5,
                     filter_classes=model_class_names)

    df_pred, df_target = target_pred_iter_to_pandas(g_tdp, g_pred)
    if platform.system() == "Linux":
        df_pred.to_csv(
            "/data1/workspaces/aiftimie/tms/tms_experiments/pandas_dataframes/coco_pred.csv"
        )
        df_target.to_csv(
            "/data1/workspaces/aiftimie/tms/tms_experiments/pandas_dataframes/coco_target.csv"
        )


def test_get_dataset():
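    """The COCO 2017 validation set should contain exactly 5000 images."""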
    if platform.system() == "Linux":
        datalake_path = r"/data1/workspaces/aiftimie/tms/tms_data"
    else:
        datalake_path = r"D:\tms_data"
    download_data_if_not_exists(datalake_path)
    # if error about some mask: https://stackoverflow.com/questions/49311195/how-to-install-coco-pythonapi-in-python3
    coco_dset = get_dataset(datalake_path)
    assert len(coco_dset) == 5000


def test_target_iter_to_pandas(mock_some_obj_some_method):
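    """Convert the ground-truth target iterator into a pandas dataframe."""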
    mock_some_obj_some_method.return_value = 100

    if platform.system() == "Linux":
        datalake_path = r"/data1/workspaces/aiftimie/tms/tms_data"
    else:
        datalake_path = r"D:\tms_data"
    download_data_if_not_exists(datalake_path)

    coco_dset = get_dataset(datalake_path)

    g = gen_cocoitem2datapoints(coco_dset)
    g2 = gen_cocoitem2targetdp(g)
    df = target_iter_to_pandas(g2)
    # basic sanity check: the conversion should produce at least one row
    assert not df.empty


def get_dataframes(datalake_path, pred_csv_path, target_csv_path, max_operating_res):
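    """Run the detector over the dataset, persist the prediction and target
    dataframes as CSV and return them."""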
    download_data_if_not_exists(datalake_path)
    coco_dset = get_dataset(datalake_path)

    g_tdp_fdp_1, g_tdp_fdp_2 = tee(gen_cocoitem2datapoints(coco_dset))
    g_tdp = gen_cocoitem2targetdp(g_tdp_fdp_1)
    g_fdp = gen_cocoitem2framedp(g_tdp_fdp_2)

    model = create_model_efficient(
        model_creation_func=partial(create_model,
                                    max_operating_res=max_operating_res,
                                    conf_thr=0.05))
    g_pred = compute(g_fdp,
                     model,
                     batch_size=10,
                     filter_classes=model_class_names)

    df_pred, df_target = target_pred_iter_to_pandas(g_tdp, g_pred)

    df_pred.to_csv(pred_csv_path)
    df_target.to_csv(target_csv_path)
    return df_pred, df_target


def evaluate_speed(mock_some_obj_some_method, datalake_path, max_operating_res):
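    """Return the average wall-clock time per frame; the patched method is
    expected to limit the run to num_eval_frames frames."""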
    num_eval_frames = 200
    mock_some_obj_some_method.return_value = num_eval_frames

    download_data_if_not_exists(datalake_path)
    coco_dset = get_dataset(datalake_path)

    g_tdp_fdp_1, g_tdp_fdp_2 = tee(gen_cocoitem2datapoints(coco_dset))
    g_tdp = gen_cocoitem2targetdp(g_tdp_fdp_1)
    g_fdp = gen_cocoitem2framedp(g_tdp_fdp_2)

    model = create_model_efficient(
        model_creation_func=partial(create_model,
                                    max_operating_res=max_operating_res,
                                    conf_thr=0.05))
    g_pred = compute(g_fdp,
                     model,
                     batch_size=10,
                     filter_classes=model_class_names)
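    # assuming compute() yields predictions lazily (as the g_ prefix suggests),
    # the model inference itself runs while target_pred_iter_to_pandas consumes
    # g_pred inside the timed block below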
    start = time.time()
    df_pred, df_target = target_pred_iter_to_pandas(g_tdp, g_pred)
    end = time.time()
    return (end - start) / num_eval_frames


def test_gen_cocoitem2datapoints(mock_some_obj_some_method):
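    """Check that gen_cocoitem2datapoints yields (FrameDatapoint,
    TargetDatapoint) pairs and visualise the targets drawn over each frame."""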
    mock_some_obj_some_method.return_value = 100

    if platform.system() == "Linux":
        datalake_path = r"/data1/workspaces/aiftimie/tms/tms_data"
    else:
        datalake_path = r"D:\tms_data"
    download_data_if_not_exists(datalake_path)
    coco_dset = get_dataset(datalake_path)
    g = gen_cocoitem2datapoints(coco_dset)
    for fdp, tdp in g:
        # build a dummy "prediction" from a copy of the ground truth so it can
        # be drawn by plot_over_image without mutating the ground-truth target
        dummy_pred = dict(tdp.target)
        dummy_pred['scores'] = np.zeros_like(dummy_pred['labels'])
        dummy_pred['obj_id'] = np.zeros_like(dummy_pred['labels'])
        image = plot_over_image(fdp.image, dummy_pred)
        cv2.imshow("image", image)
        cv2.waitKey(0)
        assert isinstance(fdp, FrameDatapoint)
        assert isinstance(tdp, TargetDatapoint)


def test_download_data_if_not_exists(tmpdir):
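    """Downloading into an empty directory should yield the COCO 2017
    validation images, annotations and the expected category names."""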
    download_data_if_not_exists(tmpdir)
    assert os.path.exists(os.path.join(tmpdir, "coco_val_2017"))
    assert os.path.exists(os.path.join(tmpdir, "coco_val_2017", "val2017.zip"))
    assert os.path.exists(
        os.path.join(tmpdir, "coco_val_2017", "annotations_trainval2017.zip"))
    assert os.path.exists(os.path.join(tmpdir, "coco_val_2017", "val2017"))
    assert os.path.exists(os.path.join(tmpdir, "coco_val_2017", "annotations"))
    assert len(os.listdir(os.path.join(tmpdir, "coco_val_2017",
                                       "val2017"))) == 5000
    assert len(os.listdir(os.path.join(tmpdir, "coco_val_2017",
                                       "annotations"))) == 6
    json_file = os.path.join(tmpdir, "coco_val_2017", "annotations",
                             "instances_val2017.json")
    with open(json_file, 'r') as f:
        js = json.load(f)
    assert {item['id']: item['name']
            for item in js['categories']} == coco_val_2017_names