def test_random_crop_with_bbox_op_coco_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without RandomCropWithBBox Op applied.
    Testing with the COCO dataset.
    """
    logger.info("test_random_crop_with_bbox_op_coco_c")
    # load dataset
    dataCoco1 = ds.CocoDataset(DATA_DIR_COCO[0], annotation_file=DATA_DIR_COCO[1], task="Detection",
                               decode=True, shuffle=False)

    dataCoco2 = ds.CocoDataset(DATA_DIR_COCO[0], annotation_file=DATA_DIR_COCO[1], task="Detection",
                               decode=True, shuffle=False)

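    # Crop to 512x512 after padding 200 pixels on each side; RandomCropWithBBox
    # also adjusts the bounding boxes to the cropped region.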
    test_op = c_vision.RandomCropWithBBox([512, 512], [200, 200, 200, 200])

    dataCoco2 = dataCoco2.map(operations=[test_op],
                              input_columns=["image", "bbox"],
                              output_columns=["image", "bbox"],
                              column_order=["image", "bbox"])

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(dataCoco1.create_dict_iterator(num_epochs=1, output_numpy=True),
                          dataCoco2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp, "bbox")
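
# visualize_with_bounding_boxes comes from the shared test utilities and is not
# defined in this file. As a rough, hypothetical sketch of what such a helper
# does (assuming matplotlib and [x, y, w, h] bbox rows; the name below is
# illustrative, not the real utility):
def _visualize_with_bounding_boxes_sketch(unaug_samples, aug_samples, annot_name="bbox"):
    import matplotlib.pyplot as plt
    from matplotlib.patches import Rectangle

    for unaug, aug in zip(unaug_samples, aug_samples):
        _, axes = plt.subplots(1, 2)
        for ax, row in zip(axes, (unaug, aug)):
            ax.imshow(row["image"])  # decoded HWC image
            for (x, y, w, h) in row[annot_name]:  # one rectangle per bbox row
                ax.add_patch(Rectangle((x, y), w, h, fill=False, edgecolor="red"))
        plt.show()
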
def test_bounding_box_augment_op_coco_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without BoundingBoxAugment Op applied.
    Testing with the COCO dataset.
    """
    logger.info("test_bounding_box_augment_op_coco_c")

    dataCoco1 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

    dataCoco2 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

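    # Apply RandomHorizontalFlip with probability 1 to 100% of the bounding box
    # regions (the second argument is the ratio of boxes to augment).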
    test_op = c_vision.BoundingBoxAugment(c_vision.RandomHorizontalFlip(1), 1)

    dataCoco2 = dataCoco2.map(operations=[test_op],
                              input_columns=["image", "bbox"],
                              output_columns=["image", "bbox"],
                              column_order=["image", "bbox"])

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(dataCoco1.create_dict_iterator(num_epochs=1, output_numpy=True),
                          dataCoco2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp, "bbox")
def test_resize_with_bbox_op_coco_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without ResizeWithBBox Op applied.
    Tests with MD5 check, expected to pass.
    Testing with the COCO dataset.
    """
    logger.info("test_resize_with_bbox_op_coco_c")

    # Load dataset
    dataCOCO1 = ds.CocoDataset(DATA_DIR_2[0], annotation_file=DATA_DIR_2[1], task="Detection",
                               decode=True, shuffle=False)

    dataCOCO2 = ds.CocoDataset(DATA_DIR_2[0], annotation_file=DATA_DIR_2[1], task="Detection",
                               decode=True, shuffle=False)

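    # A single int resizes the smaller edge to 200 while preserving the aspect
    # ratio; the bounding boxes are rescaled by the same factor.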
    test_op = c_vision.ResizeWithBBox(200)

    # map to apply ops

    dataCOCO2 = dataCOCO2.map(operations=[test_op], input_columns=["image", "bbox"],
                              output_columns=["image", "bbox"],
                              column_order=["image", "bbox"])

    filename = "resize_with_bbox_op_01_c_coco_result.npz"
    save_and_check_md5(dataCOCO2, filename, generate_golden=GENERATE_GOLDEN)

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(dataCOCO1.create_dict_iterator(num_epochs=1, output_numpy=True),
                          dataCOCO2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp, annot_name="bbox")
def test_random_resize_with_bbox_op_rand_coco_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without RandomResizeWithBBox Op applied.
    Tests with MD5 check, expected to pass.
    Testing with the COCO dataset.
    """
    logger.info("test_random_resize_with_bbox_op_rand_coco_c")
    original_seed = config_get_set_seed(231)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
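    # Fix the seed and worker count so the random resize below is deterministic
    # for the MD5 golden-file comparison; both are restored at the end.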

    # Load dataset
    dataCoco1 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

    dataCoco2 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

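    # Like ResizeWithBBox, but the interpolation mode is chosen at random,
    # hence the fixed seed above.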
    test_op = c_vision.RandomResizeWithBBox(200)

    # map to apply ops

    dataCoco2 = dataCoco2.map(operations=[test_op],
                              input_columns=["image", "bbox"],
                              output_columns=["image", "bbox"],
                              column_order=["image", "bbox"])

    filename = "random_resize_with_bbox_op_01_c_coco_result.npz"
    save_and_check_md5(dataCoco2, filename, generate_golden=GENERATE_GOLDEN)

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(dataCoco1.create_dict_iterator(num_epochs=1, output_numpy=True),
                          dataCoco2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp, annot_name="bbox")

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
def test_coco_detection():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection",
                           decode=True, shuffle=False)
    num_iter = 0
    image_shape = []
    bbox = []
    category_id = []
    for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        image_shape.append(data["image"].shape)
        bbox.append(data["bbox"])
        category_id.append(data["category_id"])
        num_iter += 1
    assert num_iter == 6
    assert image_shape[0] == (2268, 4032, 3)
    assert image_shape[1] == (561, 595, 3)
    assert image_shape[2] == (607, 585, 3)
    assert image_shape[3] == (642, 675, 3)
    assert image_shape[4] == (2268, 4032, 3)
    assert image_shape[5] == (2268, 4032, 3)
    assert np.array_equal(np.array([[10., 10., 10., 10.], [70., 70., 70., 70.]]), bbox[0])
    assert np.array_equal(np.array([[20., 20., 20., 20.], [80., 80., 80.0, 80.]]), bbox[1])
    assert np.array_equal(np.array([[30.0, 30.0, 30.0, 30.]]), bbox[2])
    assert np.array_equal(np.array([[40., 40., 40., 40.]]), bbox[3])
    assert np.array_equal(np.array([[50., 50., 50., 50.]]), bbox[4])
    assert np.array_equal(np.array([[60., 60., 60., 60.]]), bbox[5])
    assert np.array_equal(np.array([[1], [7]]), category_id[0])
    assert np.array_equal(np.array([[2], [8]]), category_id[1])
    assert np.array_equal(np.array([[3]]), category_id[2])
    assert np.array_equal(np.array([[4]]), category_id[3])
    assert np.array_equal(np.array([[5]]), category_id[4])
    assert np.array_equal(np.array([[6]]), category_id[5])
def test_coco_panoptic():
    data1 = ds.CocoDataset(DATA_DIR,
                           annotation_file=PANOPTIC_FILE,
                           task="Panoptic",
                           decode=True,
                           shuffle=False)
    num_iter = 0
    image_shape = []
    bbox = []
    category_id = []
    iscrowd = []
    area = []
    for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        image_shape.append(data["image"].shape)
        bbox.append(data["bbox"])
        category_id.append(data["category_id"])
        iscrowd.append(data["iscrowd"])
        area.append(data["area"])
        num_iter += 1
    assert num_iter == 2
    assert image_shape[0] == (2268, 4032, 3)
    assert np.array_equal(np.array([[472, 173, 36, 48], [340, 22, 154, 301], [486, 183, 30, 35]]),
                          bbox[0])
    assert np.array_equal(np.array([[1], [1], [2]]), category_id[0])
    assert np.array_equal(np.array([[0], [0], [0]]), iscrowd[0])
    assert np.array_equal(np.array([[705], [14062], [626]]), area[0])
    assert image_shape[1] == (642, 675, 3)
    assert np.array_equal(
        np.array([[103, 133, 229, 422], [243, 175, 93, 164]]), bbox[1])
    assert np.array_equal(np.array([[1], [3]]), category_id[1])
    assert np.array_equal(np.array([[0], [0]]), iscrowd[1])
    assert np.array_equal(np.array([[43102], [6079]]), area[1])
def test_coco_sampler_chain():
    """
    Test Coco sampler chain
    """
    logger.info("test_coco_sampler_chain")

    sampler = ds.DistributedSampler(num_shards=2,
                                    shard_id=0,
                                    shuffle=False,
                                    num_samples=5)
    child_sampler = ds.RandomSampler(replacement=True, num_samples=2)
    sampler.add_child(child_sampler)
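    # With a child sampler attached, the child samples first: RandomSampler
    # yields 2 indices, which DistributedSampler then splits across 2 shards,
    # leaving a single row for shard 0 (hence the size-1 asserts below).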
    data1 = ds.CocoDataset(COCO_DATA_DIR,
                           annotation_file=ANNOTATION_FILE,
                           task="Detection",
                           decode=True,
                           sampler=sampler)

    # Verify dataset size
    data1_size = data1.get_dataset_size()
    logger.info("dataset size is: {}".format(data1_size))
    assert data1_size == 1

    # Verify number of rows
    assert sum([1 for _ in data1]) == 1

    # Verify dataset contents
    res = []
    for item in data1.create_tuple_iterator(num_epochs=1, output_numpy=True):
        logger.info("item: {}".format(item))
        res.append(item)
    logger.info("dataset: {}".format(res))
def test_coco_keypoint():
    data1 = ds.CocoDataset(DATA_DIR,
                           annotation_file=KEYPOINT_FILE,
                           task="Keypoint",
                           decode=True,
                           shuffle=False)
    num_iter = 0
    image_shape = []
    keypoints = []
    num_keypoints = []
    for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        image_shape.append(data["image"].shape)
        keypoints.append(data["keypoints"])
        num_keypoints.append(data["num_keypoints"])
        num_iter += 1
    assert num_iter == 2
    assert image_shape[0] == (2268, 4032, 3)
    assert image_shape[1] == (561, 595, 3)
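    # Keypoints are (x, y, visibility) triplets; all-zero triplets are
    # unannotated, so num_keypoints counts the labeled ones (14 and 10 here).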
    assert np.array_equal(
        np.array([[
            368., 61., 1., 369., 52., 2., 0., 0., 0., 382., 48., 2., 0., 0.,
            0., 368., 84., 2., 435., 81., 2., 362., 125., 2., 446., 125., 2.,
            360., 153., 2., 0., 0., 0., 397., 167., 1., 439., 166., 1., 369.,
            193., 2., 461., 234., 2., 361., 246., 2., 474., 287., 2.
        ]]), keypoints[0])
    assert np.array_equal(np.array([[14]]), num_keypoints[0])
    assert np.array_equal(
        np.array([[
            244., 139., 2., 0., 0., 0., 226., 118., 2., 0., 0., 0., 154., 159.,
            2., 143., 261., 2., 135., 312., 2., 271., 423., 2., 184., 530., 2.,
            261., 280., 2., 347., 592., 2., 0., 0., 0., 123., 596., 2., 0., 0.,
            0., 0., 0., 0., 0., 0., 0., 0., 0., 0.
        ]]), keypoints[1])
    assert np.array_equal(np.array([[10]]), num_keypoints[1])
def test_coco_dataset_size():
    dataset = ds.CocoDataset(COCO_DATA_DIR,
                             annotation_file=ANNOTATION_FILE,
                             task="Detection",
                             decode=True,
                             shuffle=False)
    assert dataset.get_dataset_size() == 6

    dataset_shard_2_0 = ds.CocoDataset(COCO_DATA_DIR,
                                       annotation_file=ANNOTATION_FILE,
                                       task="Detection",
                                       decode=True,
                                       shuffle=False,
                                       num_shards=2,
                                       shard_id=0)
    assert dataset_shard_2_0.get_dataset_size() == 3
def test_get_column_name_coco():
    data = ds.CocoDataset(COCO_DIR,
                          annotation_file=COCO_ANNOTATION,
                          task="Detection",
                          decode=True,
                          shuffle=False)
    assert data.get_col_names() == ["image", "bbox", "category_id", "iscrowd"]
def test_coco_panoptic_classindex():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=PANOPTIC_FILE, task="Panoptic", decode=True)
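    # For the Panoptic task each class maps to a two-element list (the
    # Detection task returns one-element lists; see test_coco_detection_classindex).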
    class_index = data1.get_class_indexing()
    assert class_index == {'person': [1, 1], 'bicycle': [2, 1], 'car': [3, 1]}
    num_iter = 0
    for _ in data1.__iter__():
        num_iter += 1
    assert num_iter == 2
def test_coco_case_0():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    data1 = data1.shuffle(10)
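    # pad_info={} pads every column to the largest shape in the batch, so the
    # variable-length bbox rows can be batched together.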
    data1 = data1.batch(3, pad_info={})
    num_iter = 0
    for _ in data1.create_dict_iterator():
        num_iter += 1
    assert num_iter == 2
def test_coco_detection_classindex():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    class_index = data1.get_class_indexing()
    assert class_index == {'person': [1], 'bicycle': [2], 'car': [3], 'cat': [4], 'dog': [5], 'monkey': [6],
                           'bag': [7], 'orange': [8]}
    num_iter = 0
    for _ in data1.__iter__():
        num_iter += 1
    assert num_iter == 6
def test_coco_case_2():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    resize_op = vision.Resize((224, 224))

    data1 = data1.map(operations=resize_op, input_columns=["image"])
    data1 = data1.repeat(4)
    num_iter = 0
    for _ in data1.__iter__():
        num_iter += 1
    assert num_iter == 24
def test_coco_stuff():
    data1 = ds.CocoDataset(DATA_DIR,
                           annotation_file=ANNOTATION_FILE,
                           task="Stuff",
                           decode=True,
                           shuffle=False)
    num_iter = 0
    image_shape = []
    segmentation = []
    iscrowd = []
    for data in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        image_shape.append(data["image"].shape)
        segmentation.append(data["segmentation"])
        iscrowd.append(data["iscrowd"])
        num_iter += 1
    assert num_iter == 6
    assert image_shape[0] == (2268, 4032, 3)
    assert image_shape[1] == (561, 595, 3)
    assert image_shape[2] == (607, 585, 3)
    assert image_shape[3] == (642, 675, 3)
    assert image_shape[4] == (2268, 4032, 3)
    assert image_shape[5] == (2268, 4032, 3)
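    # Segmentation rows of unequal length are padded with -1. so that each
    # sample forms a rectangular array.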
    assert np.array_equal(np.array([[10., 12., 13., 14., 15., 16., 17., 18., 19., 20.],
                                    [70., 72., 73., 74., 75., -1., -1., -1., -1., -1.]]),
                          segmentation[0])
    assert np.array_equal(np.array([[0], [0]]), iscrowd[0])
    assert np.array_equal(np.array([[20., 22., 23., 24., 25., 26., 27., 28., 29., 30., 31.],
                                    [10., 12., 13., 14., 15., 16., 17., 18., 19., 20., -1.]]),
                          segmentation[1])
    assert np.array_equal(np.array([[0], [1]]), iscrowd[1])
    assert np.array_equal(np.array([[40., 42., 43., 44., 45., 46., 47., 48., 49., 40., 41., 42.]]),
                          segmentation[2])
    assert np.array_equal(np.array([[0]]), iscrowd[2])
    assert np.array_equal(np.array([[50., 52., 53., 54., 55., 56., 57., 58., 59., 60., 61., 62., 63.]]),
                          segmentation[3])
    assert np.array_equal(np.array([[0]]), iscrowd[3])
    assert np.array_equal(np.array([[60., 62., 63., 64., 65., 66., 67., 68., 69., 70., 71., 72., 73., 74.]]),
                          segmentation[4])
    assert np.array_equal(np.array([[0]]), iscrowd[4])
    assert np.array_equal(np.array([[60., 62., 63., 64., 65., 66., 67.],
                                    [68., 69., 70., 71., 72., 73., 74.]]),
                          segmentation[5])
    assert np.array_equal(np.array([[0]]), iscrowd[5])
def test_coco_case_1():
    data1 = ds.CocoDataset(DATA_DIR, annotation_file=ANNOTATION_FILE, task="Detection", decode=True)
    sizes = [0.5, 0.5]
    randomize = False
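    # A non-randomized 50/50 split slices the 6-row dataset sequentially into
    # two 3-row halves.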
    dataset1, dataset2 = data1.split(sizes=sizes, randomize=randomize)

    num_iter = 0
    for _ in dataset1.create_dict_iterator():
        num_iter += 1
    assert num_iter == 3
    num_iter = 0
    for _ in dataset2.create_dict_iterator():
        num_iter += 1
    assert num_iter == 3
def test_coco_case_exception():
    try:
        data1 = ds.CocoDataset("path_not_exist/",
                               annotation_file=ANNOTATION_FILE,
                               task="Detection")
        for _ in data1.__iter__():
            pass
        assert False
    except ValueError as e:
        assert "does not exist or permission denied" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file="./file_not_exist",
                               task="Detection")
        for _ in data1.__iter__():
            pass
        assert False
    except ValueError as e:
        assert "does not exist or permission denied" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Invalid task")
        for _ in data1.__iter__():
            pass
        assert False
    except ValueError as e:
        assert "Invalid task type" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=LACKOFIMAGE_FILE,
                               task="Detection")
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "invalid node found in json" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=INVALID_CATEGORY_ID_FILE,
                               task="Detection")
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "category_id can't find in categories" in str(e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=INVALID_FILE,
                               task="Detection")
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "json.exception.parse_error" in str(e)

    try:
        sampler = ds.PKSampler(3)
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=INVALID_FILE,
                               task="Detection",
                               sampler=sampler)
        for _ in data1.__iter__():
            pass
        assert False
    except ValueError as e:
        assert "CocoDataset doesn't support PKSampler" in str(e)

    def exception_func(item):
        raise Exception("Error occurred!")

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Detection")
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Detection")
        data1 = data1.map(operations=vision.Decode(),
                          input_columns=["image"],
                          num_parallel_workers=1)
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Detection")
        data1 = data1.map(operations=exception_func,
                          input_columns=["bbox"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Detection")
        data1 = data1.map(operations=exception_func,
                          input_columns=["category_id"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Stuff")
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Stuff")
        data1 = data1.map(operations=vision.Decode(),
                          input_columns=["image"],
                          num_parallel_workers=1)
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Stuff")
        data1 = data1.map(operations=exception_func,
                          input_columns=["segmentation"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=ANNOTATION_FILE,
                               task="Stuff")
        data1 = data1.map(operations=exception_func,
                          input_columns=["iscrowd"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=KEYPOINT_FILE,
                               task="Keypoint")
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=KEYPOINT_FILE,
                               task="Keypoint")
        data1 = data1.map(operations=vision.Decode(),
                          input_columns=["image"],
                          num_parallel_workers=1)
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=KEYPOINT_FILE,
                               task="Keypoint")
        data1 = data1.map(operations=exception_func,
                          input_columns=["keypoints"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=KEYPOINT_FILE,
                               task="Keypoint")
        data1 = data1.map(operations=exception_func,
                          input_columns=["num_keypoints"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=PANOPTIC_FILE,
                               task="Panoptic")
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=PANOPTIC_FILE,
                               task="Panoptic")
        data1 = data1.map(operations=vision.Decode(),
                          input_columns=["image"],
                          num_parallel_workers=1)
        data1 = data1.map(operations=exception_func,
                          input_columns=["image"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=PANOPTIC_FILE,
                               task="Panoptic")
        data1 = data1.map(operations=exception_func,
                          input_columns=["bbox"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=PANOPTIC_FILE,
                               task="Panoptic")
        data1 = data1.map(operations=exception_func,
                          input_columns=["category_id"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)

    try:
        data1 = ds.CocoDataset(DATA_DIR,
                               annotation_file=PANOPTIC_FILE,
                               task="Panoptic")
        data1 = data1.map(operations=exception_func,
                          input_columns=["area"],
                          num_parallel_workers=1)
        for _ in data1.__iter__():
            pass
        assert False
    except RuntimeError as e:
        assert "map operation: [PyFunc] failed. The corresponding data files" in str(
            e)