Example #1
def test_HWC2CHW_md5():
    """
    Test HWC2CHW with md5 comparison
    """
    logger.info("Test HWC2CHW with md5 comparison")

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    decode_op = c_vision.Decode()
    hwc2chw_op = c_vision.HWC2CHW()
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=hwc2chw_op)

    # expected md5 from images
    filename = "test_HWC2CHW_01_result.npz"
    save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
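Every example in this collection ends by handing the pipeline to save_and_check_md5, a helper from the MindSpore dataset test utilities that is not shown on this page. As a rough orientation, a minimal sketch of such a golden-file check is given below; the function name, golden-file location, and npz layout are assumptions for illustration, not the real implementation.

# Minimal sketch of an md5 golden-file check (assumed layout; NOT the real
# MindSpore test util): hash every column of every row, then compare the
# hashes against a stored golden .npz file, or regenerate it on demand.
import hashlib
import os

import numpy as np


def save_and_check_md5_sketch(data, filename, generate_golden=False):
    golden_dir = "golden"  # hypothetical golden-file directory
    hashes = []
    for item in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        for column in sorted(item):
            hashes.append(hashlib.md5(item[column].tobytes()).hexdigest())

    path = os.path.join(golden_dir, filename)
    if generate_golden:
        np.savez(path, hashes=np.array(hashes))  # regenerate the golden file
        return
    golden = list(np.load(path)["hashes"])
    assert golden == hashes, "md5 mismatch against golden file " + filename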
Example #2
def test_resize_with_bbox_op_coco_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without the ResizeWithBBox op applied,
    and verifies the result with an MD5 check; expected to pass.
    Tested with the COCO dataset.
    """
    logger.info("test_resize_with_bbox_op_coco_c")

    # Load dataset
    dataCOCO1 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

    dataCOCO2 = ds.CocoDataset(DATA_DIR_2[0],
                               annotation_file=DATA_DIR_2[1],
                               task="Detection",
                               decode=True,
                               shuffle=False)

    test_op = c_vision.ResizeWithBBox(200)

    # map to apply ops
    dataCOCO2 = dataCOCO2.map(operations=[test_op],
                              input_columns=["image", "bbox"],
                              output_columns=["image", "bbox"],
                              column_order=["image", "bbox"])

    filename = "resize_with_bbox_op_01_c_coco_result.npz"
    save_and_check_md5(dataCOCO2, filename, generate_golden=GENERATE_GOLDEN)

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(
            dataCOCO1.create_dict_iterator(num_epochs=1, output_numpy=True),
            dataCOCO2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp, annot_name="bbox")
Example #3
def test_random_posterize_default_c_md5(plot=False, run_golden=True):
    """
    Test RandomPosterize C Op (default params) with md5 comparison
    """
    logger.info("test_random_posterize_default_c_md5")
    original_seed = config_get_set_seed(5)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    # define map operations
    transforms1 = [c_vision.Decode(), c_vision.RandomPosterize()]

    #  First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    data1 = data1.map(operations=transforms1, input_columns=["image"])
    #  Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    data2 = data2.map(operations=[c_vision.Decode()], input_columns=["image"])

    image_posterize = []
    image_original = []
    for item1, item2 in zip(data1.create_dict_iterator(output_numpy=True),
                            data2.create_dict_iterator(output_numpy=True)):
        image1 = item1["image"]
        image2 = item2["image"]
        image_posterize.append(image1)
        image_original.append(image2)

    if run_golden:
        # check results with md5 comparison
        filename = "random_posterize_01_default_result_c.npz"
        save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)

    if plot:
        visualize_list(image_original, image_posterize)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
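The seed/worker bookkeeping above appears in most of these tests: config_get_set_seed and config_get_set_num_parallel_workers come from the test utilities and the originals are restored via ds.config at the end. A minimal sketch of what these helpers do, as implied by their usage (the real implementation may differ):

# Sketch of the config helpers as implied by how the tests use them:
# record the current global value, install the new one, return the old one.
import mindspore.dataset as ds


def config_get_set_seed(seed_new):
    seed_original = ds.config.get_seed()
    ds.config.set_seed(seed_new)
    return seed_original


def config_get_set_num_parallel_workers(num_parallel_workers_new):
    num_parallel_workers_original = ds.config.get_num_parallel_workers()
    ds.config.set_num_parallel_workers(num_parallel_workers_new)
    return num_parallel_workers_original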
Example #4
def test_random_crop_and_resize_02():
    """
    Test RandomCropAndResize with md5 check: image interpolation mode is Inter.NEAREST,
    expected to pass
    """
    logger.info("test_random_crop_and_resize_02")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    decode_op = c_vision.Decode()
    random_crop_and_resize_op = c_vision.RandomResizedCrop(
        (256, 512), interpolation=mode.Inter.NEAREST)
    data1 = data1.map(operations=decode_op, input_columns=["image"])
    data1 = data1.map(operations=random_crop_and_resize_op,
                      input_columns=["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomResizedCrop((256, 512),
                                    interpolation=mode.Inter.NEAREST),
        py_vision.ToTensor()
    ]
    transform = mindspore.dataset.transforms.py_transforms.Compose(transforms)
    data2 = data2.map(operations=transform, input_columns=["image"])

    filename1 = "random_crop_and_resize_02_c_result.npz"
    filename2 = "random_crop_and_resize_02_py_result.npz"
    save_and_check_md5(data1, filename1, generate_golden=GENERATE_GOLDEN)
    save_and_check_md5(data2, filename2, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #5
def test_center_crop_md5(height=375, width=375):
    """
    Test CenterCrop
    """
    logger.info("Test CenterCrop")

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    decode_op = vision.Decode()
    # 3 images [375, 500] [600, 500] [512, 512]
    center_crop_op = vision.CenterCrop([height, width])
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"], operations=center_crop_op)
    # Compare with expected md5 from images
    filename = "center_crop_01_result.npz"
    save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
Example #6
def test_random_crop_decode_resize_md5():
    """
    Test RandomCropDecodeResize with md5 check
    """
    logger.info("Test RandomCropDecodeResize with md5 check")
    original_seed = config_get_set_seed(10)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    random_crop_decode_resize_op = vision.RandomCropDecodeResize((256, 512), (1, 1), (0.5, 0.5))
    data = data.map(operations=random_crop_decode_resize_op, input_columns=["image"])
    # Compare with expected md5 from images
    filename = "random_crop_decode_resize_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #7
def test_random_color_c(degrees=(0.1, 1.9), plot=False, run_golden=True):
    """
    Test Cpp RandomColor
    """
    logger.info("test_random_color_op")

    original_seed = config_get_set_seed(10)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # Decode with rgb format set to True
    data1 = ds.TFRecordDataset(C_DATA_DIR, C_SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = ds.TFRecordDataset(C_DATA_DIR, C_SCHEMA_DIR, columns_list=["image"], shuffle=False)

    # Serialize and Load dataset requires using vision.Decode instead of vision.Decode().
    if degrees is None:
        c_op = vision.RandomColor()
    else:
        c_op = vision.RandomColor(degrees)

    data1 = data1.map(input_columns=["image"], operations=[vision.Decode()])
    data2 = data2.map(input_columns=["image"], operations=[vision.Decode(), c_op])

    image_random_color_op = []
    image = []

    for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
        actual = item1["image"]
        expected = item2["image"]
        image.append(actual)
        image_random_color_op.append(expected)

    if run_golden:
        # Compare with expected md5 from images
        filename = "random_color_op_02_result.npz"
        save_and_check_md5(data2, filename, generate_golden=GENERATE_GOLDEN)

    if plot:
        visualize_list(image, image_random_color_op)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #8
def test_to_type_02():
    """
    Test ToType Op with md5 comparison: valid input (str)
    Expect to pass
    """
    logger.info("test_to_type_02")
    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.ToTensor(),
        # Note: Convert to type int
        py_vision.ToType('int')
    ]
    transform = mindspore.dataset.transforms.py_transforms.Compose(transforms)
    data = data.map(operations=transform, input_columns=["image"])

    # Compare with expected md5 from images
    filename = "to_type_02_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #9
    # Nested helper from a larger test; `run_golden` and GENERATE_GOLDEN come from
    # the enclosing scope.
    def test_config(plot, file_name, op_list):
        data_dir = "../data/dataset/testImageNetData/train/"
        data1 = ds.ImageFolderDataset(dataset_dir=data_dir, shuffle=False)
        data1 = data1.map(operations=op_list, input_columns=["image"])
        data2 = ds.ImageFolderDataset(dataset_dir=data_dir, shuffle=False)
        data2 = data2.map(operations=c_vision.Decode(), input_columns=["image"])
        original_images = []
        transformed_images = []

        for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
            transformed_images.append(item["image"])
        for item in data2.create_dict_iterator(num_epochs=1, output_numpy=True):
            original_images.append(item["image"])

        if run_golden:
            # Compare with expected md5 from images
            save_and_check_md5(data1, file_name, generate_golden=GENERATE_GOLDEN)

        if plot:
            visualize_list(original_images, transformed_images)
Example #10
def test_invert_md5_c():
    """
    Test Invert cpp op with md5 check
    """
    logger.info("Test Invert cpp op with md5 check")

    # Generate dataset
    data_set = de.ImageFolderDatasetV2(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = [
        C.Decode(),
        C.Resize(size=[224, 224]),
        C.Invert(),
        F.ToTensor()
    ]

    data = data_set.map(input_columns="image", operations=transforms_invert)
    # Compare with expected md5 from images
    filename = "invert_01_result_c.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #11
def test_equalize_md5_c():
    """
    Test Equalize cpp op with md5 check
    """
    logger.info("Test Equalize cpp op with md5 check")

    # Generate dataset
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_equalize = [
        C.Decode(),
        C.Resize(size=[224, 224]),
        C.Equalize(),
        F.ToTensor()
    ]

    data = data_set.map(operations=transforms_equalize, input_columns="image")
    # Compare with expected md5 from images
    filename = "equalize_01_result_c.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #12
def test_resize_with_bbox_op_voc_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without the ResizeWithBBox op applied;
    tested with the VOC dataset.
    """
    logger.info("test_resize_with_bbox_op_voc_c")

    # Load dataset
    dataVoc1 = ds.VOCDataset(DATA_DIR,
                             task="Detection",
                             usage="train",
                             shuffle=False,
                             decode=True)

    dataVoc2 = ds.VOCDataset(DATA_DIR,
                             task="Detection",
                             usage="train",
                             shuffle=False,
                             decode=True)

    test_op = c_vision.ResizeWithBBox(100)

    # map to apply ops
    dataVoc2 = dataVoc2.map(operations=[test_op],
                            input_columns=["image", "bbox"],
                            output_columns=["image", "bbox"],
                            column_order=["image", "bbox"])

    filename = "resize_with_bbox_op_01_c_voc_result.npz"
    save_and_check_md5(dataVoc2, filename, generate_golden=GENERATE_GOLDEN)

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(
            dataVoc1.create_dict_iterator(num_epochs=1, output_numpy=True),
            dataVoc2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp)
Example #13
def test_random_crop_and_resize_03():
    """
    Test RandomCropAndResize with md5 check: max_attempts is 1, expected to pass
    """
    logger.info("test_random_crop_and_resize_03")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    decode_op = c_vision.Decode()
    random_crop_and_resize_op = c_vision.RandomResizedCrop((256, 512),
                                                           max_attempts=1)
    data1 = data1.map(input_columns=["image"], operations=decode_op)
    data1 = data1.map(input_columns=["image"],
                      operations=random_crop_and_resize_op)

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    transforms = [
        py_vision.Decode(),
        py_vision.RandomResizedCrop((256, 512), max_attempts=1),
        py_vision.ToTensor()
    ]
    transform = py_vision.ComposeOp(transforms)
    data2 = data2.map(input_columns=["image"], operations=transform())

    filename1 = "random_crop_and_resize_03_c_result.npz"
    filename2 = "random_crop_and_resize_03_py_result.npz"
    save_and_check_md5(data1, filename1, generate_golden=GENERATE_GOLDEN)
    save_and_check_md5(data2, filename2, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #14
def test_rescale_md5():
    """
    Test Rescale with md5 comparison
    """
    logger.info("Test Rescale with md5 comparison")

    # generate dataset
    data = ds.TFRecordDataset(DATA_DIR,
                              SCHEMA_DIR,
                              columns_list=["image"],
                              shuffle=False)
    decode_op = vision.Decode()
    rescale_op = vision.Rescale(1.0 / 255.0, -1.0)

    # apply map operations on images
    data = data.map(operations=decode_op, input_columns=["image"])
    data = data.map(operations=rescale_op, input_columns=["image"])

    # check results with md5 comparison
    filename = "rescale_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
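Rescale applies a linear transform, output = pixel * rescale + shift, so the parameters above map uint8 values in [0, 255] to floats in [-1.0, 0.0]. A quick stand-alone check with synthetic pixel values:

# Numeric check of Rescale(1.0 / 255.0, -1.0) using plain numpy.
import numpy as np

pixels = np.array([0, 128, 255], dtype=np.uint8)
rescaled = pixels * (1.0 / 255.0) + (-1.0)
print(rescaled)  # approximately [-1.0, -0.498, 0.0]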
Example #15
def test_random_crop_01_c():
    """
    Test RandomCrop op with c_transforms: size is a single integer, expected to pass
    """
    logger.info("test_random_crop_01_c")
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR,
                              SCHEMA_DIR,
                              columns_list=["image"],
                              shuffle=False)
    # Note: If size is an int, a square crop of size (size, size) is returned.
    random_crop_op = c_vision.RandomCrop(512)
    decode_op = c_vision.Decode()
    data = data.map(input_columns=["image"], operations=decode_op)
    data = data.map(input_columns=["image"], operations=random_crop_op)

    filename = "random_crop_01_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #16
def test_random_vertical_valid_prob_c():
    """
    Test RandomVerticalFlip op with c_transforms: valid non-default input, expect to pass
    """
    logger.info("test_random_vertical_valid_prob_c")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    decode_op = c_vision.Decode()
    random_vertical_op = c_vision.RandomVerticalFlip(0.8)
    data = data.map(input_columns=["image"], operations=decode_op)
    data = data.map(input_columns=["image"], operations=random_vertical_op)

    filename = "random_vertical_01_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #17
def test_random_crop_02_c():
    """
    Test RandomCrop op with c_transforms: size is a list/tuple with length 2, expected to pass
    """
    logger.info("test_random_crop_02_c")
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR,
                              SCHEMA_DIR,
                              columns_list=["image"],
                              shuffle=False)
    # Note: If size is a sequence of length 2, it should be (height, width).
    random_crop_op = c_vision.RandomCrop([512, 375])
    decode_op = c_vision.Decode()
    data = data.map(input_columns=["image"], operations=decode_op)
    data = data.map(input_columns=["image"], operations=random_crop_op)

    filename = "random_crop_02_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #18
def test_random_crop_03_c():
    """
    Test RandomCrop op with c_transforms: input image size == crop size, expected to pass
    """
    logger.info("test_random_crop_03_c")
    ds.config.set_seed(0)
    ds.config.set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR,
                              SCHEMA_DIR,
                              columns_list=["image"],
                              shuffle=False)
    # Note: The size of the image is 4032*2268
    random_crop_op = c_vision.RandomCrop([2268, 4032])
    decode_op = c_vision.Decode()
    data = data.map(input_columns=["image"], operations=decode_op)
    data = data.map(input_columns=["image"], operations=random_crop_op)

    filename = "random_crop_03_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #19
def test_ten_crop_md5():
    """
    Tests that TenCrop gives the same results across multiple runs.
    Since TenCrop is deterministic, we expect it to return the same result for a given input every time.
    """
    logger.info("test_ten_crop_md5")

    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    transforms_2 = [
        vision.Decode(),
        vision.TenCrop((200, 100), use_vertical_flip=True),
        lambda images: np.stack([vision.ToTensor()(image)
                                 for image in images])  # 4D stack of 10 images
    ]
    transform_2 = vision.ComposeOp(transforms_2)
    data2 = data2.map(input_columns=["image"], operations=transform_2())
    # Compare with expected md5 from images
    filename = "ten_crop_01_result.npz"
    save_and_check_md5(data2, filename, generate_golden=GENERATE_GOLDEN)
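The lambda in the transform list exists because TenCrop returns ten crops rather than a single image: each crop is converted to a CHW float array by ToTensor and the ten results are stacked into one 4-D array. A stand-alone illustration with synthetic arrays (shapes assumed from TenCrop((200, 100)) on RGB input):

# Synthetic stand-ins for the ten ToTensor outputs, stacked like the lambda above.
import numpy as np

crops = [np.zeros((3, 200, 100), dtype=np.float32) for _ in range(10)]
stacked = np.stack(crops)
print(stacked.shape)  # (10, 3, 200, 100)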
Example #20
def test_resize_with_bbox_op_c(plot_vis=False):
    """
    Prints images and bboxes side by side with and without the ResizeWithBBox op applied,
    and verifies the result with an MD5 check; expected to pass
    """
    logger.info("test_resize_with_bbox_op_c")

    # Load dataset
    dataVoc1 = ds.VOCDataset(DATA_DIR, task="Detection", mode="train",
                             decode=True, shuffle=False)

    dataVoc2 = ds.VOCDataset(DATA_DIR, task="Detection", mode="train",
                             decode=True, shuffle=False)

    test_op = c_vision.ResizeWithBBox(200)

    dataVoc1 = dataVoc1.map(input_columns=["annotation"],
                            output_columns=["annotation"],
                            operations=fix_annotate)
    dataVoc2 = dataVoc2.map(input_columns=["annotation"],
                            output_columns=["annotation"],
                            operations=fix_annotate)
    # map to apply ops
    dataVoc2 = dataVoc2.map(input_columns=["image", "annotation"],
                            output_columns=["image", "annotation"],
                            columns_order=["image", "annotation"],
                            operations=[test_op])

    filename = "resize_with_bbox_op_01_c_result.npz"
    save_and_check_md5(dataVoc2, filename, generate_golden=GENERATE_GOLDEN)

    unaugSamp, augSamp = [], []

    for unAug, Aug in zip(dataVoc1.create_dict_iterator(), dataVoc2.create_dict_iterator()):
        unaugSamp.append(unAug)
        augSamp.append(Aug)

    if plot_vis:
        visualize_with_bounding_boxes(unaugSamp, augSamp)
Example #21
def test_random_sharpness_c_md5():
    """
    Test RandomSharpness cpp op with md5 comparison
    """
    logger.info("Test RandomSharpness cpp op with md5 comparison")
    original_seed = config_get_set_seed(200)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # define map operations
    transforms = [C.Decode(), C.RandomSharpness((10.0, 15.0))]

    #  Generate dataset
    data = de.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data = data.map(operations=transforms, input_columns=["image"])

    # check results with md5 comparison
    filename = "random_sharpness_cpp_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #22
def test_to_pil_01():
    """
    Test ToPIL Op with md5 comparison: input is already PIL image
    Expected to pass
    """
    logger.info("test_to_pil_01")

    # Generate dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    transforms = [
        py_vision.Decode(),
        # If input is already PIL image.
        py_vision.ToPIL(),
        py_vision.CenterCrop(375),
        py_vision.ToTensor()
    ]
    transform = py_vision.ComposeOp(transforms)
    data1 = data1.map(input_columns=["image"], operations=transform())

    # Compare with expected md5 from images
    filename = "to_pil_01_result.npz"
    save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
Example #23
def test_random_color_adjust_md5():
    """
    Test RandomColorAdjust with md5 check
    """
    logger.info("Test RandomColorAdjust with md5 check")
    original_seed = config_get_set_seed(10)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    decode_op = c_vision.Decode()
    random_adjust_op = c_vision.RandomColorAdjust(0.4, 0.4, 0.4, 0.1)
    data1 = data1.map(operations=decode_op, input_columns=["image"])
    data1 = data1.map(operations=random_adjust_op, input_columns=["image"])

    # Second dataset
    transforms = [
        py_vision.Decode(),
        py_vision.RandomColorAdjust(0.4, 0.4, 0.4, 0.1),
        py_vision.ToTensor()
    ]
    transform = mindspore.dataset.transforms.py_transforms.Compose(transforms)
    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    data2 = data2.map(operations=transform, input_columns=["image"])
    # Compare with expected md5 from images
    filename = "random_color_adjust_01_c_result.npz"
    save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
    filename = "random_color_adjust_01_py_result.npz"
    save_and_check_md5(data2, filename, generate_golden=GENERATE_GOLDEN)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #24
def test_random_posterize_op_fixed_point_c(plot=False, run_golden=True):
    """
    Test RandomPosterize in C transformations with fixed point
    """
    logger.info("test_random_posterize_op_c")

    # define map operations
    transforms1 = [c_vision.Decode(), c_vision.RandomPosterize(1)]

    #  First dataset
    data1 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    data1 = data1.map(operations=transforms1, input_columns=["image"])
    #  Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR,
                               SCHEMA_DIR,
                               columns_list=["image"],
                               shuffle=False)
    data2 = data2.map(operations=[c_vision.Decode()], input_columns=["image"])

    image_posterize = []
    image_original = []
    for item1, item2 in zip(
            data1.create_dict_iterator(num_epochs=1, output_numpy=True),
            data2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        image1 = item1["image"]
        image2 = item2["image"]
        image_posterize.append(image1)
        image_original.append(image2)

    if run_golden:
        # check results with md5 comparison
        filename = "random_posterize_fixed_point_01_result_c.npz"
        save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)

    if plot:
        visualize_list(image_original, image_posterize)
Example #25
def test_five_crop_md5():
    """
    Test FiveCrop with md5 check
    """
    logger.info("test_five_crop_md5")

    # First dataset
    data = ds.TFRecordDataset(DATA_DIR,
                              SCHEMA_DIR,
                              columns_list=["image"],
                              shuffle=False)
    transforms = [
        vision.Decode(),
        vision.FiveCrop(100),
        lambda images: np.stack([vision.ToTensor()(image)
                                 for image in images])  # 4D stack of 5 images
    ]
    transform = vision.ComposeOp(transforms)
    data = data.map(input_columns=["image"], operations=transform())
    # Compare with expected md5 from images
    filename = "five_crop_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)
Example #26
def test_random_color_md5():
    """
    Test RandomColor with md5 check
    """
    logger.info("Test RandomColor with md5 check")
    original_seed = config_get_set_seed(10)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # Generate dataset
    data = de.ImageFolderDatasetV2(dataset_dir=DATA_DIR, shuffle=False)

    transforms = F.ComposeOp(
        [F.Decode(), F.RandomColor((0.1, 1.9)),
         F.ToTensor()])

    data = data.map(input_columns="image", operations=transforms())
    # Compare with expected md5 from images
    filename = "random_color_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #27
def test_random_crop_08_c():
    """
    Test RandomCrop op with c_transforms: padding_mode is Border.EDGE,
    expected to pass
    """
    logger.info("test_random_crop_08_c")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # Generate dataset
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    # Note: The padding_mode is Border.EDGE.
    random_crop_op = c_vision.RandomCrop(512, [200, 200, 200, 200], padding_mode=mode.Border.EDGE)
    decode_op = c_vision.Decode()
    data = data.map(operations=decode_op, input_columns=["image"])
    data = data.map(operations=random_crop_op, input_columns=["image"])

    filename = "random_crop_08_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
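Border.EDGE padding replicates the outermost pixels outward, which numpy's "edge" mode reproduces; the RandomCrop above pads 200 pixels on each side this way before taking a 512x512 crop. A toy illustration on a small synthetic array:

# Toy edge-padding example: numpy's "edge" mode behaves like Border.EDGE.
import numpy as np

img = np.arange(9, dtype=np.uint8).reshape(3, 3)
padded = np.pad(img, pad_width=2, mode="edge")
print(padded.shape)  # (7, 7)
print(padded)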
Example #28
def test_random_sharpness_py_md5():
    """
    Test RandomSharpness python op with md5 comparison
    """
    logger.info("Test RandomSharpness python op with md5 comparison")
    original_seed = config_get_set_seed(5)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # define map operations
    transforms = [F.Decode(), F.RandomSharpness((20.0, 25.0)), F.ToTensor()]
    transform = mindspore.dataset.transforms.py_transforms.Compose(transforms)

    #  Generate dataset
    data = de.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data = data.map(operations=transform, input_columns=["image"])

    # check results with md5 comparison
    filename = "random_sharpness_py_01_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
Example #29
def test_cutmix_batch_nhwc_md5():
    """
    Test CutMixBatch on a batch of HWC images with MD5 check
    """
    logger.info("test_cutmix_batch_nhwc_md5")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # CutMixBatch Images
    data = ds.Cifar10Dataset(DATA_DIR, num_samples=10, shuffle=False)

    one_hot_op = data_trans.OneHot(num_classes=10)
    data = data.map(operations=one_hot_op, input_columns=["label"])
    cutmix_batch_op = vision.CutMixBatch(mode.ImageBatchFormat.NHWC)
    data = data.batch(5, drop_remainder=True)
    data = data.map(operations=cutmix_batch_op, input_columns=["image", "label"])

    filename = "cutmix_batch_c_nhwc_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
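CutMixBatch works on whole batches with one-hot labels, which is why the pipeline one-hot-encodes the labels and calls batch(5) before applying the op. The snippet below only illustrates the expected shapes with synthetic numpy data (CIFAR-10-sized 32x32x3 HWC images, 10 classes); it is not part of the test.

# Shape illustration with synthetic data: batched NHWC images plus one-hot labels.
import numpy as np

labels = np.array([3, 7, 1, 0, 9])
one_hot = np.eye(10, dtype=np.float32)[labels]     # (5, 10), like OneHot(num_classes=10)
images = np.zeros((5, 32, 32, 3), dtype=np.uint8)  # batch of 5 HWC images
print(images.shape, one_hot.shape)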
Example #30
def test_mixup_batch_md5():
    """
    Test MixUpBatch with MD5 check
    """
    logger.info("test_mixup_batch_md5")
    original_seed = config_get_set_seed(0)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)

    # MixUp Images
    data = ds.Cifar10Dataset(DATA_DIR, num_samples=10, shuffle=False)

    one_hot_op = data_trans.OneHot(num_classes=10)
    data = data.map(input_columns=["label"], operations=one_hot_op)
    mixup_batch_op = vision.MixUpBatch()
    data = data.batch(5, drop_remainder=True)
    data = data.map(input_columns=["image", "label"], operations=mixup_batch_op)

    filename = "mixup_batch_c_result.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)

    # Restore config setting
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)