def test_normalizepad_op_c(plot=False):
    """
    Test NormalizePad in cpp transformations
    """
    logger.info("Test NormalizePad in cpp")
    mean = [121.0, 115.0, 100.0]
    std = [70.0, 68.0, 71.0]
    # define map operations
    decode_op = c_vision.Decode()
    normalizepad_op = c_vision.NormalizePad(mean, std)

    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data1 = data1.map(operations=decode_op, input_columns=["image"])
    data1 = data1.map(operations=normalizepad_op, input_columns=["image"])

    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(operations=decode_op, input_columns=["image"])

    num_iter = 0
    for item1, item2 in zip(data1.create_dict_iterator(num_epochs=1, output_numpy=True),
                            data2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        image_de_normalized = item1["image"]
        image_original = item2["image"]
        image_np_normalized = normalizepad_np(image_original, mean, std)
        mse = diff_mse(image_de_normalized, image_np_normalized)
        logger.info("image_{}, mse: {}".format(num_iter + 1, mse))
        assert mse < 0.01
        if plot:
            visualize_image(image_original, image_de_normalized, mse, image_np_normalized)
        num_iter += 1

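# The test above compares against a NumPy reference named normalizepad_np, which is
# defined elsewhere in this test module. The sketch below is only an illustration of
# what such a helper could look like, assuming NormalizePad normalizes an HWC image
# channel-wise and then appends one extra zero-valued channel; the name and exact
# behavior here are assumptions, not the canonical helper.
def normalizepad_np_sketch(image, mean, std):
    """Normalize an HWC image with per-channel mean/std, then pad a zero channel."""
    image = np.array(image, np.float32)
    image = (image - np.array(mean)) / np.array(std)
    # pad one extra channel filled with zeros, matching the "Pad" part of NormalizePad
    zeros = np.zeros((image.shape[0], image.shape[1], 1), dtype=image.dtype)
    return np.concatenate((image, zeros), axis=2)
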
def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32, target="GPU", dtype="fp16", device_num=1):
    """
    Create an ImageFolder-based train/eval dataset pipeline.

    For dtype "fp16", NormalizePad replaces Normalize so the dtype cast happens inside
    the data pipeline; HWC2CHW is only appended on the fp32 path.
    """
    ds.config.set_numa_enable(True)
    if device_num == 1:
        data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=4, shuffle=True)
    else:
        data_set = ds.ImageFolderDataset(dataset_path, num_parallel_workers=4, shuffle=True,
                                         num_shards=device_num, shard_id=get_rank())
    image_size = 224
    mean = [0.485 * 255, 0.456 * 255, 0.406 * 255]
    std = [0.229 * 255, 0.224 * 255, 0.225 * 255]

    # define map operations
    normalize_op = C.Normalize(mean=mean, std=std)
    if dtype == "fp16":
        if args_opt.eval:
            x_dtype = "float32"
        else:
            x_dtype = "float16"
        normalize_op = C.NormalizePad(mean=mean, std=std, dtype=x_dtype)
    if do_train:
        trans = [
            C.RandomCropDecodeResize(image_size, scale=(0.08, 1.0), ratio=(0.75, 1.333)),
            C.RandomHorizontalFlip(prob=0.5),
            normalize_op,
        ]
    else:
        trans = [
            C.Decode(),
            C.Resize(256),
            C.CenterCrop(image_size),
            normalize_op,
        ]
    if dtype == "fp32":
        trans.append(C.HWC2CHW())

    data_set = data_set.map(operations=trans, input_columns="image", num_parallel_workers=8)

    # apply batch operations
    data_set = data_set.batch(batch_size, drop_remainder=True)

    # apply dataset repeat operation
    if repeat_num > 1:
        data_set = data_set.repeat(repeat_num)

    return data_set

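# A minimal usage sketch for create_dataset above. The dataset root below is a
# placeholder, and args_opt is assumed to be the module-level argparse namespace of
# the original training script (it is read on the fp16 branch); nothing here is a
# required entry point.
def example_create_dataset_usage():
    # hypothetical ImageFolder-style path; replace with a real dataset root
    train_ds = create_dataset("/path/to/imagenet/train", do_train=True,
                              batch_size=32, target="GPU", dtype="fp16", device_num=1)
    logger.info("batches per epoch: {}".format(train_ds.get_dataset_size()))
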
def test_normalizepad_exception_unequal_size_c():
    """
    Test NormalizePad in c transformation with invalid arguments:
    len(mean) != len(std) and bad dtype values, expected to raise ValueError or TypeError
    """
    logger.info("test_normalizepad_exception_unequal_size_c")
    try:
        _ = c_vision.NormalizePad([100, 250, 125], [50, 50, 75, 75])
    except ValueError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert str(e) == "Length of mean and std must be equal."

    try:
        _ = c_vision.NormalizePad([100, 250, 125], [50, 50, 75], 1)
    except TypeError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert str(e) == "dtype should be string."

    try:
        _ = c_vision.NormalizePad([100, 250, 125], [50, 50, 75], "")
    except ValueError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert str(e) == "dtype only support float32 or float16."

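# Optional rewrite sketch of the same validation checks using pytest.raises, assuming
# the error messages asserted above; this is an alternative style, not a replacement
# required by the test suite.
def test_normalizepad_exception_pytest_raises_sketch():
    import pytest  # assumed available in the test environment
    with pytest.raises(ValueError, match="Length of mean and std must be equal."):
        c_vision.NormalizePad([100, 250, 125], [50, 50, 75, 75])
    with pytest.raises(TypeError, match="dtype should be string."):
        c_vision.NormalizePad([100, 250, 125], [50, 50, 75], 1)
    with pytest.raises(ValueError, match="dtype only support float32 or float16."):
        c_vision.NormalizePad([100, 250, 125], [50, 50, 75], "")
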
def test_decode_normalizepad_op():
    """
    Test Decode op followed by NormalizePad op
    """
    logger.info("Test [Decode, NormalizePad] in one Map")
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image", "label"], num_parallel_workers=1,
                               shuffle=False)

    # define map operations
    decode_op = c_vision.Decode()
    normalizepad_op = c_vision.NormalizePad([121.0, 115.0, 100.0], [70.0, 68.0, 71.0], "float16")

    # apply map operations on images
    data1 = data1.map(operations=[decode_op, normalizepad_op], input_columns=["image"])

    num_iter = 0
    for item in data1.create_dict_iterator(num_epochs=1, output_numpy=True):
        logger.info("Looping inside iterator {}".format(num_iter))
        assert item["image"].dtype == np.float16
        num_iter += 1

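# A small follow-up sketch: besides the dtype check above, the "Pad" part of
# NormalizePad could be verified through the channel count. This assumes the decoded
# images are 3-channel HWC and that NormalizePad appends one zero-filled channel, so
# the mapped output is expected to have 4 channels; the assertions are illustrative.
def test_decode_normalizepad_channel_sketch():
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data = data.map(operations=[c_vision.Decode(),
                                c_vision.NormalizePad([121.0, 115.0, 100.0], [70.0, 68.0, 71.0], "float16")],
                    input_columns=["image"])
    for item in data.create_dict_iterator(num_epochs=1, output_numpy=True):
        # expect HWC layout with one extra padded channel of zeros
        assert item["image"].shape[-1] == 4
        assert np.all(item["image"][:, :, -1] == 0)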