def setUp(self):
  """Test setup."""
  image_height = 40
  image_width = 30
  image_channels = 3
  image_fn = functools.partial(
      test_utils.make_random_image, image_height, image_width, image_channels)

  data = test_utils.get_test_data()
  image_uri_key = schema.get_key(schema.ImageUriType, schema.image_csv_schema)
  num_records = len(data[image_uri_key])
  image_uris = data.pop(image_uri_key)

  # Replace the image URI column with the encoded image and its metadata.
  data['image_name'] = [os.path.split(uri)[-1] for uri in image_uris]
  data.update({
      'image': [beam_image.encode(image_fn()) for _ in range(num_records)],
      'image_height': [image_height] * num_records,
      'image_width': [image_width] * num_records,
      'image_channels': [image_channels] * num_records,
  })

  self.num_records = num_records
  self.data = data
  self.dataset = tf.data.Dataset.from_tensor_slices(self.data)

def setUp(self):
  """Test setup."""
  image_height = 40
  image_width = 30
  image_channels = 3
  image_fn = functools.partial(
      test_utils.make_random_image, image_height, image_width, image_channels)

  data = test_utils.get_test_data()
  schema = input_schema.IMAGE_CSV_SCHEMA
  image_uri_key = schema.image_uri_key
  num_records = len(data[image_uri_key])
  image_uris = data.pop(image_uri_key)

  # Replace the image URI column with the encoded image and its metadata.
  data['image_name'] = [os.path.split(uri)[-1] for uri in image_uris]
  data.update({
      'image': [beam_image.encode(image_fn()) for _ in range(num_records)],
      'image_height': [image_height] * num_records,
      'image_width': [image_width] * num_records,
      'image_channels': [image_channels] * num_records,
  })

  # Placeholder TFRecord output location and dataset split.
  self.tfrecord_dir = 'gs://path/to/tfrecords/dir'
  self.split = 'TRAIN'
  self.num_records = num_records
  self.data = data
  self.dataset = tf.data.Dataset.from_tensor_slices(self.data)

def test_base64_encode(self):
  """Tests encode function."""
  img = beam_image.load(self.image_file)
  enc = beam_image.encode(img)
  decoded = base64.b64decode(enc, altchars=beam_image.BASE64_ALTCHARS)
  self.assertEqual(img.tobytes(), decoded)