def create_zipfile(zip_filepath, splits_with_ids, suffixes, maindir=None):
  """Generates a zipfile with a cityscapes-like file structure and random pngs.

  Args:
    zip_filepath (str): filepath to the zip archive that will be created
    splits_with_ids (Dict[str, List[str]]): data-splits like 'train' or 'val'
      that map to a list of image ids
    suffixes (List[str]): suffix per modality that should be created e.g.
      'leftImg8bit'
    maindir (str): name of the root directory of the zipfile, defaults to the
      name of the zipfile (without its extension)
  """
  if maindir is None:
    # BUG FIX: the previous `.strip('.zip')` removed any of the characters
    # '.', 'z', 'i', 'p' from BOTH ends of the name (e.g. 'pizza.zip' -> 'a');
    # splitext correctly drops only the file extension.
    maindir = os.path.splitext(os.path.basename(zip_filepath))[0]
  with zipfile.ZipFile(zip_filepath, 'w') as z:
    for split, ids in splits_with_ids.items():
      split_dir = os.path.join(maindir, split)
      for img_id in ids:
        # The city name is encoded in the image id; match is assumed to
        # succeed for well-formed cityscapes-style ids.
        city = CITY_IN_ID_RE.match(img_id).group(1)
        for suffix in suffixes:
          # Image modalities (suffix containing 'Img', e.g. 'leftImg8bit')
          # are 3-channel RGB; all other modalities are single-channel.
          if 'Img' in suffix:
            img = get_random_png(height=1024, width=2048, channels=3)
          else:
            img = get_random_png(height=1024, width=2048, channels=1)
          z.write(
              img,
              os.path.join(split_dir, city,
                           '{}_{}.png'.format(img_id, suffix)))
def create_zip(fname, prefix):
  """Writes a zip archive holding one fake 1x1 png per gesture class.

  Args:
    fname (str): name of the zip file, created inside the output directory.
    prefix (str): path prefix prepended to every archive member name.
  """
  archive_path = os.path.join(_output_dir(), fname)
  png = fake_data_utils.get_random_png(height=1, width=1)
  with zipfile.ZipFile(archive_path, 'w') as archive:
    # The same random png is reused for every class member.
    for gesture in ('rock', 'paper', 'scissors'):
      archive.write(png, prefix + gesture + '/0.png')
def _create_zip_files():
  """Saves png and label using name index."""
  out_dir = _output_dir()
  if not os.path.exists(out_dir):
    os.makedirs(out_dir)

  # Fake camera images: training/image_2/image_NNNNNN.png
  images_out_path = os.path.join(out_dir, "data_object_image_2.zip")
  with zipfile.ZipFile(images_out_path, "w") as image_zip:
    for idx in range(NUM_IMAGES):
      image_zip.write(
          fake_data_utils.get_random_png(HEIGHT, WIDTH),
          os.path.join("training", "image_2", "image_{:06d}.png".format(idx)))

  # Fake label files: training/label_2/label_NNNNNN.txt
  labels_out_path = os.path.join(out_dir, "data_object_label_2.zip")
  with zipfile.ZipFile(labels_out_path, "w") as label_zip:
    for idx in range(NUM_IMAGES):
      label_file = _get_label_file(_get_annotations())
      label_zip.write(
          label_file,
          os.path.join("training", "label_2", "label_{:06d}.txt".format(idx)))

  # Devkit archive with the train rand/mapping files.
  devkit_out_path = os.path.join(out_dir, "devkit_object.zip")
  with zipfile.ZipFile(devkit_out_path, "w") as devkit_zip:
    train_rand, train_mapping = _get_mapping_files()
    devkit_zip.write(train_rand, os.path.join("mapping", "train_rand.txt"))
    devkit_zip.write(train_mapping,
                     os.path.join("mapping", "train_mapping.txt"))
def _generate_image(fdir, fname):
  """Copies a random 1x1 png to <output_dir>/<fdir>/<fname>."""
  target_dir = os.path.join(_output_dir(), fdir)
  if not os.path.exists(target_dir):
    os.makedirs(target_dir)
  source_png = fake_data_utils.get_random_png(1, 1)
  tf.io.gfile.copy(source_png, os.path.join(target_dir, fname),
                   overwrite=True)
def create_images(label):
  """Writes one random 128x128 png named 'obj1_<entry>.png' per label entry."""
  images_dir = _output_dir()
  if not tf.io.gfile.exists(images_dir):
    tf.io.gfile.makedirs(images_dir)
  for entry in label:
    destination = os.path.join(images_dir, 'obj1_{}.png'.format(entry))
    tf.io.gfile.copy(fake_data_utils.get_random_png(128, 128),
                     destination,
                     overwrite=True)
def create_folder(fname):
  """Populates <output_dir>/<fname> with two random 300x300 cell pngs."""
  target_dir = os.path.join(_output_dir(), fname)
  if not tf.io.gfile.exists(target_dir):
    tf.io.gfile.makedirs(target_dir)
  name_template = 'C189P150ThinF_IMG_20151203_141809_cell_{:03d}.png'
  for idx in range(2):
    tf.io.gfile.copy(fake_data_utils.get_random_png(300, 300),
                     os.path.join(target_dir, name_template.format(idx)),
                     overwrite=True)
def _get_synset(synset_name):
  """Returns path to synset archive.

  Args:
    synset_name (str): synset identifier, e.g. 'n01440764'.

  Returns:
    str: path of the temporary .tar file that was written.
  """
  fobj = tempfile.NamedTemporaryFile(delete=False, mode='wb', suffix='.tar')
  # BUG FIX: the TarFile was never closed (only the underlying file object
  # was), so the end-of-archive terminator blocks were never written and
  # buffered tar data could be lost. The context manager closes the tar
  # before the file object is closed.
  with tarfile.open(mode='w', fileobj=fobj) as tar:
    for i in range(1, TRAIN_IMAGES_PER_SYNSET + 1):
      fname = '%s_%s.JPEG' % (synset_name, i)
      # There are a few PNG and CMYK images:
      if synset_name == 'n01440764' and i == 1:
        path = fake_data_utils.get_random_png()
      elif synset_name == 'n01440764' and i in [2, 3]:
        path = os.path.join(FLAGS.tfds_dir, 'testing', 'test_data',
                            '6pixels_cmyk.jpeg')
      else:
        path = fake_data_utils.get_random_jpeg()
      tar.add(path, arcname=fname)
  fobj.close()
  return fobj.name
def _generate_data():
  """Generate images archive."""
  out_dir = _output_dir()

  # Fake jpegs shared by the train and test splits.
  images_dir = os.path.join(out_dir, 'images')
  if not tf.io.gfile.exists(images_dir):
    tf.io.gfile.makedirs(images_dir)
  total_images = _TRAIN_IMAGES_NUMBER + _TEST_IMAGES_NUMBER
  for idx in range(total_images):
    tf.io.gfile.copy(fake_data_utils.get_random_jpeg(),
                     os.path.join(images_dir, 'image{:03d}.jpg'.format(idx)),
                     overwrite=True)

  # Annotation split files and the trimaps subdirectory.
  annotations_dir = os.path.join(out_dir, 'annotations')
  if not tf.io.gfile.exists(annotations_dir):
    tf.io.gfile.makedirs(annotations_dir)
  trimaps_dir = os.path.join(annotations_dir, 'trimaps')
  if not tf.io.gfile.exists(trimaps_dir):
    tf.io.gfile.makedirs(trimaps_dir)

  global_count = 0
  split_specs = [('trainval.txt', _TRAIN_IMAGES_NUMBER),
                 ('test.txt', _TEST_IMAGES_NUMBER)]
  for filename, num_examples in split_specs:
    fobj = tempfile.NamedTemporaryFile(delete=False, mode='w')
    with fobj:
      for idx in range(num_examples):
        # Lines look like 'image007 12 0 0': image id followed by the
        # per-split index modulo 37 and two zero columns.
        fobj.write('image{:03d} {} 0 0\n'.format(global_count, idx % 37))
        global_count += 1
    tf.io.gfile.copy(fobj.name,
                     os.path.join(annotations_dir, filename),
                     overwrite=True)

  # One single-channel trimap png per generated image.
  for idx in range(total_images):
    tf.io.gfile.copy(fake_data_utils.get_random_png(channels=1),
                     os.path.join(trimaps_dir, 'image{:03d}.png'.format(idx)),
                     overwrite=True)
def _generate_data(split):
  """Generate images archive."""
  # Fake CLEVR pngs for the given split.
  images_dir = os.path.join(_output_dir(), 'images', split)
  if not tf.io.gfile.exists(images_dir):
    tf.io.gfile.makedirs(images_dir)
  for idx in range(_IMAGE_NUMBERS[split]):
    target = os.path.join(images_dir,
                          'CLEVR_{}_{:06d}.png'.format(split, idx))
    tf.io.gfile.copy(fake_data_utils.get_random_png(), target, overwrite=True)

  # Scene annotations are only generated for the train and val splits.
  if split not in ['train', 'val']:
    return
  scenes_dir = os.path.join(_output_dir(), 'scenes')
  if not tf.io.gfile.exists(scenes_dir):
    tf.io.gfile.makedirs(scenes_dir)
  fake_object = {
      'color': 'red',
      'shape': 'sphere',
      'size': 'small',
      'material': 'rubber',
      '3d_coords': [0.0, 0.0, 0.0],
      'pixel_coords': [0.0, 0.0, 0.0],
      'rotation': 0.0,
  }
  # Every scene repeats the same object; every image repeats the same scene.
  scene = {'objects': [fake_object] * _NUM_OBJECTS}
  annotations = {'scenes': [scene] * _IMAGE_NUMBERS[split]}
  annotations_file = os.path.join(scenes_dir,
                                  'CLEVR_{}_scenes.json'.format(split))
  with tf.io.gfile.GFile(annotations_file, 'w') as f:
    json.dump(annotations, f)
def create_zip(fname):
  """Writes a zip with one fake 1x1 png under horses/ and humans/."""
  archive_path = os.path.join(_output_dir(), fname)
  png = fake_data_utils.get_random_png(height=1, width=1)
  with zipfile.ZipFile(archive_path, 'w') as archive:
    for member_name in ('horses/0.png', 'humans/0.png'):
      archive.write(png, member_name)