def main(_):
    """Validate the required command-line flags, then run the converter
    registered for the requested dataset name."""
    if not FLAGS.dataset_name:
        raise ValueError(
            'You must supply the dataset name with --dataset_name')
    if not FLAGS.dataset_dir:
        raise ValueError(
            'You must supply the dataset directory with --dataset_dir')

    # Dispatch table: adding a new dataset becomes a one-line change.
    converters = {'cifar10': download_and_convert_cifar10.run}
    converter = converters.get(FLAGS.dataset_name)
    if converter is None:
        raise ValueError('dataset_name [%s] was not recognized.' %
                         FLAGS.dataset_name)
    converter(FLAGS.dataset_dir)
def run_cifar10():
    """Build the CIFAR-10 dataset locally and upload it to Cloud Storage.

    Materializes the dataset under /tmp/cifar10 via cifar10.run(), walks the
    resulting tree, and uploads every file to the bucket named by
    FLAGS.data_dir (assumed to look like 'gs://<bucket>' — TODO confirm)
    under a 'CIFAR10/' object prefix. The local staging directory is removed
    afterwards.
    """
    local_dir = '/tmp/cifar10'
    # exist_ok replaces the old bare `except: pass` around os.mkdir, which
    # also silently swallowed unrelated OSErrors (e.g. permission problems).
    os.makedirs(local_dir, exist_ok=True)

    cifar10.run(local_dir)

    print("Uploading CIFAR10 to " + str(FLAGS.data_dir))

    # Collect every file produced under the staging directory.
    stored_files = []
    for root, _dirs, filenames in os.walk(local_dir):
        for filename in filenames:
            stored_files.append(os.path.join(root, filename))

    bucket_name = str(FLAGS.data_dir).replace('gs://', '')
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)

    for source_file_name in stored_files:
        # Mirror the path relative to the staging dir under 'CIFAR10/';
        # relpath replaces the fragile prefix-strip + leading-'/' trim.
        relative_path = os.path.relpath(source_file_name, local_dir)
        destination_blob_name = os.path.join('CIFAR10', relative_path)
        blob = bucket.blob(destination_blob_name)
        blob.upload_from_filename(source_file_name)

    # ignore_errors already makes removal best-effort; the old bare
    # try/except around this call was redundant.
    shutil.rmtree(local_dir, ignore_errors=True)
    # Typo fixed: "upliading" -> "uploading".
    print("Finished Downloading & uploading CIFAR10 dataset")