def custom_check_existing_pipeline():
    """Fixture: yield a plain download-and-write ``Pipeline``.

    Any previously written output files for ``IMAGE_URLS`` are removed
    before the pipeline is handed to the test and again afterwards, so
    each test run starts from (and leaves behind) a clean temp dir.
    """
    # Setup: clear stale outputs from earlier runs.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))

    yield Pipeline(
        load_func=load_image_from_url,
        write_func=write_image,
    )

    # Teardown: remove whatever the test wrote.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))
def trim_resize_pipeline():
    """Fixture: yield a ``DownloadImagePipeline`` whose ops trim then resize.

    The ops crop the bottom 100 pixel rows from each image and resize the
    result to ``IMAGE_RESIZE_SHAPE``. Output files for ``IMAGE_URLS`` are
    deleted before yielding and again on teardown.
    """
    # Setup: clear stale outputs from earlier runs.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))

    # A named def instead of an assigned lambda (PEP 8 / E731); the original
    # also carried a garbled "# noqa: 29" suppression that is no longer needed.
    def trim_bottom_100(image):
        """Drop the bottom 100 pixel rows of *image*."""
        return image[:-100, :]

    resize_224 = partial(resize, shape=IMAGE_RESIZE_SHAPE)

    # Renamed local so it no longer shadows this fixture function's own name.
    pipeline = DownloadImagePipeline()
    pipeline.ops = [trim_bottom_100, resize_224]
    yield pipeline

    # Teardown: remove whatever the test wrote.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))
def record_mean_brightness_pipeline():
    """Fixture: yield a ``CustomReportingPipeline`` recording mean brightness.

    Output files for ``IMAGE_URLS`` are deleted both before the pipeline is
    yielded and after the test finishes.
    """
    # Setup: clear stale outputs from earlier runs.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))

    pipeline = CustomReportingPipeline(
        load_func=load_image_from_url,
        ops=record_mean_brightness,
        write_func=write_image,
    )
    yield pipeline

    # Teardown: remove whatever the test wrote.
    for image_url in IMAGE_URLS:
        delete_file_if_exists(keep_filename_save_png_in_tempdir(image_url))
def test_trim_resize_pipeline_str_paths(trim_resize_pipeline):
    """Pipeline accepts plain string paths and resizes every output image."""
    string_inpaths = [str(url) for url in IMAGE_URLS]
    trim_resize_pipeline(
        inpaths=string_inpaths,
        path_func=keep_filename_save_png_in_tempdir,
        n_jobs=6,
    )
    # Every written image should carry the configured resize shape.
    for inpath in string_inpaths:
        written = keep_filename_save_png_in_tempdir(inpath)
        result = plt.imread(str(written))
        assert result.shape[:2] == IMAGE_RESIZE_SHAPE
def test_duplicate_outpath_pipeline():
    """Many parallel jobs writing the same output path must not error."""
    source = SAMPLE_DATA_DIR / 'blue.png'
    duplicated_inpaths = [source] * 1_000
    target = keep_filename_save_png_in_tempdir(duplicated_inpaths[0])
    delete_file_if_exists(target)

    pipeline = Pipeline(load_func=load_image_from_disk, write_func=write_image)
    pipeline(
        inpaths=duplicated_inpaths,
        path_func=keep_filename_save_png_in_tempdir,
        n_jobs=100,
    )
    delete_file_if_exists(target)

    # The run report should show no per-item errors despite the write contention.
    error_column = pipeline.run_report_.loc[:, "error"]
    assert error_column.isna().all()