def child_process_download(
  meta, cache, mip, compress_cache,
  dest_bbox, fill_missing, progress,
  location, use_shared_memory, green,
  cloudpaths
):
  reset_connection_pools() # otherwise multi-process hangs
  shape = list(dest_bbox.size3()) + [ meta.num_channels ]

  # Map the destination buffer either from shared memory
  # or from an emulated, file-backed buffer on disk.
  if use_shared_memory:
    array_like, dest_img = shm.ndarray(
      shape, dtype=meta.dtype, location=location, lock=fs_lock
    )
  else:
    array_like, dest_img = shm.ndarray_fs(
      shape, dtype=meta.dtype, location=location,
      emulate_shm=False, lock=fs_lock
    )

  def process(src_img, src_bbox):
    # Paint each downloaded chunk into its place in the destination buffer.
    shade(dest_img, dest_bbox, src_img, src_bbox)

  download_chunks_threaded(
    meta, cache, mip, cloudpaths,
    fn=process, fill_missing=fill_missing,
    progress=progress, compress_cache=compress_cache,
    green=green
  )

  array_like.close()
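
# A minimal parent-side sketch of how child_process_download might be fanned out
# with the standard library's multiprocessing module. This is an assumption for
# illustration, not CloudVolume's actual dispatch code: the shared memory
# `location` name, the round-robin split of cloudpaths, and the flag values
# passed positionally are all placeholders.
def example_parallel_download(meta, cache, mip, dest_bbox, cloudpaths, parallel=4):
  import multiprocessing as mp

  location = 'example-shm-segment'  # hypothetical shared memory segment name
  groups = [ cloudpaths[i::parallel] for i in range(parallel) ]  # round-robin split

  procs = []
  for group in groups:
    p = mp.Process(
      target=child_process_download,
      args=(
        meta, cache, mip,
        False,      # compress_cache
        dest_bbox,
        False,      # fill_missing
        False,      # progress
        location,
        True,       # use_shared_memory
        False,      # green
        group,
      ),
    )
    p.start()
    procs.append(p)

  for p in procs:
    p.join()
  # The caller can then map the assembled image from the same `location` segment.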
def child_upload_process(
  meta, cache,
  img_shape, offset, mip,
  compress, cdn_cache, progress,
  location, location_bbox, location_order,
  delete_black_uploads, background_color,
  green, chunk_ranges,
  compress_level=None,
):
  global fs_lock
  reset_connection_pools()

  shared_shape = img_shape
  if location_bbox:
    shared_shape = list(location_bbox.size3()) + [ meta.num_channels ]

  # Map the already-rendered image read-only from shared memory.
  array_like, renderbuffer = shm.ndarray(
    shape=shared_shape, dtype=meta.dtype,
    location=location, order=location_order,
    lock=fs_lock, readonly=True
  )

  if location_bbox:
    # The shared buffer covers location_bbox; crop out the cutout
    # this child is responsible for uploading.
    cutout_bbox = Bbox(offset, offset + img_shape[:3])
    delta_box = cutout_bbox.clone() - location_bbox.minpt
    renderbuffer = renderbuffer[ delta_box.to_slices() ]

  threaded_upload_chunks(
    meta, cache,
    renderbuffer, mip, chunk_ranges,
    compress=compress, cdn_cache=cdn_cache, progress=progress,
    delete_black_uploads=delete_black_uploads,
    background_color=background_color,
    green=green, compress_level=compress_level,
  )

  array_like.close()
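
# Parent-side sketch for child_upload_process, again an assumption rather than the
# library's real caller: the parent renders the image into a shared segment, then
# each child maps it read-only and uploads its share of chunk ranges. The 'gzip'
# compression choice, the Fortran ('F') order, and the pre-split chunk_range_groups
# are illustrative placeholders.
def example_parallel_upload(meta, cache, img, offset, mip, location, chunk_range_groups):
  import multiprocessing as mp

  # Write the rendered image into the shared segment the children will map.
  array_like, shared = shm.ndarray(
    shape=img.shape, dtype=meta.dtype, location=location, lock=fs_lock
  )
  shared[:] = img

  procs = []
  for group in chunk_range_groups:
    p = mp.Process(
      target=child_upload_process,
      args=(
        meta, cache, list(img.shape), offset, mip,
        'gzip',   # compress
        None,     # cdn_cache
        False,    # progress
        location,
        None,     # location_bbox: children map the whole buffer
        'F',      # location_order
        False,    # delete_black_uploads
        0,        # background_color
        False,    # green
        group,    # chunk_ranges for this child
      ),
    )
    p.start()
    procs.append(p)

  for p in procs:
    p.join()

  array_like.close()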
def _download_and_store_cv_files_thread(args):
    """ Helper thread to download files from Google Cloud Storage """
    chunk_id, cv_url, file_paths, olduint32 = args

    # Reset connection pool to make cloud-volume compatible with parallelizing
    storage.reset_connection_pools()

    n_file_paths = len(file_paths)
    time_start = time.time()
    with storage.SimpleStorage(cv_url) as cv_st:
        for i_fp, fp in enumerate(file_paths):
            # Periodically report progress and an ETA for this chunk.
            if i_fp % 100 == 1:
                dt = time.time() - time_start
                eta = dt / i_fp * n_file_paths - dt
                print("%d: %d / %d - dt: %.3fs - eta: %.3fs" %
                      (chunk_id, i_fp, n_file_paths, dt, eta))

            creator_utils.download_and_store_edge_file(cv_st, fp)
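
# Dispatcher sketch for the helper above. This is an assumption, not the
# repository's actual multiprocessing wrapper: it only shows how the
# (chunk_id, cv_url, file_paths, olduint32) argument tuples could be built
# and mapped over a thread pool.
def example_download_cv_files(cv_url, file_paths, olduint32=False, n_threads=8):
    from multiprocessing.pool import ThreadPool

    # Round-robin split of the file paths into one chunk per thread.
    chunks = [file_paths[i::n_threads] for i in range(n_threads)]
    multi_args = [
        (chunk_id, cv_url, chunk, olduint32)
        for chunk_id, chunk in enumerate(chunks) if chunk
    ]

    with ThreadPool(n_threads) as pool:
        pool.map(_download_and_store_cv_files_thread, multi_args)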