def process_image(input_img_path, output_img_path, projection, method):
    """Project an image along z and write the 2D result.

    Runs inside a free ProcessManager process slot and fans the work out
    across (x, y) tiles, submitting one thread per tile.

    Args:
        input_img_path: Path to the input image.
        output_img_path: Path where the projected image is written.
        projection: Callable that projects one tile; invoked as
            ``projection(br, bw, (x, x_max), (y, y_max), method=method)``.
        method: Projection method, forwarded to ``projection``.
    """
    # Grab a free process
    with ProcessManager.process():

        # Initialize the reader and writer; the writer inherits the
        # reader's metadata so pixel size/type carry over.
        with BioReader(input_img_path,
                       max_workers=ProcessManager._active_threads) as br, \
             BioWriter(output_img_path,
                       metadata=br.metadata,
                       max_workers=ProcessManager._active_threads) as bw:

            # The projection collapses the z-axis, so the output is 2D.
            bw.Z = 1

            # Tile the x,y plane; `tile_size` is a module-level constant.
            for x in range(0, br.X, tile_size):
                # min(a, b) instead of min([a, b]): no throwaway list,
                # and the tile is clamped to the image edge.
                x_max = min(br.X, x + tile_size)

                for y in range(0, br.Y, tile_size):
                    y_max = min(br.Y, y + tile_size)

                    ProcessManager.submit_thread(projection,
                                                 br, bw,
                                                 (x, x_max), (y, y_max),
                                                 method=method)

            ProcessManager.join_threads()
def write_slide(self):
    """Write this slide from a worker thread inside a managed process slot.

    The process slot is labeled with the slide's base path and output
    depth so progress logs identify which slide is being written.
    """
    slot_label = f'{self.base_path} - {self.output_depth}'
    with ProcessManager.process(slot_label):
        ProcessManager.submit_thread(self._write_slide)
        ProcessManager.join_threads()
def assemble_image(vector_path: pathlib.Path, out_path: pathlib.Path, depth: int) -> None:
    """Assemble a 2d or 3d image.

    This method assembles one image from one stitching vector. It can
    assemble both 2d and z-stacked 3d images. It is intended to run as a
    process to parallelize stitching of multiple images.

    The basic approach to stitching is:
    1. Parse the stitching vector and abstract the image dimensions.
    2. Generate a thread for each subsection (supertile) of an image.

    Args:
        vector_path: Path to the stitching vector
        out_path: Path to the output directory
        depth: depth of the input images
    """
    # Grab a free process
    with ProcessManager.process():

        # Parse the stitching vector
        parsed_vector = _parse_stitch(vector_path, timesliceNaming)

        # Initialize the output image from the first tile's metadata.
        with BioReader(parsed_vector['filePos'][0]['file']) as br:
            bw = BioWriter(out_path.joinpath(parsed_vector['name']),
                           metadata=br.metadata,
                           max_workers=ProcessManager._active_threads)

        # NOTE(review): lowercase x/y/z here vs uppercase bw.Z elsewhere
        # in this file -- confirm both spellings are valid on BioWriter.
        bw.x = parsed_vector['width']
        bw.y = parsed_vector['height']
        bw.z = depth

        # Assemble the images; close the writer even if a tile fails so
        # the output file handle is not leaked on error.
        try:
            ProcessManager.log('Begin assembly')

            for z in range(depth):
                ProcessManager.log(f'Assembling Z position : {z}')

                for x in range(0, parsed_vector['width'], chunk_size):
                    # max x-pixel index in the assembled image
                    X_range = min(x + chunk_size, parsed_vector['width'])

                    for y in range(0, parsed_vector['height'], chunk_size):
                        # max y-pixel index in the assembled image
                        Y_range = min(y + chunk_size, parsed_vector['height'])

                        ProcessManager.submit_thread(
                            make_tile,
                            x, X_range, y, Y_range, z,
                            parsed_vector, bw)

            ProcessManager.join_threads()
        finally:
            bw.close()
def unshade_batch(files: typing.List[Path],
                  out_dir: Path,
                  brightfield: Path,
                  darkfield: Path,
                  photobleach: typing.Optional[Path] = None):
    """Flat-field correct a batch of images with shared correction images.

    Args:
        files: Files to correct. NOTE(review): each entry is indexed as
            ``file['file']`` below, which suggests filepattern-style dicts
            rather than plain Paths -- the type hint may be wrong; confirm.
        out_dir: Output directory.
        brightfield: Path to the brightfield (flat-field) image.
        darkfield: Path to the darkfield image.
        photobleach: Optional CSV of per-file photobleach offsets.
    """
    if photobleach is not None:
        # Build a {filename: offset} map, skipping the CSV header row.
        with open(photobleach, 'r') as f:
            reader = csv.reader(f)
            photo_offset = {
                line[0]: float(line[1])
                for line in reader if line[0] != 'file'
            }
        # Mean photobleach offset across all files in the batch.
        offset = np.mean(list(photo_offset.values()))
    else:
        offset = None

    # Grab a free process
    with ProcessManager.process():

        # Load the correction images once and share them across threads.
        with BioReader(brightfield, max_workers=2) as bf:
            brightfield_image = bf[:, :, :, 0, 0].squeeze()
        with BioReader(darkfield, max_workers=2) as df:
            darkfield_image = df[:, :, :, 0, 0].squeeze()

        for file in files:
            # Per-file photobleach offset, if a photobleach CSV was given.
            pb = photo_offset[file['file']] if photobleach is not None else None

            ProcessManager.submit_thread(unshade_image,
                                         file['file'],
                                         out_dir,
                                         brightfield_image,
                                         darkfield_image,
                                         pb,
                                         offset)

        ProcessManager.join_threads()
# We only need a thread manager since labeling and image reading/writing
# release the gil
ProcessManager.init_threads()

# Get all file names in inpDir image collection
_files = [
    _file for _file in _input_dir.iterdir()
    if _file.is_file() and _file.name.endswith('.ome.tif')
]

# Split the collection by size: small images go to the threaded cython
# labeler, large ones to the rust implementation.
# NOTE(review): 500 is the split threshold passed to filter_by_size --
# its unit (MB? pixels?) is not visible here; confirm against that helper.
_small_files, _large_files = filter_by_size(_files, 500)

logger.info(f'processing {len(_files)} images in total...')
logger.info(f'processing {len(_small_files)} small images with cython...')
logger.info(f'processing {len(_large_files)} large images with rust')

if _small_files:
    # One labeling thread per small image, joined before moving on.
    for _infile in _small_files:
        ProcessManager.submit_thread(
            label_cython,
            _infile,
            _output_dir.joinpath(get_output_name(_infile.name)),
            _connectivity,
        )
    ProcessManager.join_threads()

if _large_files:
    # Large images are processed sequentially via the rust-backed PolygonSet.
    for _infile in _large_files:
        _outfile = _output_dir.joinpath(get_output_name(_infile.name))
        PolygonSet(_connectivity).read_from(_infile).write_to(_outfile)