def main(input_dir: pathlib.Path,
         output_dir: pathlib.Path,
         file_pattern: typing.Optional[str] = None,
         group_by: typing.Optional[str] = None,
         get_darkfield: typing.Optional[bool] = None,
         get_photobleach: typing.Optional[bool] = None,
         metadata_dir: pathlib.Path = None) -> None:
    """Submit one BaSiC estimation process per file group.

    Args:
        input_dir: Directory containing the input images.
        output_dir: Directory where flatfield components are written.
        file_pattern: filepattern expression for selecting inputs;
            defaults to ``'.*'`` (match everything).
        group_by: Variables to group images by; defaults to ``'xyp'``.
        get_darkfield: Also estimate a darkfield component (default False).
        get_photobleach: Also estimate photobleach offsets (default False).
        metadata_dir: Directory passed through to ``basic.basic``.
    """
    # Resolve defaults for optional arguments.
    if group_by is None:
        group_by = 'xyp'
    if get_darkfield is None:
        get_darkfield = False
    if get_photobleach is None:
        get_photobleach = False
    if file_pattern is None:
        # BUG FIX: the original assigned to a misspelled local name
        # ('filepattern'), leaving file_pattern as None when handed to
        # FilePattern below.
        file_pattern = '.*'

    fp = FilePattern(input_dir, file_pattern)

    ProcessManager.init_processes("basic")

    # One process per group of files.
    for files in fp(group_by=group_by):
        ProcessManager.submit_process(basic.basic, files, output_dir,
                                      metadata_dir, get_darkfield,
                                      get_photobleach)

    ProcessManager.join_processes()
def main(imgPath: pathlib.Path,
         stitchPath: pathlib.Path,
         outDir: pathlib.Path,
         timesliceNaming: typing.Optional[bool]) -> None:
    """Assemble images from stitching vectors, one process per vector.

    Args:
        imgPath: Directory containing the images to assemble.
        stitchPath: Directory containing stitching vector files.
        outDir: Output directory for assembled images.
        timesliceNaming: Accepted for interface compatibility; not used here.
    """
    '''Setup stitching variables/objects'''
    # Get a sorted list of stitching vectors
    vectors = list(stitchPath.iterdir())
    vectors.sort()

    # Try to infer a filepattern from the files on disk for faster matching later
    global fp  # make the filepattern global to share between processes
    try:
        pattern = filepattern.infer_pattern([f.name for f in imgPath.iterdir()])
        logger.info(f'Inferred file pattern: {pattern}')
        fp = filepattern.FilePattern(imgPath, pattern)

    # Pattern inference didn't work, so just get a list of files.
    # BUG FIX: the original used a bare 'except:', which also swallowed
    # SystemExit/KeyboardInterrupt; narrowed to Exception.
    except Exception:
        logger.info(f'Unable to infer pattern, defaulting to: .*')
        fp = filepattern.FilePattern(imgPath, '.*')

    '''Run stitching jobs in separate processes'''
    ProcessManager.init_processes('main', 'asmbl')

    for v in vectors:
        # Check to see if the file is a valid stitching vector
        if 'img-global-positions' not in v.name:
            continue
        ProcessManager.submit_process(assemble_image, v, outDir)

    ProcessManager.join_processes()
def main(imgDir: Path,
         imgPattern: str,
         ffDir: Path,
         brightPattern: str,
         outDir: Path,
         darkPattern: typing.Optional[str] = None,
         photoPattern: typing.Optional[str] = None) -> None:
    """Start a process for each set of brightfield/darkfield/photobleach patterns.

    Args:
        imgDir: Directory of images to correct.
        imgPattern: filepattern expression for the images.
        ffDir: Directory containing the flatfield (and darkfield) components.
        brightPattern: filepattern expression for brightfield components.
        outDir: Output directory for corrected images.
        darkPattern: Optional filepattern for darkfield components.
        photoPattern: Optional filepattern for photobleach offset files,
            looked up under the sibling 'metadata' directory of ffDir.
    """
    # Create the FilePattern objects to handle file access
    ff_files = FilePattern(ffDir, brightPattern)
    fp = FilePattern(imgDir, imgPattern)
    if darkPattern is not None and darkPattern != '':
        dark_files = FilePattern(ffDir, darkPattern)
    if photoPattern is not None and photoPattern != '':
        photo_files = FilePattern(
            str(Path(ffDir).parents[0].joinpath('metadata').absolute()),
            photoPattern)

    # Group images by the variables the flatfield components do NOT vary over.
    group_by = [v for v in fp.variables if v not in ff_files.variables]
    GROUPED = group_by + ['file']

    ProcessManager.init_processes('main', 'unshade')

    for files in fp(group_by=group_by):
        # BUG FIX: dark_path/photo_path were only assigned when their
        # patterns were supplied, yet both are always passed to
        # submit_process below -> NameError when a pattern was omitted.
        dark_path = None
        photo_path = None

        flat_path = ff_files.get_matching(
            **{k.upper(): v
               for k, v in files[0].items()
               if k not in GROUPED})[0]['file']
        if flat_path is None:
            logger.warning("Could not find a flatfield image, skipping...")
            continue

        if darkPattern is not None and darkPattern != '':
            dark_path = dark_files.get_matching(**{
                k.upper(): v
                for k, v in files[0].items()
                if k not in GROUPED
            })[0]['file']
            if dark_path is None:
                logger.warning("Could not find a darkfield image, skipping...")
                continue

        if photoPattern is not None and photoPattern != '':
            photo_path = photo_files.get_matching(**{
                k.upper(): v
                for k, v in files[0].items()
                if k not in GROUPED
            })[0]['file']
            if photo_path is None:
                logger.warning(
                    "Could not find a photobleach file, skipping...")
                continue

        ProcessManager.submit_process(unshade_batch, files, outDir, flat_path,
                                      dark_path, photo_path)

    ProcessManager.join_processes()
def main(
    inpDir: Path,
    outDir: Path,
) -> None:
    """Queue a zarr-conversion process for every entry in ``inpDir``.

    Args:
        inpDir: Input directory whose entries are converted.
        outDir: Destination directory for the zarr output.
    """
    ProcessManager.init_processes("main", "zarr")

    # Fan out: one worker process per directory entry.
    for entry in inpDir.iterdir():
        ProcessManager.submit_process(image_to_zarr, entry, outDir)

    # Block until every submitted conversion has finished.
    ProcessManager.join_processes()
def main(
    inpDir: Path,
    filePattern: str,
    outDir: Path,
) -> None:
    """Queue a zarr-conversion process for every file matching ``filePattern``.

    Args:
        inpDir: Input directory to scan.
        filePattern: filepattern expression used to select files.
        outDir: Destination directory for the zarr output.
    """
    ProcessManager.init_processes("main", "zarr")

    matcher = FilePattern(inpDir, filePattern)

    # FilePattern yields groups of records; submit each record's file.
    for group in matcher():
        for record in group:
            ProcessManager.submit_process(image_to_zarr, record["file"], outDir)

    # Wait for all conversions to complete before returning.
    ProcessManager.join_processes()
def main(input_dir: Path, output_dir: Path) -> None:
    """Extract FOVs from every CZI file in ``input_dir`` as ome.tif.

    Args:
        input_dir: Directory searched (non-recursively) for ``.czi`` files.
        output_dir: Destination directory for the extracted images.

    Raises:
        ValueError: If ``input_dir`` contains no ``.czi`` files.
    """
    logger.info('Extracting tiffs and saving as ome.tif...')

    czi_files = [entry for entry in Path(input_dir).iterdir()
                 if entry.suffix == '.czi']

    # Fail loudly when there is nothing to process.
    if not czi_files:
        logger.error('No CZI files found.')
        raise ValueError('No CZI files found.')

    ProcessManager.init_processes()

    for czi_file in czi_files:
        ProcessManager.submit_process(extract_fovs, czi_file, output_dir)

    ProcessManager.join_processes()
type=str, help='Output collection', required=True) # Parse the arguments args = parser.parse_args() inpDir = args.inpDir if (Path.is_dir(Path(args.inpDir).joinpath('images'))): # switch to images folder if present fpath = str(Path(args.inpDir).joinpath('images').absolute()) logger.info('inpDir = {}'.format(inpDir)) projectionType = args.projectionType logger.info('projectionType = {}'.format(projectionType)) outDir = args.outDir logger.info('outDir = {}'.format(outDir)) # initialize projection function if projectionType == 'max': projection = max_min_projection method = np.max elif projectionType == 'min': projection = max_min_projection method = np.min elif projectionType == 'mean': projection = mean_projection method = None ProcessManager.init_processes('main', 'intensity') main(inpDir, outDir, projection, method)
def main(input_dir: pathlib.Path,
         pyramid_type: str,
         image_type: str,
         file_pattern: str,
         output_dir: pathlib.Path):
    """Build image pyramids in Neuroglancer, Zarr, or DeepZoom format.

    Args:
        input_dir: Directory containing the input images.
        pyramid_type: One of 'Neuroglancer', 'Zarr', or 'DeepZoom'.
        image_type: Image semantics (e.g. 'segmentation'); for segmentation
            pyramids all slices are joined before metadata is written.
        file_pattern: filepattern expression for selecting inputs.
        output_dir: Root directory for the generated pyramids.
    """
    # Set ProcessManager config and initialize
    ProcessManager.num_processes(multiprocessing.cpu_count())
    ProcessManager.num_threads(2 * ProcessManager.num_processes())
    ProcessManager.threads_per_request(1)
    ProcessManager.init_processes('pyr')
    logger.info('max concurrent processes = %s', ProcessManager.num_processes())

    # Parse the input file directory
    fp = filepattern.FilePattern(input_dir, file_pattern)

    # Choose the stacking dimension appropriate for the output format.
    group_by = ''
    if 'z' in fp.variables and pyramid_type == 'Neuroglancer':
        group_by += 'z'
        logger.info(
            'Stacking images by z-dimension for Neuroglancer precomputed format.'
        )
    elif 'c' in fp.variables and pyramid_type == 'Zarr':
        group_by += 'c'
        logger.info('Stacking channels by c-dimension for Zarr format')
    elif 't' in fp.variables and pyramid_type == 'DeepZoom':
        group_by += 't'
        logger.info('Creating time slices by t-dimension for DeepZoom format.')
    else:
        logger.info(
            f'Creating one pyramid for each image in {pyramid_type} format.')

    depth = 0
    depth_max = 0
    image_dir = ''

    for files in fp(group_by=group_by):

        # Create the output name for Neuroglancer format
        if pyramid_type in ['Neuroglancer', 'Zarr']:
            try:
                image_dir = fp.output_name([file for file in files])
            # BUG FIX: bare 'except: pass' also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception (still best-effort —
            # the fallback below handles a failed name inference).
            except Exception:
                pass

        if image_dir in ['', '.*']:
            image_dir = files[0]['file'].name

        # Reset the depth for this group
        depth = 0
        depth_max = 0
        # BUG FIX: 'pw' was unbound at the trailing write_info() when a
        # group produced zero slices; track it explicitly.
        pw = None

        for file in files:
            # Only the slice count is needed here; the reader is reopened by
            # the worker when the slide is actually written.
            with bfio.BioReader(file['file'], max_workers=1) as br:
                if pyramid_type == 'Zarr':
                    d_z = br.c
                else:
                    d_z = br.z

            depth_max += d_z

            for z in range(d_z):
                pyramid_args = {
                    'base_dir': output_dir.joinpath(image_dir),
                    'image_path': file['file'],
                    'image_depth': z,
                    'output_depth': depth,
                    'max_output_depth': depth_max,
                    'image_type': image_type
                }

                pw = PyramidWriter[pyramid_type](**pyramid_args)

                ProcessManager.submit_process(pw.write_slide)

                depth += 1

                if pyramid_type == 'DeepZoom':
                    pw.write_info()

        if pyramid_type in ['Neuroglancer', 'Zarr']:
            if image_type == 'segmentation':
                # Segmentation metadata depends on all slices being written.
                ProcessManager.join_processes()
            if pw is not None:
                pw.write_info()

    ProcessManager.join_processes()
# Initialize the main thread logger
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)

# Build and run the argument parser
logger.info('Parsing arguments...')
parser = argparse.ArgumentParser(
    prog='main',
    description='Compile individual tiled tiff images into a single volumetric tiled tiff.',
)
parser.add_argument('--inpDir',
                    dest='input_dir',
                    type=str,
                    required=True,
                    help='Path to folder with tiled tiff files')
parser.add_argument('--outDir',
                    dest='output_dir',
                    type=str,
                    required=True,
                    help='The output directory for ome.tif files')
parser.add_argument('--filePattern',
                    dest='file_pattern',
                    type=str,
                    required=True,
                    help='A filename pattern specifying variables in filenames.')
args = parser.parse_args()

# Prefer the 'images' subcollection when one exists
input_dir = pathlib.Path(args.input_dir)
images_subdir = input_dir.joinpath("images")
if images_subdir.is_dir():
    input_dir = images_subdir

output_dir = pathlib.Path(args.output_dir)
file_pattern = args.file_pattern

# Echo the resolved configuration
logger.info(f'input_dir = {input_dir}')
logger.info(f'output_dir = {output_dir}')
logger.info(f'file_pattern = {file_pattern}')
logger.info(f'max_threads: {ProcessManager.num_processes()}')

ProcessManager.init_processes('main', 'stack')

main(input_dir, file_pattern, output_dir)