def segment_images(inpDir, outDir, config_data):
    """ Workflow for data with similar morphology as sialyltransferase 1.

    Args:
        inpDir : path to the input directory
        outDir : path to the output directory
        config_data : dictionary of configuration parameters
    """

    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    inpDir_files = os.listdir(inpDir)
    for i, f in enumerate(inpDir_files):
        logger.info('Segmenting image : {}'.format(f))

        # Load the image
        br = BioReader(os.path.join(inpDir, f))
        image = br.read_image()
        structure_channel = 0
        struct_img0 = image[:, :, :, structure_channel, 0]
        struct_img0 = struct_img0.transpose(2, 0, 1).astype(np.float32)

        # main algorithm
        intensity_scaling_param = config_data['intensity_scaling_param']
        struct_img = intensity_normalization(struct_img0, scaling_param=intensity_scaling_param)
        gaussian_smoothing_sigma = config_data['gaussian_smoothing_sigma']
        structure_img_smooth = image_smoothing_gaussian_3d(struct_img, sigma=gaussian_smoothing_sigma)
        global_thresh_method = config_data['global_thresh_method']
        object_minArea = config_data['object_minArea']
        bw, object_for_debug = MO(structure_img_smooth,
                                  global_thresh_method=global_thresh_method,
                                  object_minArea=object_minArea,
                                  return_object=True)
        thin_dist_preserve = config_data['thin_dist_preserve']
        thin_dist = config_data['thin_dist']
        bw_thin = topology_preserving_thinning(bw > 0, thin_dist_preserve, thin_dist)
        s3_param = config_data['s3_param']
        bw_extra = dot_3d_wrapper(structure_img_smooth, s3_param)
        bw_combine = np.logical_or(bw_extra > 0, bw_thin)
        minArea = config_data['minArea']
        seg = remove_small_objects(bw_combine > 0, min_size=minArea, connectivity=1, in_place=False)
        seg = seg > 0
        out_img = seg.astype(np.uint8)
        out_img[out_img > 0] = 255

        # create the output image
        out_img = out_img.transpose(1, 2, 0)
        out_img = out_img.reshape((out_img.shape[0], out_img.shape[1], out_img.shape[2], 1, 1))

        # write the image using BFIO
        bw = BioWriter(os.path.join(outDir, f), metadata=br.read_metadata())
        bw.num_x(out_img.shape[1])
        bw.num_y(out_img.shape[0])
        bw.num_z(out_img.shape[2])
        bw.num_c(out_img.shape[3])
        bw.num_t(out_img.shape[4])
        bw.pixel_type(dtype='uint8')
        bw.write_image(out_img)
        bw.close_image()
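# Example (illustrative): a minimal config_data dictionary for the workflow
# above. The keys match the lookups in segment_images; the values shown are
# assumptions for demonstration, not validated defaults.
example_config = {
    'intensity_scaling_param': [3, 19],  # bounds passed to intensity_normalization
    'gaussian_smoothing_sigma': 1,       # sigma for image_smoothing_gaussian_3d
    'global_thresh_method': 'tri',       # thresholding method passed to MO
    'object_minArea': 1200,              # minimum object size for MO
    'thin_dist_preserve': 1.6,           # topology_preserving_thinning parameters
    'thin_dist': 1,
    's3_param': [[1, 0.04]],             # scale/cutoff pairs for dot_3d_wrapper
    'minArea': 15,                       # remove_small_objects threshold
}
# segment_images('/path/to/input', '/path/to/output', example_config)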
def create_and_write_output(predictions_path, output_path, inpDir):
    """ This function uses the bfio utility to write the output.

    Args:
        predictions_path: The directory in which the neural network writes its 3-channel output
        output_path: The directory in which the user wants the final binary output
        inpDir: The input directory consisting of the input collection
    """
    filenames = sorted(os.listdir(predictions_path))
    for filename in filenames:
        # read the 3-channel output image from the neural network
        image = cv2.imread(os.path.join(predictions_path, filename))

        # create the binary output using the create_binary function
        out_image = create_binary(image)

        # read and store the metadata from the input image
        with BioReader(os.path.join(inpDir, filename)) as br:
            metadata = br.metadata

        # Write the binary output with the input metadata using bfio.
        output_image_5channel = np.zeros((out_image.shape[0], out_image.shape[1], 1, 1, 1), dtype=np.uint8)
        output_image_5channel[:, :, 0, 0, 0] = out_image

        with BioWriter(os.path.join(output_path, filename), metadata=metadata) as bw:
            bw.dtype = output_image_5channel.dtype
            bw.write(output_image_5channel)
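# Note: create_binary is not defined in this section. A minimal sketch of what
# it could look like, assuming the network emits a 3-channel per-class map and
# that channel 1 is the foreground class (both are assumptions):
def create_binary(image: np.ndarray) -> np.ndarray:
    # per-pixel class is the argmax over the channel axis
    class_map = np.argmax(image, axis=-1)
    # keep only the (assumed) foreground class as a 0/1 mask
    return (class_map == 1).astype(np.uint8)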
def write_to(self, outfile: Path):
    """ Writes a labelled ome.tif to the given path.

    This uses the metadata of the input file and sets the dtype depending on
    the number of labelled objects.

    Args:
        outfile: Path where the labelled image will be written.
    """
    with BioWriter(outfile, metadata=self.metadata, max_workers=cpu_count()) as writer:
        writer.dtype = self.dtype()
        logger.info(f'writing {outfile.name} with dtype {self.dtype()}...')
        tile_size, _, num_cols, num_rows = self._get_iteration_params(writer.Z, writer.Y, writer.X)

        tile_count = 0
        for z in range(writer.Z):
            for y in range(0, writer.Y, tile_size):
                y_max = min(writer.Y, y + tile_size)
                for x in range(0, writer.X, tile_size):
                    x_max = min(writer.X, x + tile_size)

                    tile = extract_tile(self.__polygon_set, (z, z + 1, y, y_max, x, x_max))
                    writer[y:y_max, x:x_max, z:z + 1, 0, 0] = tile.transpose(1, 2, 0)

                    tile_count += 1
                    logger.debug(f'Wrote tile {tile_count}, ({z}, {y}:{y_max}, {x}:{x_max})')
                    logger.info(f'Writing Progress {100 * tile_count / (num_cols * num_rows * writer.Z):6.3f}%...')
    return self
def test_correctness(self):
    # calculate the result with the plugin code
    with BioReader(self.infile.name) as reader:
        with BioWriter(self.outfile.name, metadata=reader.metadata) as writer:
            rolling_ball(
                reader=reader,
                writer=writer,
                ball_radius=self.ball_radius,
                light_background=False,
            )

    # read the image we just wrote into a numpy array
    with BioReader(self.outfile.name) as reader:
        plugin_result = reader[:]

    # calculate the true result
    background = restoration.rolling_ball(self.random_image, radius=self.ball_radius)
    true_result = self.random_image - background

    # assert correctness
    self.assertTrue(
        numpy.all(numpy.equal(true_result, plugin_result)),
        'The plugin resulted in a different image',
    )
    return
def unshade_image(img, out_dir, brightfield, darkfield, photobleach=None, offset=None):
    with ProcessManager.thread() as active_threads:
        with BioReader(img, max_workers=active_threads.count) as br:
            with BioWriter(out_dir.joinpath(img.name),
                           metadata=br.metadata,
                           max_workers=active_threads.count) as bw:
                new_img = br[:, :, :1, 0, 0].squeeze().astype(np.float32)
                new_img = new_img - darkfield
                new_img = np.divide(new_img, brightfield)

                if photobleach is not None:
                    new_img = new_img - np.float32(photobleach)
                if offset is not None:
                    new_img = new_img + np.float32(offset)

                new_img[new_img < 0] = 0
                new_img = new_img.astype(br.dtype)
                bw[:] = new_img
def read_file(input_directory, pixelsize, output_directory):
    img_pixelsize_x = pixelsize
    img_pixelsize_y = pixelsize
    modelfile_path = "2d_cell_net_v0-cytoplasm.modeldef.h5"
    weightfile_path = "snapshot_cytoplasm_iter_1000.caffemodel.h5"
    iofile_path = "output.h5"
    out_path = Path(output_directory)
    rootdir1 = Path(input_directory)

    # Convert the tif to tiled tiff
    javabridge.start_vm(args=["-Dlog4j.configuration=file:{}".format(LOG4J)],
                        class_path=JARS,
                        run_headless=True)

    i = 0
    try:
        for PATH in rootdir1.glob('**/*'):
            tile_grid_size = 1
            tile_size = tile_grid_size * 1024

            # Set up the BioReader
            with BioReader(PATH, backend='java', max_workers=cpu_count()) as br:
                # Loop through timepoints
                for t in range(br.T):
                    # Loop through channels
                    for c in range(br.C):
                        with BioWriter(out_path.joinpath(f"final{i}.ome.tif"),
                                       metadata=br.metadata,
                                       backend='java') as bw:
                            # Loop through z-slices
                            for z in range(br.Z):
                                # Loop across the length of the image
                                for y in range(0, br.Y, tile_size):
                                    y_max = min([br.Y, y + tile_size])
                                    # Loop across the depth of the image
                                    for x in range(0, br.X, tile_size):
                                        x_max = min([br.X, x + tile_size])

                                        input_img = np.squeeze(br[y:y_max, x:x_max, z:z + 1, c, t])
                                        img = unet_segmentation(input_img,
                                                                img_pixelsize_x, img_pixelsize_y,
                                                                modelfile_path, weightfile_path,
                                                                iofile_path)
                                        bw[y:y_max, x:x_max, ...] = img
                                        os.remove("output.h5")
                        i += 1
    finally:
        # Close the javabridge. Since this is in the finally block, it always runs.
        javabridge.kill_vm()
def init_zarr_file(path: Path, metadata: Any):
    with BioWriter(path, metadata=metadata) as writer:
        writer.dtype = numpy.uint32
        writer.C = 1
        writer.channel_names = ['label']

        # noinspection PyProtectedMember
        writer._backend._init_writer()
    return
def close_thread(dependency: Future, bw: BioWriter):
    """ Close an image once the final tile is written

    Args:
        dependency (Future): The final tile thread
        bw (BioWriter): The BioWriter to close

    Returns:
        Returns True when completed
    """
    dependency.result()
    bw.close()
    return True
def setUpClass(cls) -> None:
    cls.infile = tempfile.NamedTemporaryFile(suffix='.ome.tif')
    cls.outfile = tempfile.NamedTemporaryFile(suffix='.ome.tif')

    with BioWriter(cls.infile.name) as writer:
        writer.X = cls.image_shape[0]
        writer.Y = cls.image_shape[1]
        writer[:] = cls.random_image[:]
    return
def tester(t, ij):
    try:
        print("Testing {} data type...".format(t))
        shape = (2048, 2048)

        print("Creating Array...")
        array = np.random.randint(0, 255, size=shape, dtype=np.uint16)

        print("Converting Array...")
        array = NUMPY_TYPES[t][0](array)
        dtype0 = ij.py.dtype(array)
        print("The initial data type is {}".format(dtype0))

        temp_path = Path(__file__).with_name("data-convert-temp")

        print("Writing image array to file...")
        with BioWriter(temp_path) as writer:
            writer.X = shape[0]
            writer.Y = shape[1]
            writer.dtype = array.dtype
            writer[:] = array[:]

        print("Reading image from file...")
        arr = BioReader(temp_path)

        print("Getting data type after reading image...")
        dtype1 = ij.py.dtype(arr[:, :, 0:1, 0, 0])
        print("Data type after reading image is {}".format(dtype1))

        # print('Trying to convert to PlanarImg')
        # planarimg = ij.planar(arr)

        if dtype0 != dtype1:
            print("Manually forcing data type back to {}".format(dtype0))
            arr = NUMPY_TYPES[t][0](arr[:, :, 0:1, 0, 0])

            print("Converting to Java object...")
            arr = ij_converter.to_java(ij, np.squeeze(arr), "ArrayImg")

            print("Getting data type after manually forcing...")
            dtype2 = ij.py.dtype(arr)
            print("Data type after manual forcing is {}".format(dtype2))
            val_dtype = dtype2
        else:
            arr = ij_converter.to_java(ij, np.squeeze(arr[:, :, 0:1, 0, 0]), "ArrayImg")
            val_dtype = dtype1

        value = 5
        print("Converting input (value) to Java primitive type {}...".format(val_dtype))
        val = ij_converter.to_java(ij, value, t, val_dtype)

        print("Calling ImageJ op...")
        out = ij.op().math().add(arr, val)
        print("The op was SUCCESSFUL with data type {}".format(t))

    except Exception:
        print("Testing data type {} was NOT SUCCESSFUL".format(t))
        print(traceback.format_exc())

    finally:
        print("Shutting down JVM...\n\n")
        del ij
        jpype.shutdownJVM()
def write_cropped_images(
        file_paths: list[Path],
        output_dir: Path,
        bounding_box: helpers.BoundingBox,
):
    """ Crops and writes the given group of images using the given bounding box.

    Args:
        file_paths: A list of Paths for the input images.
        output_dir: A Path to the output directory.
        bounding_box: The bounding-box to use for cropping the images.
    """
    z1, z2, y1, y2, x1, x2 = bounding_box
    out_depth, out_width, out_height = z2 - z1, y2 - y1, x2 - x1
    logger.info(f'Superset bounding {bounding_box = }...')
    logger.info(f'Cropping to shape (z, y, x) = {out_depth, out_width, out_height}...')

    for file_path in file_paths:
        out_path = output_dir.joinpath(helpers.replace_extension(file_path.name))
        logger.info(f'Writing {out_path.name}...')

        with BioReader(file_path) as reader:
            with BioWriter(out_path, metadata=reader.metadata, max_workers=constants.NUM_THREADS) as writer:
                writer.Z = out_depth
                writer.Y = out_width
                writer.X = out_height

                for z_out in range(writer.Z):
                    z_in = z_out + z1

                    for out_y in range(0, writer.Y, constants.TILE_STRIDE):
                        out_y_max = min(writer.Y, out_y + constants.TILE_STRIDE)
                        in_y = out_y + y1
                        in_y_max = min(y2, in_y + constants.TILE_STRIDE)

                        for out_x in range(0, writer.X, constants.TILE_STRIDE):
                            out_x_max = min(writer.X, out_x + constants.TILE_STRIDE)
                            in_x = out_x + x1
                            in_x_max = min(x2, in_x + constants.TILE_STRIDE)

                            try:
                                tile = reader[in_y:in_y_max, in_x:in_x_max, z_in:z_in + 1, 0, 0]
                                writer[out_y:out_y_max, out_x:out_x_max, z_out:z_out + 1, 0, 0] = tile[:]
                            except AssertionError as e:
                                logger.error(
                                    f'failed to read tile {(in_y, in_y_max, in_x, in_x_max, z_in, z_in + 1) = }\n'
                                    f'and write to {(out_y, out_y_max, out_x, out_x_max, z_out, z_out + 1) = }\n'
                                    f'because {e}'
                                )
                                raise e
    return
def assemble_image(vector_path: pathlib.Path, out_path: pathlib.Path, depth: int) -> None:
    """ Assemble a 2d or 3d image

    This method assembles one image from one stitching vector. It can assemble
    both 2d and z-stacked 3d images. It is intended to run as a process to
    parallelize stitching of multiple images.

    The basic approach to stitching is:
    1. Parse the stitching vector and extract the image dimensions
    2. Generate a thread for each subsection (supertile) of an image.

    Args:
        vector_path: Path to the stitching vector
        out_path: Path to the output directory
        depth: depth of the input images
    """
    # Grab a free process
    with ProcessManager.process():
        # Parse the stitching vector
        parsed_vector = _parse_stitch(vector_path, timesliceNaming)

        # Initialize the output image
        with BioReader(parsed_vector['filePos'][0]['file']) as br:
            bw = BioWriter(out_path.joinpath(parsed_vector['name']),
                           metadata=br.metadata,
                           max_workers=ProcessManager._active_threads)
            bw.x = parsed_vector['width']
            bw.y = parsed_vector['height']
            bw.z = depth

        # Assemble the images
        ProcessManager.log('Begin assembly')
        for z in range(depth):
            ProcessManager.log(f'Assembling Z position : {z}')
            for x in range(0, parsed_vector['width'], chunk_size):
                # max x-pixel index in the assembled image
                X_range = min(x + chunk_size, parsed_vector['width'])
                for y in range(0, parsed_vector['height'], chunk_size):
                    # max y-pixel index in the assembled image
                    Y_range = min(y + chunk_size, parsed_vector['height'])

                    ProcessManager.submit_thread(make_tile,
                                                 x, X_range, y, Y_range, z,
                                                 parsed_vector, bw)

        ProcessManager.join_threads()
        bw.close()
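# Note: _parse_stitch is not shown in this section. As an assumption about its
# input, a MIST/WIPP-style global-positions stitching vector contains one line
# per image, e.g. (hypothetical filename and values):
#
#   file: img_r001_c001.ome.tif; corr: 0.8251; position: (0, 0); grid: (0, 0);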
def setUpClass(cls) -> None:
    cls.infile = tempfile.NamedTemporaryFile(suffix='.ome.tif')
    cls.outfile = tempfile.NamedTemporaryFile(suffix='.ome.tif')

    random_image = numpy.random.randint(
        low=0,
        high=255,
        size=cls.image_shape,
        dtype=numpy.uint8,
    )

    with BioWriter(cls.infile.name) as writer:
        writer.X = cls.image_shape[0]
        writer.Y = cls.image_shape[1]
        writer[:] = random_image[:]
    return
def write_corrected_images(
        *,
        group: list[utils.FPFileDict],
        channel_ordering: list[int],
        components_dir: Path,
        output_dir: Path,
):
    logger.info('writing corrected images...')
    files = [file['file'] for file in group]
    if len(channel_ordering) == 0:
        channel_ordering = list(range(len(files)))
    files = [files[c] for c in channel_ordering]

    for image_path in files:
        component_path = components_dir.joinpath(image_path.name)
        assert component_path.exists()

        output_path = output_dir.joinpath(image_path.name)
        if output_path.exists():
            continue

        logger.info(f'writing image {image_path.name}...')
        with BioReader(image_path) as image_reader, \
                BioReader(component_path) as component_reader:
            with BioWriter(output_path, metadata=image_reader.metadata) as writer:
                for y_min in range(0, writer.Y, utils.TILE_SIZE_2D):
                    y_max = min(writer.Y, y_min + utils.TILE_SIZE_2D)
                    for x_min in range(0, writer.X, utils.TILE_SIZE_2D):
                        x_max = min(writer.X, x_min + utils.TILE_SIZE_2D)

                        image_tile = numpy.squeeze(image_reader[y_min:y_max, x_min:x_max, 0, 0, 0])
                        component_tile = numpy.squeeze(component_reader[y_min:y_max, x_min:x_max, 0, 0, 0])

                        writer[y_min:y_max, x_min:x_max, 0, 0, 0] = image_tile - component_tile
    return
def label_cython(input_path: Path, output_path: Path, connectivity: int):
    """ Label the input image and write the labels back out.

    Args:
        input_path: Path to the input image.
        output_path: Path for the output image.
        connectivity: Connectivity kind.
    """
    with ProcessManager.thread() as active_threads:
        with BioReader(
                input_path,
                max_workers=active_threads.count,
        ) as reader:
            with BioWriter(
                    output_path,
                    max_workers=active_threads.count,
                    metadata=reader.metadata,
            ) as writer:
                # Load the image and convert to binary
                image = numpy.squeeze(reader[..., 0, 0])

                if not numpy.any(image):
                    writer.dtype = numpy.uint8
                    writer[:] = numpy.zeros_like(image, dtype=numpy.uint8)
                    return

                image = (image > 0)

                if connectivity > image.ndim:
                    ProcessManager.log(
                        f'{input_path.name}: Connectivity is not less than or equal to the number of image dimensions, '
                        f'skipping this image. connectivity={connectivity}, ndim={image.ndim}'
                    )
                    return

                # Run the labeling algorithm
                labels = ftl.label_nd(image, connectivity)

                # Save the labels
                writer.dtype = labels.dtype
                writer[:] = labels

    return True
def zarr_to_tif(zarr_path: Path, out_path: Path):
    with BioReader(zarr_path, max_workers=utils.NUM_THREADS) as reader:
        with BioWriter(out_path, metadata=reader.metadata, max_workers=utils.NUM_THREADS) as writer:
            writer.dtype = numpy.uint32

            for z in range(reader.Z):
                for y in range(0, reader.Y, utils.TILE_SIZE):
                    y_max = min(reader.Y, y + utils.TILE_SIZE)
                    for x in range(0, reader.X, utils.TILE_SIZE):
                        x_max = min(reader.X, x + utils.TILE_SIZE)

                        tile = reader[y:y_max, x:x_max, z:z + 1, 0, 0]
                        writer[y:y_max, x:x_max, z:z + 1, 0, 0] = tile

    shutil.rmtree(zarr_path)
    return
def segment_images(inpDir, outDir, config_data):
    """ Workflow for data with filamentous structures such as ZO1, Beta Actin,
    Titin, Troponin 1.

    Args:
        inpDir : path to the input directory
        outDir : path to the output directory
        config_data : dictionary of configuration parameters
    """

    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    inpDir_files = os.listdir(inpDir)
    for i, f in enumerate(inpDir_files):
        logger.info('Segmenting image : {}'.format(f))

        # Load the image
        br = BioReader(os.path.join(inpDir, f))
        image = br.read_image()
        structure_channel = 0
        struct_img0 = image[:, :, :, structure_channel, 0]
        struct_img0 = struct_img0.transpose(2, 0, 1).astype(np.float32)

        # main algorithm
        intensity_scaling_param = config_data['intensity_scaling_param']
        struct_img = intensity_normalization(struct_img0, scaling_param=intensity_scaling_param)
        gaussian_smoothing_sigma = config_data['gaussian_smoothing_sigma']
        if config_data['preprocessing_function'] == 'image_smoothing_gaussian_3d':
            structure_img_smooth = image_smoothing_gaussian_3d(struct_img, sigma=gaussian_smoothing_sigma)
        elif config_data['preprocessing_function'] == 'edge_preserving_smoothing_3d':
            structure_img_smooth = edge_preserving_smoothing_3d(struct_img)

        f3_param = config_data['f3_param']
        bw = filament_3d_wrapper(structure_img_smooth, f3_param)
        minArea = config_data['minArea']
        seg = remove_small_objects(bw > 0, min_size=minArea, connectivity=1, in_place=False)
        seg = seg > 0
        out_img = seg.astype(np.uint8)
        out_img[out_img > 0] = 255

        # create the output image
        out_img = out_img.transpose(1, 2, 0)
        out_img = out_img.reshape((out_img.shape[0], out_img.shape[1], out_img.shape[2], 1, 1))

        # write the image using BFIO
        bw = BioWriter(os.path.join(outDir, f), metadata=br.read_metadata())
        bw.num_x(out_img.shape[1])
        bw.num_y(out_img.shape[0])
        bw.num_z(out_img.shape[2])
        bw.num_c(out_img.shape[3])
        bw.num_t(out_img.shape[4])
        bw.pixel_type(dtype='uint8')
        bw.write_image(out_img)
        bw.close_image()
# Input and output directory
batch = args.batch.split(',')
output_dir = args.output_directory

# Load the model architecture and weights
model = unet()
model.load_weights('unet.h5')

# Iterate over the files to be processed
for filename in batch:
    logger.info("Processing image: {}".format(filename))

    # Use bfio to read the image
    with BioReader(filename) as br:
        with BioWriter(str(Path(output_dir).joinpath(Path(filename).name).absolute()),
                       metadata=br.metadata) as bw:
            bw.dtype = np.uint8

            for x in range(0, br.X, tile_size):
                x_max = min([br.X, x + tile_size])
                for y in range(0, br.Y, tile_size):
                    y_max = min([br.Y, y + tile_size])
                    img = br[y:y_max, x:x_max, 0:1, 0, 0]

                    # Extract the 2-D grayscale image. bfio loads an image as a 5-D array.
                    img = img[:, :, 0, 0, 0]
def segment_images(inpDir, outDir, config_data):
    """ Workflow for data with similar morphology as LAMP-1

    Args:
        inpDir : path to the input directory
        outDir : path to the output directory
        config_data : dictionary of configuration parameters
    """

    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    inpDir_files = os.listdir(inpDir)
    for i, f in enumerate(inpDir_files):
        logger.info('Segmenting image : {}'.format(f))

        # Load the image
        br = BioReader(os.path.join(inpDir, f))
        image = br.read_image()
        structure_channel = 0
        struct_img0 = image[:, :, :, structure_channel, 0]
        struct_img0 = struct_img0.transpose(2, 0, 1).astype(np.float32)

        # main algorithm
        intensity_scaling_param = config_data['intensity_scaling_param']
        struct_img = intensity_normalization(struct_img0, scaling_param=intensity_scaling_param)
        gaussian_smoothing_sigma = config_data['gaussian_smoothing_sigma']
        structure_img_smooth = image_smoothing_gaussian_slice_by_slice(struct_img,
                                                                       sigma=gaussian_smoothing_sigma)
        s2_param = config_data['s2_param']
        bw_spot = dot_2d_slice_by_slice_wrapper(structure_img_smooth, s2_param)
        f2_param = config_data['f2_param']
        bw_filament = filament_2d_wrapper(structure_img_smooth, f2_param)
        bw = np.logical_or(bw_spot, bw_filament)

        fill_2d = config_data['fill_2d']
        if fill_2d == 'True':
            fill_2d = True
        elif fill_2d == 'False':
            fill_2d = False
        fill_max_size = config_data['fill_max_size']
        minArea = config_data['minArea']
        bw_fill = hole_filling(bw, 0, fill_max_size, fill_2d)
        seg = remove_small_objects(bw_fill > 0, min_size=minArea, connectivity=1, in_place=False)
        seg = seg > 0
        out_img = seg.astype(np.uint8)
        out_img[out_img > 0] = 255

        # create the output image
        out_img = out_img.transpose(1, 2, 0)
        out_img = out_img.reshape((out_img.shape[0], out_img.shape[1], out_img.shape[2], 1, 1))

        # write the image using BFIO
        bw = BioWriter(os.path.join(outDir, f), metadata=br.read_metadata())
        bw.num_x(out_img.shape[1])
        bw.num_y(out_img.shape[0])
        bw.num_z(out_img.shape[2])
        bw.num_c(out_img.shape[3])
        bw.num_t(out_img.shape[4])
        bw.pixel_type(dtype='uint8')
        bw.write_image(out_img)
        bw.close_image()
def segment_images(inpDir, outDir, config_data):
    """ Workflow for data with shell like shapes such as lamin B1
    (interphase-specific)

    Args:
        inpDir : path to the input directory
        outDir : path to the output directory
        config_data : dictionary of configuration parameters
    """

    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    inpDir_files = os.listdir(inpDir)
    for i, f in enumerate(inpDir_files):
        logger.info('Segmenting image : {}'.format(f))

        # Load the image
        br = BioReader(os.path.join(inpDir, f))
        image = br.read_image()
        structure_channel = 0
        struct_img0 = image[:, :, :, structure_channel, 0]
        struct_img0 = struct_img0.transpose(2, 0, 1).astype(np.float32)

        # main algorithm
        intensity_scaling_param = config_data['intensity_scaling_param']
        struct_img = intensity_normalization(struct_img0, scaling_param=intensity_scaling_param)
        gaussian_smoothing_sigma = config_data['gaussian_smoothing_sigma']
        structure_img_smooth = image_smoothing_gaussian_3d(struct_img, sigma=gaussian_smoothing_sigma)
        middle_frame_method = config_data['middle_frame_method']
        mid_z = get_middle_frame(structure_img_smooth, method=middle_frame_method)
        f2_param = config_data['f2_param']
        bw_mid_z = filament_2d_wrapper(structure_img_smooth[mid_z, :, :], f2_param)
        hole_max = config_data['hole_max']
        hole_min = config_data['hole_min']
        bw_fill_mid_z = hole_filling(bw_mid_z, hole_min, hole_max)
        seed = get_3dseed_from_mid_frame(np.logical_xor(bw_fill_mid_z, bw_mid_z),
                                         struct_img.shape, mid_z, hole_min)
        bw_filled = watershed(struct_img, seed.astype(int), watershed_line=True) > 0
        seg = np.logical_xor(bw_filled, dilation(bw_filled, selem=ball(1)))
        seg = seg > 0
        out_img = seg.astype(np.uint8)
        out_img[out_img > 0] = 255

        # create the output image
        out_img = out_img.transpose(1, 2, 0)
        out_img = out_img.reshape((out_img.shape[0], out_img.shape[1], out_img.shape[2], 1, 1))

        # write the image using BFIO
        bw = BioWriter(os.path.join(outDir, f), metadata=br.read_metadata())
        bw.num_x(out_img.shape[1])
        bw.num_y(out_img.shape[0])
        bw.num_z(out_img.shape[2])
        bw.num_c(out_img.shape[3])
        bw.num_t(out_img.shape[4])
        bw.pixel_type(dtype='uint8')
        bw.write_image(out_img)
        bw.close_image()
def make_tile(x_min: int, x_max: int, y_min: int, y_max: int, z: int,
              parsed_vector: dict, bw: BioWriter) -> None:
    """ Create a supertile from images and save to file

    This method builds a supertile, which is a section of the image defined by
    the global variable ``chunk_size`` and is composed of multiple smaller tiles
    defined by the ``BioReader._TILE_SIZE``. Images are stored on disk as
    compressed chunks that are ``_TILE_SIZE`` length and width, and the upper
    left pixel of a tile is always a multiple of ``_TILE_SIZE``. To prevent
    excessive file loading and to ensure files are properly placed, supertiles
    are created from smaller images and saved all at once.

    Args:
        x_min: Minimum x bound of the tile
        x_max: Maximum x bound of the tile
        y_min: Minimum y bound of the tile
        y_max: Maximum y bound of the tile
        z: Current z position to assemble
        parsed_vector: The result of _parse_stitch
        bw: The output file object
    """
    with ProcessManager.thread() as active_threads:
        # Get the data type
        with BioReader(parsed_vector['filePos'][0]['file']) as br:
            dtype = br.dtype

        # initialize the supertile
        template = numpy.zeros((y_max - y_min, x_max - x_min, 1, 1, 1), dtype=dtype)

        # get the images in bounds of the current supertile
        for f in parsed_vector['filePos']:
            # check that the image is within the x-tile bounds
            if (f['posX'] >= x_min and f['posX'] <= x_max) \
                    or (f['posX'] + f['width'] >= x_min and f['posX'] + f['width'] <= x_max) \
                    or (f['posX'] <= x_min and f['posX'] + f['width'] >= x_max):

                # check that the image is within the y-tile bounds
                if (f['posY'] >= y_min and f['posY'] <= y_max) \
                        or (f['posY'] + f['height'] >= y_min and f['posY'] + f['height'] <= y_max) \
                        or (f['posY'] <= y_min and f['posY'] + f['height'] >= y_max):

                    # get the bounds of the image within the tile
                    Xt = [max(0, f['posX'] - x_min)]
                    Xt.append(min(x_max - x_min, f['posX'] + f['width'] - x_min))
                    Yt = [max(0, f['posY'] - y_min)]
                    Yt.append(min(y_max - y_min, f['posY'] + f['height'] - y_min))

                    # get the bounds of the tile within the image
                    Xi = [max(0, x_min - f['posX'])]
                    Xi.append(min(f['width'], x_max - f['posX']))
                    Yi = [max(0, y_min - f['posY'])]
                    Yi.append(min(f['height'], y_max - f['posY']))

                    # Load the image
                    with BioReader(f['file'], max_workers=active_threads.count) as br:
                        # only get the first c,t layer
                        image = br[Yi[0]:Yi[1], Xi[0]:Xi[1], z:z + 1, 0, 0]

                    # Put the image in the buffer
                    template[Yt[0]:Yt[1], Xt[0]:Xt[1], ...] = image

        # Save the supertile
        bw.max_workers = ProcessManager._active_threads
        bw[y_min:y_max, x_min:x_max, z:z + 1, 0, 0] = template
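# Worked example of the bounds arithmetic above (all values hypothetical):
# an image of width 1024 placed at posX = 10140, against a supertile spanning
# x_min = 10240 to x_max = 20480.
x_min, x_max = 10240, 20480   # supertile x bounds
posX, width = 10140, 1024     # image position and width

# bounds of the image within the supertile buffer
Xt = [max(0, posX - x_min), min(x_max - x_min, posX + width - x_min)]
# -> [0, 924]: the image's rightmost 924 columns land at the supertile's left edge

# bounds of the supertile within the image
Xi = [max(0, x_min - posX), min(width, x_max - posX)]
# -> [100, 1024]: image columns 100..1023 are the overlapping region, also 924 wide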
def _write_components_thread(
        self,
        output_dir: Path,
        image_name: str,
        source_index: int,
):
    """ Writes the bleed-through components for a single image.

    This function can be run in a single thread in a ProcessPoolExecutor.

    Args:
        output_dir: Path for the directory of the bleed-through components.
        image_name: name of the source image.
        source_index: index of the source channel.
    """
    neighbor_indices = self._get_neighbors(source_index)

    neighbor_mins = [self.image_mins[i] for i in neighbor_indices]
    neighbor_maxs = [self.image_maxs[i] for i in neighbor_indices]

    coefficients = self.__coefficients[source_index]

    neighbor_readers = [
        BioReader(self.__files[i], max_workers=utils.NUM_THREADS)
        for i in neighbor_indices
    ]

    with BioReader(self.__files[source_index], max_workers=utils.NUM_THREADS) as source_reader:
        metadata = source_reader.metadata
        num_tiles = utils.count_tiles_2d(source_reader)
        tile_indices = list(utils.tile_indices_2d(source_reader))

        with BioWriter(
                output_dir.joinpath(image_name),
                metadata=metadata,
                max_workers=utils.NUM_THREADS,
        ) as writer:
            logger.info(f'Writing components for {image_name}...')
            for i, (z, y_min, y_max, x_min, x_max) in enumerate(tile_indices):
                tile = numpy.squeeze(source_reader[y_min:y_max, x_min:x_max, z:z + 1, 0, 0])
                original_component = numpy.zeros_like(tile)

                if i % 10 == 0:
                    logger.info(f'Writing {image_name}: Progress {100 * i / num_tiles:6.2f} %')

                all_kernel_indices = numpy.asarray(self._get_kernel_indices(source_index),
                                                   dtype=numpy.uint64)
                for neighbor_index, (neighbor_reader, min_val, max_val) in enumerate(
                        zip(neighbor_readers, neighbor_mins, neighbor_maxs)):
                    neighbor_tile = utils.normalize_tile(
                        tile=numpy.squeeze(neighbor_reader[y_min:y_max, x_min:x_max, z:z + 1, 0, 0]),
                        min_val=min_val,
                        max_val=max_val,
                    )

                    kernel_size = self.__kernel_size ** 2
                    kernel_indices = all_kernel_indices[kernel_size * neighbor_index:
                                                        kernel_size * (1 + neighbor_index)]
                    kernel = coefficients[kernel_indices]
                    kernel = numpy.reshape(kernel, newshape=(self.__kernel_size, self.__kernel_size))

                    if numpy.any(kernel > 0):
                        if self.__kernel_size > 1:
                            smoothed_tile = scipy.ndimage.gaussian_filter(neighbor_tile, 2)
                            smoothed_tile = numpy.min(numpy.dstack((smoothed_tile, neighbor_tile)), axis=-1)
                        else:
                            smoothed_tile = neighbor_tile

                        # apply the coefficient
                        current_component = scipy.ndimage.correlate(smoothed_tile, kernel)

                        # Rescale, but do not add in the minimum value offset.
                        current_component *= (max_val - min_val)
                        original_component += current_component.astype(tile.dtype)

                # Make sure bleed-through is not higher than the original signal.
                original_component = numpy.min(numpy.dstack((tile, original_component)), axis=-1)

                writer[y_min:y_max, x_min:x_max, z:z + 1, 0, 0] = original_component

    for reader in neighbor_readers:
        reader.close()
    return
def main(inpDir: Path,
         cellprob_threshold: float,
         flow_threshold: float,
         outDir: Path,
         ) -> None:
    # Get the list of files in the input path
    files = [p for p in Path(inpDir).iterdir() if p.name.endswith('_flow.ome.zarr')]

    num_threads = max([cpu_count() // 2, 1])
    logger.info(f'Processing tiles with {num_threads} threads using {DEV}')

    if len(files) == 0:
        logger.critical('No flow files detected.')
        return

    processes = []
    with ThreadPoolExecutor(num_threads) as executor:
        # Loop through files in the inpDir image collection and process
        for ind, fpath in enumerate(files):
            br = BioReader(fpath)
            threads = np.empty(br.shape[:3], dtype=object)
            logger.debug('Processing image ({}/{}): {}'.format(ind + 1, len(files), fpath))

            # TODO: Hard coding to ome.tif for now, this should be changed later.
            path = Path(outDir).joinpath(fpath.name.replace('_flow.ome.zarr', '.ome.tif'))
            bw = BioWriter(file_path=Path(path), metadata=br.metadata)
            bw.dtype = np.dtype(np.uint32)
            bw.C = 1
            bw.channel_names = ['label']

            for z in range(0, br.Z, 1):
                y_ind = None
                dependency1 = None
                for y in range(0, br.Y, TILE_SIZE):
                    for x in range(0, br.X, TILE_SIZE):
                        dependency2 = None if y_ind is None else threads[y_ind, x // TILE_SIZE, z]
                        processes.append(executor.submit(mask_thread,
                                                         (x, y, z),
                                                         fpath, bw,
                                                         cellprob_threshold, flow_threshold,
                                                         dependency1, dependency2))
                        dependency1 = processes[-1]
                        threads[y // TILE_SIZE, x // TILE_SIZE, z] = dependency1
                    y_ind = y // TILE_SIZE

            executor.submit(close_thread, dependency1, bw)

        done, not_done = wait(processes, 0)
        logger.info(f'Percent complete: {100 * len(done) / len(processes):6.3f}%')
        while len(not_done) > 0:
            for r in done:
                r.result()
            done, not_done = wait(processes, 15)
            logger.info(f'Percent complete: {100 * len(done) / len(processes):6.3f}%')
                                     outvals['width'], outvals['height']))

# Variables for tile building processes
pnum = 0
ptotal = np.ceil(outvals['width'] / 10240) * np.ceil(outvals['height'] / 10240)
ptotal = 1 / ptotal * 100

# Initialize the output image
logger.info('Initializing output file: {}'.format(outvals['name']))
refImg = str(Path(imgPath).joinpath(outvals['filePos'][0]['file']).absolute())
outFile = str(Path(outDir).joinpath(outvals['name']).absolute())
br = BioReader(str(Path(refImg).absolute()))
bw = BioWriter(str(Path(outFile).absolute()),
               metadata=br.read_metadata(),
               max_workers=max([multiprocessing.cpu_count(), 2]))
bw.num_x(outvals['width'])
bw.num_y(outvals['height'])
del br

# Assemble the images
logger.info('Generating tiles...')
threads = []
with ThreadPoolExecutor(max([multiprocessing.cpu_count() // 2, 2])) as executor:
    for x in range(0, outvals['width'], 10240):
        # max x-pixel index in the assembled image
        X_range = min(x + 10240, outvals['width'])
        for y in range(0, outvals['height'], 10240):
            Y_range = min(y + 10240, outvals['height']
def segment_images(inpDir, outDir, config_data):
    """ Workflow for dot like shapes such as Centrin-2, Desmoplakin, PMP34.

    Args:
        inpDir : path to the input directory
        outDir : path to the output directory
        config_data : dictionary of configuration parameters
    """

    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    inpDir_files = os.listdir(inpDir)
    for i, f in enumerate(inpDir_files):
        logger.info('Segmenting image : {}'.format(f))

        # Load the image
        br = BioReader(os.path.join(inpDir, f))
        image = br.read_image()
        structure_channel = 0
        struct_img0 = image[:, :, :, structure_channel, 0]
        struct_img0 = struct_img0.transpose(2, 0, 1).astype(np.float32)

        # main algorithm
        intensity_scaling_param = config_data['intensity_scaling_param']
        struct_img = intensity_normalization(struct_img0, scaling_param=intensity_scaling_param)
        gaussian_smoothing_sigma = config_data['gaussian_smoothing_sigma']
        if config_data["gaussian_smoothing"] == "gaussian_slice_by_slice":
            structure_img_smooth = image_smoothing_gaussian_slice_by_slice(struct_img,
                                                                           sigma=gaussian_smoothing_sigma)
        else:
            structure_img_smooth = image_smoothing_gaussian_3d(struct_img,
                                                               sigma=gaussian_smoothing_sigma)
        s3_param = config_data['s3_param']
        bw = dot_3d_wrapper(structure_img_smooth, s3_param)
        minArea = config_data['minArea']
        Mask = remove_small_objects(bw > 0, min_size=minArea, connectivity=1, in_place=False)
        Seed = dilation(peak_local_max(struct_img,
                                       labels=label(Mask),
                                       min_distance=2,
                                       indices=False),
                        selem=ball(1))
        Watershed_Map = -1 * distance_transform_edt(bw)
        seg = watershed(Watershed_Map, label(Seed), mask=Mask, watershed_line=True)
        seg = remove_small_objects(seg > 0, min_size=minArea, connectivity=1, in_place=False)
        seg = seg > 0
        out_img = seg.astype(np.uint8)
        out_img[out_img > 0] = 255

        # create the output image
        out_img = out_img.transpose(1, 2, 0)
        out_img = out_img.reshape((out_img.shape[0], out_img.shape[1], out_img.shape[2], 1, 1))

        # write the image using BFIO
        bw = BioWriter(os.path.join(outDir, f), metadata=br.read_metadata())
        bw.num_x(out_img.shape[1])
        bw.num_y(out_img.shape[0])
        bw.num_z(out_img.shape[2])
        bw.num_c(out_img.shape[3])
        bw.num_t(out_img.shape[4])
        bw.pixel_type(dtype='uint8')
        bw.write_image(out_img)
        bw.close_image()
unique_levels.update([0])

# Start the javabridge with proper java logging
logger.info('Initializing the javabridge...')
log_config = Path(__file__).parent.joinpath("log4j.properties")
jutil.start_vm(args=["-Dlog4j.configuration=file:{}".format(str(log_config.absolute()))],
               class_path=bioformats.JARS)

# Generate the heatmap images
logger.info('Generating heatmap images...')
for w in widths:
    for h in heights:
        for l in unique_levels:
            out_file = Path(outImages).joinpath(str(w) + '_' + str(h) + '_' + str(l) + '.ome.tif')
            if not out_file.exists():
                image = np.ones((h, w, 1, 1, 1), dtype=np.uint8) * l
                bw = BioWriter(str(out_file.absolute()), X=w, Y=h, Z=1, C=1, T=1)
                bw.write_image(image)
                bw.close_image()

# Close the javabridge
logger.info('Closing the javabridge...')
jutil.kill_vm()

# Build the output stitching vector
logger.info('Generating the heatmap...')
file_name = '{}_{}_{}.ome.tif'
for num, feat in enumerate(feature_list):
    fpath = str(Path(outVectors).joinpath('img-global-positions-' + str(num + 1) + '.txt').absolute())
    with open(fpath, 'w') as fw:
        line = 0
        while True:
def basic(files: typing.List[Path],
          out_dir: Path,
          metadata_dir: typing.Optional[Path] = None,
          darkfield: bool = False,
          photobleach: bool = False):
    # Try to infer a filename
    try:
        pattern = infer_pattern([f['file'].name for f in files])
        fp = FilePattern(files[0]['file'].parent, pattern)
        base_output = fp.output_name()

    # Fall back to the first filename
    except Exception:
        base_output = files[0]['file'].name

    extension = ''.join(files[0]['file'].suffixes)

    with ProcessManager.process(base_output):
        # Load files and sort
        ProcessManager.log('Loading and sorting images...')
        img_stk, X, Y = _get_resized_image_stack(files)
        img_stk_sort = np.sort(img_stk)

        # Initialize options
        new_options = _initialize_options(img_stk_sort, darkfield, OPTIONS)

        # Initialize flatfield/darkfield matrices
        ProcessManager.log('Beginning flatfield estimation')
        flatfield_old = np.ones((new_options['size'], new_options['size']), dtype=np.float64)
        darkfield_old = np.random.normal(size=(new_options['size'], new_options['size'])).astype(np.float64)

        # Optimize until the change in values is below tolerance or a maximum number of iterations is reached
        for w in range(new_options['max_reweight_iterations']):
            # Optimize using the inexact augmented Lagrangian multiplier method with L1 loss
            A, E1, A_offset = _inexact_alm_l1(copy.deepcopy(img_stk_sort), new_options)

            # Calculate the flatfield/darkfield images and update training weights
            flatfield, darkfield, new_options = _get_flatfield_and_reweight(A, E1, A_offset, new_options)

            # Calculate the change in flatfield and darkfield images between iterations
            mad_flat = np.sum(np.abs(flatfield - flatfield_old)) / np.sum(np.abs(flatfield_old))
            temp_diff = np.sum(np.abs(darkfield - darkfield_old))
            if temp_diff < 10 ** -7:
                mad_dark = 0
            else:
                mad_dark = temp_diff / np.max(np.sum(np.abs(darkfield_old)), initial=10 ** -6)
            flatfield_old = flatfield
            darkfield_old = darkfield

            # Stop optimizing if the change in flatfield/darkfield is below threshold
            ProcessManager.log('Iteration {} loss: {}'.format(w + 1, mad_flat))
            if np.max(mad_flat, initial=mad_dark) < new_options['reweight_tol']:
                break

        # Calculate photobleaching effects if specified
        if photobleach:
            pb = _get_photobleach(copy.deepcopy(img_stk), flatfield, darkfield)

        # Resize images back to the original image size
        ProcessManager.log('Saving outputs...')
        flatfield = cv2.resize(flatfield, (Y, X), interpolation=cv2.INTER_CUBIC).astype(np.float32)
        if new_options['darkfield']:
            darkfield = cv2.resize(darkfield, (Y, X), interpolation=cv2.INTER_CUBIC).astype(np.float32)

        # Export the flatfield image as a tiled tiff
        flatfield_out = base_output.replace(extension, '_flatfield' + extension)
        with BioReader(files[0]['file'], max_workers=2) as br:
            metadata = br.metadata

        with BioWriter(out_dir.joinpath(flatfield_out), metadata=metadata, max_workers=2) as bw:
            bw.dtype = np.float32
            bw.x = X
            bw.y = Y
            bw[:] = np.reshape(flatfield, (Y, X, 1, 1, 1))

        # Export the darkfield image as a tiled tiff
        if new_options['darkfield']:
            darkfield_out = base_output.replace(extension, '_darkfield' + extension)
            with BioWriter(out_dir.joinpath(darkfield_out), metadata=metadata, max_workers=2) as bw:
                bw.dtype = np.float32
                bw.x = X
                bw.y = Y
                bw[:] = np.reshape(darkfield, (Y, X, 1, 1, 1))

        # Export the photobleaching components as csv
        if photobleach:
            offsets_out = base_output.replace(extension, '_offsets.csv')
            with open(metadata_dir.joinpath(offsets_out), 'w') as fw:
                fw.write('file,offset\n')
                for f, o in zip(files, pb[0, :].tolist()):
                    fw.write("{},{}\n".format(f, o))
def main():
    logger.info("Parsing arguments...")
    parser = argparse.ArgumentParser(prog='main', description='Image clustering annotation plugin.')
    parser.add_argument('--imgdir', dest='imgdir', type=str,
                        help='Input collection - image data', required=True)
    parser.add_argument('--csvdir', dest='csvdir', type=str,
                        help='Input collection - csv data', required=True)
    parser.add_argument('--borderwidth', dest='borderwidth', type=int, default=2,
                        help='Border width', required=False)
    parser.add_argument('--outdir', dest='outdir', type=str,
                        help='Output collection', required=True)

    # Parse the arguments
    args = parser.parse_args()

    # Path to the image directory
    imgdir = args.imgdir
    logger.info('imgdir = {}'.format(imgdir))

    # Path to the csv file directory
    csvdir = args.csvdir
    logger.info('csvdir = {}'.format(csvdir))

    # Get the border width
    borderwidth = args.borderwidth
    logger.info('borderwidth = {}'.format(borderwidth))

    # Path to save the output image files
    outdir = args.outdir
    logger.info('outdir = {}'.format(outdir))

    # Get the list of .ome.tif files in the directory, including subfolders
    img_ext = '*.ome.tif'
    configfiles = list_file(imgdir, img_ext)
    config = [os.path.basename(path) for path in configfiles]

    # Check whether .ome.tif files are present in the labeled image directory
    if not configfiles:
        raise ValueError('No .ome.tif files found.')

    # Get the list of .csv files in the directory, including subfolders
    csv_ext = '*.csv'
    inputcsv = list_file(csvdir, csv_ext)
    if not inputcsv:
        raise ValueError('No .csv files found.')

    for inpfile in inputcsv:
        # Get the full path
        split_file = os.path.normpath(inpfile)

        # Split to get only the filename
        inpfilename = os.path.split(split_file)
        file_name_csv = inpfilename[-1]
        file_path = inpfilename[0]
        file_name, file_name1 = file_name_csv.split('.', 1)
        logger.info('Reading the file ' + file_name)

        # Read the csv file
        cluster_data = pd.read_csv(inpfile)
        cluster_data = cluster_data.iloc[:, [0, -1]]

        for index, row in cluster_data.iterrows():
            filename = row[0]
            cluster = row[1]

            # Get the image file that matches the filename in the csv file
            matches = [match for match in config if filename in match]
            if len(matches) == 0:
                logger.warning(f"Could not find image files matching the filename, {filename}. Skipping...")
                continue
            match_getpath = [s for s in configfiles if matches[0] in s]

            # Get the full path
            full_path = os.path.normpath(match_getpath[0])

            # Split to get only the directory
            file_path = os.path.split(full_path)[0]

            # Get the image path and the output directory path
            imgpath = Path(file_path)
            outpath = Path(outdir)

            # Read and write (after making changes) the .ome.tif files
            with BioReader(imgpath / filename) as br, \
                    BioWriter(outpath / filename, metadata=br.metadata) as bw:
                # Zero all pixels except the borders of the specified
                # thickness, and assign the cluster id to the border pixels
                mask = np.zeros(br.shape, dtype=np.int16)
                mask[:borderwidth, :] = cluster
                mask[:, :borderwidth] = cluster
                mask[-borderwidth:, :] = cluster
                mask[:, -borderwidth:] = cluster

                bw.dtype = mask.dtype
                bw[:] = mask

    logger.info("Finished all processes!")
def binary_operation(image, output, function, extra_arguments, extra_padding, kernel, Tile_Size):
    """ Goes through the images and calls the appropriate binary operation.

    Parameters
    ----------
    image : str
        Location of the input image
    output : str
        Location for the BioWriter output
    function : callable
        The binary operation to dispatch on the image
    extra_arguments : int
        Extra argument(s) for the binary operation that is called
    extra_padding : int
        The extra padding around each tile so that binary operations are not
        skewed around the edges
    kernel : cv2 object
        The kernel used for most binary operations
    Tile_Size : int
        Tile size for reading the images
    """
    # Start the javabridge with proper java logging
    logger.info('Initializing the javabridge...')
    log_config = Path(__file__).parent.joinpath("log4j.properties")
    jutil.start_vm(args=["-Dlog4j.configuration=file:{}".format(str(log_config.absolute()))],
                   class_path=bioformats.JARS)

    try:
        # Read the image
        br = BioReader(image)

        # Get the dimensions of the image
        br_x, br_y, br_z, br_c, br_t = br.num_x(), br.num_y(), br.num_z(), br.num_c(), br.num_t()
        br_shape = (br_x, br_y, br_z, br_c, br_t)
        datatype = br.pixel_type()
        max_datatype_val = np.iinfo(datatype).max

        logger.info("Original Datatype {}: ({})".format(datatype, max_datatype_val))
        logger.info("Shape of Input (XYZCT): {}".format(br_shape))

        # Initialize the output
        bw = BioWriter(file_path=output, metadata=br.read_metadata())

        # Initialize the Python generators to go through each "tile" of the image
        tsize = Tile_Size + (2 * extra_padding)
        logger.info("Tile Size {}x{}".format(tsize, tsize))
        readerator = br.iterate(tile_stride=[Tile_Size, Tile_Size],
                                tile_size=[tsize, tsize],
                                batch_size=1)
        writerator = bw.writerate(tile_size=[Tile_Size, Tile_Size],
                                  tile_stride=[Tile_Size, Tile_Size],
                                  batch_size=1)
        next(writerator)

        for images, indices in readerator:
            # Extra tiles do not need to be calculated.
            # Indices should range from -intkernel < index value < Image_Dimension + intkernel
            if (indices[0][0][0] == br_x - extra_padding) or (indices[1][0][0] == br_y - extra_padding):
                continue
            logger.info(indices)

            # Images are (1, Tile_Size, Tile_Size, 1).
            # Convert to (Tile_Size, Tile_Size) to be able to do the operation.
            images = np.squeeze(images)
            images[images == max_datatype_val] = 1

            # Dispatch the function we are calling
            if callable(function):
                trans_image = function(images, kernel=kernel, n=extra_arguments)
                trans_image = trans_image.astype(datatype)
                trans_image[trans_image == 1] = max_datatype_val

            # The image needs to be converted back to (1, Tile_Size, Tile_Size, 1) to write it
            reshape_img = np.reshape(trans_image[extra_padding:-extra_padding,
                                                 extra_padding:-extra_padding],
                                     (1, Tile_Size, Tile_Size, 1))

            # Send it to the writerator
            writerator.send(reshape_img)

        # Close the image
        bw.close_image()

    except Exception:
        traceback.print_exc()

    # Always close the javabridge
    finally:
        jutil.kill_vm()
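# Usage sketch (hypothetical paths and operation): the function parameter is
# any callable taking a squeezed 2-D tile plus kernel and n keyword arguments;
# cv2.getStructuringElement is the standard OpenCV way to build the kernel.
import cv2
import numpy as np

def dilate(tile: np.ndarray, kernel, n: int) -> np.ndarray:
    # n dilation passes with the given structuring element
    return cv2.dilate(tile, kernel, iterations=n)

kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
binary_operation(image='/data/input/img.ome.tif',    # hypothetical path
                 output='/data/output/img.ome.tif',  # hypothetical path
                 function=dilate,
                 extra_arguments=1,
                 extra_padding=512,
                 kernel=kernel,
                 Tile_Size=1024)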
# Start the javabridge with proper java logging
logger.info('Initializing the javabridge...')
log_config = Path(__file__).parent.joinpath("log4j.properties")
jutil.start_vm(args=["-Dlog4j.configuration=file:{}".format(str(log_config.absolute()))],
               class_path=bioformats.JARS)

inpDir_files = str(images).split(',')

# Loop through files in the inpDir image collection and process
try:
    for f in inpDir_files:
        # Initialize the reader/writer objects
        logger.info('Segmenting image: {}'.format(f))
        br = BioReader(str(Path(inpDir).joinpath(f)))
        bw = BioWriter(str(Path(outDir).joinpath(f)), metadata=br.read_metadata())

        # Initialize the generators
        batch_size = min([20, br.maximum_batch_size(tile_size=tile_size, tile_stride=tile_stride)])
        readerator = br.iterate(tile_size=tile_size,
                                tile_stride=tile_stride,
                                batch_size=batch_size)
        writerator = bw.writerate(tile_size=tile_size,
                                  tile_stride=tile_stride,
                                  batch_size=batch_size)
        next(writerator)