def _merge_layers(input_dir, input_files, output_dir, output_file):
    """Merge a set of single-layer images into one z-stacked output image.

    Args:
        input_dir: Directory containing the input image files.
        input_files: Mapping of z-value -> list of filenames; the first
            filename for each z-value is the layer written at that z-index.
        output_dir: Directory where the merged image is written.
        output_file: Name of the merged output file.
    """
    # Process layers in ascending z order (sorted() replaces the
    # list-copy-then-sort of the original).
    zs = sorted(input_files)

    # Initialize the output file, copying metadata from the first layer
    br = BioReader(
        str(Path(input_dir).joinpath(input_files[zs[0]][0]).absolute()))
    bw = BioWriter(str(Path(output_dir).joinpath(output_file).absolute()),
                   metadata=br.read_metadata())
    bw.num_z(Z=len(zs))
    # Release the reader (and its file handle) before the copy loop
    del br

    # Load each layer and write it into its z-slice of the volume file
    # (enumerate replaces the original zip(zs, range(len(zs))) idiom).
    for i, z in enumerate(zs):
        br = BioReader(
            str(Path(input_dir).joinpath(input_files[z][0]).absolute()))
        bw.write_image(br.read_image(), Z=[i, i + 1])
        del br

    # Close the output image and delete the writer to flush/release it
    bw.close_image()
    del bw
{% endfor -%} {% for inp,val in cookiecutter._inputs|dictsort -%} {% for out,n in cookiecutter._outputs|dictsort -%} {% if val.type=="collection" and cookiecutter.use_bfio -%} # Loop through files in {{ inp }} image collection and process for i,f in enumerate({{ inp }}_files): # Load an image br = BioReader(Path({{ inp }}).joinpath(f)) image = np.squeeze(br.read_image()) # initialize the output out_image = np.zeros(image.shape,dtype=br._pix['type']) """ Do some math and science - you should replace this """ logger.info('Processing image ({}/{}): {}'.format(i,len({{ inp }}_files),f)) out_image = awesome_math_and_science_function(image) # Write the output bw = BioWriter(Path({{ out }}).joinpath(f),metadata=br.read_metadata()) bw.write_image(np.reshape(out_image,(br.num_y(),br.num_x(),br.num_z(),1,1))) {%- endif %}{% endfor %}{% endfor %} finally: {%- if cookiecutter.use_bfio %} # Close the javabridge regardless of successful completion logger.info('Closing the javabridge') jutil.kill_vm() {%- endif %} # Exit the program sys.exit()
reference_image_downscaled, max_val, min_val, method) # upscale the rough homography matrix logger.info("Inverting homography...") if method=='Projective': Rough_Homography_Upscaled=Rough_Homography_Downscaled*scale_matrix homography_inverse=np.linalg.inv(Rough_Homography_Upscaled) else: Rough_Homography_Upscaled=Rough_Homography_Downscaled homography_inverse=cv2.invertAffineTransform(Rough_Homography_Downscaled) # Initialize the output file bw = BioWriter(str(Path(outDir).joinpath(Path(registration_set[1]).name)),metadata=br_mov.read_metadata(),max_workers=write_workers) bw.num_x(br_ref.num_x()) bw.num_y(br_ref.num_y()) bw.num_z(1) bw.num_c(1) bw.num_t(1) # transformation variables reg_shape = [] reg_tiles = [] reg_homography = [] # Loop through image tiles and start threads logger.info("Starting threads...") threads = [] first_tile = True
paths.append(file) break # make sure that files were found in the current loop if len(paths) == 0: continue # Initialize the output file br = BioReader(paths[0]['file']) file_name = filepattern.output_name( filePattern, paths, {c: paths[0][c] for c in fp.variables if c != 'c'}) logger.info('Writing: {}'.format(file_name)) bw = BioWriter(str(Path(outDir).joinpath(file_name)), metadata=br.read_metadata()) del br # Modify the metadata to make sure channels are written correctly bw.num_c(len(paths)) bw._metadata.image().Pixels.channel_count = bw.num_c() # Process the data in tiles threads = [] count = 0 total = bw.num_c() * bw.num_z() * \ (bw.num_x()//chunk_size + 1) * (bw.num_y()//chunk_size + 1) with ThreadPoolExecutor(cpu_count()) as executor: for c, file in enumerate(paths): br = BioReader(file['file']) C = [c]