def run(self, distribute: Optional[bool] = False, **kwargs):
    """Voxelize shape-mode meshes and pack segmented objects for one structure.

    For each (shape mode, bin) pair the matching DNA and membrane meshes are
    voxelized and written to an OME-TIFF under ``<staging>/data``, then the
    segmented objects of every cell in that bin are collected in parallel and
    packed into a single image that is saved via the collector.
    """
    cfg = general.load_config_file()
    out_dir = self.step_local_staging_dir / "data"
    out_dir.mkdir(parents=True, exist_ok=True)

    shape_space = shapespace.ShapeSpace(cfg)
    shape_space.load_shape_space_axes()
    shape_space.load_shapemode_manifest()
    obj_collector = ObjectCollector(cfg)

    gene = "SLC25A17"
    for axis in ['DNA_MEM_PC1']:
        for bin_id in [2, 8]:
            tag = f"{axis}_B{bin_id}"
            shape_space.set_active_axis(axis, digitize=True)
            shape_space.set_active_structure(gene)
            shape_space.set_active_bin(bin_id)

            # Voxelize the membrane/DNA meshes for this bin and save as TIFF.
            dna_mesh = shape_space.get_dna_mesh_of_bin(bin_id)
            mem_mesh = shape_space.get_mem_mesh_of_bin(bin_id)
            voxels = cytoparam.voxelize_meshes([mem_mesh, dna_mesh])
            tif_path = out_dir / f"{tag}.tif"
            with writers.ome_tiff_writer.OmeTiffWriter(
                    tif_path, overwrite_file=True) as writer:
                writer.save(
                    voxels[0],
                    dimension_order='ZYX',
                    image_name=tif_path.stem)

            # Collect segmented objects for every cell in the bin, in parallel.
            cell_ids = shape_space.get_active_cellids()
            rows = [shape_space.meta.loc[cid] for cid in cell_ids]
            with concurrent.futures.ProcessPoolExecutor(
                    cluster.get_ncores()) as executor:
                grouped = list(tqdm(
                    executor.map(
                        obj_collector.collect_segmented_objects, rows),
                    total=len(cell_ids)))
            # Flatten the per-cell groups into a single list of objects.
            flat = [obj for group in grouped for obj in group]

            packed = obj_collector.pack_objs(flat)
            obj_collector.save_img(
                packed, f"{gene}_{tag}_ncells_{len(cell_ids)}")
    return None
self.domain = domain self.origin = origin self.coords_param = coords_param return def morph_on_shapemode_shape(self): self.voxelize_and_parameterize_shapemode_shape() self.morphed = cytoparam.morph_representation_on_shape( img=self.domain, param_img_coords=self.coords_param, representation=self.aggregated_parameterized_intensity) self.morphed = np.stack([self.domain, self.morphed]) return if __name__ == "__main__": config = general.load_config_file() control = controller.Controller(config) parser = argparse.ArgumentParser(description='Batch aggregation.') parser.add_argument('--csv', help='Path to the dataframe.', required=True) args = vars(parser.parse_args()) df = pd.read_csv(args['csv'], index_col=0) aggregator = Aggregator(control) for index, row in tqdm(df.iterrows(), total=len(df)): '''Concurrent processes inside. Do not use concurrent here.''' aggregator.execute(row)