def _write_thresholded_label_map(self, label_map: np.ndarray, hits, out: Path):
    """
    Write a label map with only the 'hit' organs in it
    """
    if len(hits) > 0:
        # Make a copy as the label map may be being used elsewhere
        thresholded = np.copy(label_map)

        # Clear any non-hits
        thresholded[~np.isin(thresholded, hits)] = 0

        write_array(thresholded, out, ras=True)
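# Illustrative sketch (not part of the pipeline): shows how the np.isin mask used in
# _write_thresholded_label_map keeps only the 'hit' organ labels. The toy label map
# and hit list below are invented for this example.
def _example_threshold_label_map():
    import numpy as np  # local import so the sketch stands alone

    toy_labels = np.array([[0, 1, 2],
                           [2, 3, 3]])
    hits = [2, 3]

    thresholded = np.copy(toy_labels)
    # Zero every voxel whose label is not in the hit list (here, label 1)
    thresholded[~np.isin(thresholded, hits)] = 0

    # thresholded is now [[0, 0, 2],
    #                     [2, 3, 3]]
    return thresholded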
def _write(self, t_stats, pvals, qvals, outdir, name):
    filtered_tstats = result_cutoff_filter(t_stats, qvals)

    filtered_result = self.rebuild_array(filtered_tstats, self.shape, self.mask)
    unfiltered_result = self.rebuild_array(t_stats, self.shape, self.mask)

    heatmap_path = outdir / f'{name}_{self.stats_name}_t_fdr5.nrrd'
    heatmap_path_unfiltered = outdir / f'{name}_{self.stats_name}_t.nrrd'

    # Write qval-filtered t-stats
    write_array(filtered_result, heatmap_path, ras=True)

    # Write raw t-stats
    write_array(unfiltered_result, heatmap_path_unfiltered, ras=True)

    return heatmap_path
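# Illustrative sketch only: result_cutoff_filter() is imported from elsewhere in the
# codebase and is not redefined here. Judging by the '_t_fdr5' output name, it is
# assumed to zero out statistics whose FDR q-value fails a 5% cutoff; the helper
# below demonstrates that assumed behaviour on a plain array and is not the real
# implementation.
def _example_fdr5_cutoff(stats, qvals, alpha=0.05):
    import numpy as np  # local import so the sketch stands alone

    filtered = np.copy(np.asarray(stats, dtype=float))
    filtered[np.asarray(qvals) > alpha] = 0  # non-significant voxels are zeroed
    return filtered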
def _write(self, t_stats, pvals, qvals, outdir, name):
    if self.two_way:
        # Two-way results are concatenated as [genotype, treatment, interaction];
        # split them back into their three groups
        pvals = np.array_split(pvals, 3)
        f_stats = np.array_split(t_stats, 3)
        qvals = np.array_split(qvals, 3)

        groups = ['geno', 'treat', 'int']

        for i, f_stat in enumerate(f_stats):
            filtered_fstats = result_cutoff_filter(f_stat, qvals[i])

            filtered_result = self.rebuild_array(filtered_fstats, self.shape, self.mask)
            unfiltered_result = self.rebuild_array(f_stat, self.shape, self.mask)

            heatmap_path = outdir / f'{name}_{self.stats_name}_{groups[i]}_f_fdr5.nrrd'
            heatmap_path_unfiltered = outdir / f'{name}_{self.stats_name}_{groups[i]}_f.nrrd'

            # Write qval-filtered F-stats
            write_array(filtered_result, heatmap_path, ras=True)

            # Write raw F-stats
            write_array(unfiltered_result, heatmap_path_unfiltered, ras=True)

    else:
        filtered_tstats = result_cutoff_filter(t_stats, qvals)

        filtered_result = self.rebuild_array(filtered_tstats, self.shape, self.mask)
        unfiltered_result = self.rebuild_array(t_stats, self.shape, self.mask)

        heatmap_path = outdir / f'{name}_{self.stats_name}_t_fdr5.nrrd'
        heatmap_path_unfiltered = outdir / f'{name}_{self.stats_name}_t.nrrd'

        # Write qval-filtered t-stats
        write_array(filtered_result, heatmap_path, ras=True)

        # Write raw t-stats
        write_array(unfiltered_result, heatmap_path_unfiltered, ras=True)

    # Note: in the two-way case this returns the path written in the final loop
    # iteration (the interaction heatmap)
    return heatmap_path
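# Illustrative sketch only: shows how the packed two-way statistics are split into
# three equal-length groups (geno / treat / int), mirroring the np.array_split(..., 3)
# calls above. The toy array is invented for this example.
def _example_split_two_way_stats():
    import numpy as np  # local import so the sketch stands alone

    # e.g. 3 voxels per group, concatenated as [geno | treat | int]
    packed = np.arange(9, dtype=float)
    geno, treat, interaction = np.array_split(packed, 3)
    return {'geno': geno, 'treat': treat, 'int': interaction}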
def _get_deformations(tform: Path,
                      deformation_dir: Path,
                      jacobian_dir: Path,
                      log_jacobians_dir: Path,
                      filetype: str,
                      specimen_id: str,
                      threads: int,
                      make_jacmat: Union[Path, bool],  # directory for full jacobian matrices, or False to skip
                      write_vectors: bool = False,
                      write_raw_jacobians: bool = False,
                      write_log_jacobians: bool = True) -> Union[None, np.ndarray]:
    """
    Generate spatial jacobians and optionally deformation files.

    Returns
    -------
    The jacobian determinant array if it contains any values <= 0 (folding), otherwise None
    """
    cmd = ['transformix',
           '-out', str(deformation_dir),
           '-tp', str(tform),
           '-jac', 'all'
           ]

    if write_vectors:
        cmd.extend(['-def', 'all'])
    if make_jacmat:
        cmd.extend(['-jacmat', 'all'])
    if threads:
        cmd.extend(['-threads', str(threads)])

    try:
        subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        logging.exception('transformix failed')
        logging.exception(e)
        # raise subprocess.CalledProcessError(f'### Transformix failed ###\nError message: {e}\nelastix command:{cmd}')
        raise ValueError('transformix failed')
    else:
        deformation_out = deformation_dir / f'deformationField.{filetype}'
        jacobian_out = deformation_dir / f'spatialJacobian.{filetype}'

        # Rename and move the output
        if write_vectors:
            new_def = deformation_dir / (specimen_id + '.' + filetype)
            shutil.move(deformation_out, new_def)

        new_jac = jacobian_dir / (specimen_id + '.' + filetype)

        try:
            shutil.move(jacobian_out, new_jac)
        except IOError:
            # Bit of a hack. If transforms contain subtransforms from pairwise registration, elastix is unable
            # to generate deformation fields, so compute the jacobian with ITK instead (this relies on the
            # deformation field written above via write_vectors)
            def_img = sitk.ReadImage(str(new_def))
            jac_img = sitk.DisplacementFieldJacobianDeterminant(def_img)
            sitk.WriteImage(jac_img, str(new_jac))

        # If we have the full jacobian matrix, rename and move that too
        if make_jacmat:
            make_jacmat.mkdir()
            jacmat_file = deformation_dir / f'fullSpatialJacobian.{filetype}'  # The name given by elastix
            jacmat_new = make_jacmat / (specimen_id + '.' + filetype)          # New informative name
            shutil.move(jacmat_file, jacmat_new)

        # Test whether there has been any folding in the jacobians
        jac_img = sitk.ReadImage(str(new_jac))
        jac_arr = sitk.GetArrayFromImage(jac_img)
        jac_min = jac_arr.min()
        jac_max = jac_arr.max()
        logging.info("{} spatial jacobian, min:{}, max:{}".format(specimen_id, jac_min, jac_max))

        if jac_min <= 0:
            logging.warning(
                "The jacobian determinant for {} has negative values. "
                "You may need to add a penalty term to the later registration stages".format(specimen_id))
            # Highlight the folding regions by zeroing everything else
            jac_arr[jac_arr > 0] = 0
            log_jac_path = log_jacobians_dir / ('ERROR_NEGATIVE_JACOBIANS_' + specimen_id + '.' + filetype)
            common.write_array(jac_arr, log_jac_path)

        elif write_log_jacobians:
            # Write out the log-transformed jacobians
            log_jac = np.log(jac_arr)
            log_jac_path = log_jacobians_dir / ('log_jac_' + specimen_id + '.' + filetype)

            if not write_raw_jacobians:
                new_jac.unlink()

            common.write_array(log_jac, log_jac_path)

    logging.info('Finished generating deformation fields')

    if jac_min <= 0:
        return jac_arr
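# Illustrative sketch only: the folding check from _get_deformations() in a standalone
# form. Voxels where the jacobian determinant is <= 0 indicate that the deformation
# field folds back on itself. The function name and return convention are invented for
# this example; the pipeline performs this check inline above.
def _example_find_folding(jac_arr):
    import numpy as np  # local import so the sketch stands alone

    if jac_arr.min() <= 0:
        folded = np.copy(jac_arr)
        folded[folded > 0] = 0  # keep only the non-positive (folded) voxels
        return folded
    return None  # no folding: log-jacobians can be taken safely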
def secondary_segmentation(config: LamaConfig):
    """
    Use user-added scripts to segment/clean up organs

    Parameters
    ----------
    config
        The LamaConfig for the current run
    """
    plugin_dir = config.config_dir / config['seg_plugin_dir']

    if not plugin_dir.is_dir():
        logging.error(f'Cannot find plugin directory: {plugin_dir}')
        return

    # Find the directories containing the segmentations
    # Get the final inversion stage
    invert_config = config['inverted_transforms'] / PROPAGATE_CONFIG
    segmentation_dir = cfg_load(invert_config)['label_propagation_order'][-1]  # rename to segmentation stage

    inverted_label_dir = config['inverted_labels'] / segmentation_dir
    initial_segmentation_path = next(inverted_label_dir.glob('**/*.nrrd'))

    first_reg_dir = config['root_reg_dir'] / config['registration_stage_params'][0]['stage_id']  # usually rigid
    image_to_segment = next(first_reg_dir.glob('**/*.nrrd'))

    segmentations = []

    for plugin_src in [x for x in plugin_dir.iterdir() if str(x).endswith('.py') and x.name != 'plugin_interface.py']:

        # Catch all exceptions as we don't want a plugin crashing the pipeline
        try:
            spec = importlib.util.spec_from_file_location(plugin_src.stem, str(plugin_src))
            plugin = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(plugin)

            new_segmentation = plugin.run(image_to_segment, initial_segmentation_path)

        except Exception as e:
            logging.error(f'Plugin {plugin_src} failed\n{e}')
        else:
            segmentations.append(new_segmentation)

    if not segmentations:
        logging.error(f'No segmentations returned from plugins in {plugin_dir}')
        return

    # Merge all the segmentations into a single label map. If there are any overlaps,
    # the plugin called last has priority
    seg = None

    for s in segmentations:
        if seg is None:
            seg = s
            continue
        seg[s != 0] = s[s != 0]

    additional_seg_dir = config.mkdir('additional_seg_dir')
    write_array(seg, additional_seg_dir / f'{config.config_dir.name}_additional_seg.nrrd')  # TODO include specimen name
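# Illustrative sketch of a secondary-segmentation plugin, inferred from the loop above:
# each .py file in seg_plugin_dir (apart from plugin_interface.py) is imported and its
# run(image_to_segment, initial_segmentation_path) is called, and the result is merged
# as a numpy label array. In a real plugin file this function would be named run();
# the name, thresholding rule and label value below are invented placeholders, not a
# real segmentation method.
def _example_plugin_run(image_to_segment, initial_segmentation_path):
    import numpy as np
    import SimpleITK as sitk

    img = sitk.GetArrayFromImage(sitk.ReadImage(str(image_to_segment)))

    # Placeholder rule: label voxels brighter than the mean intensity as organ 1.
    # A real plugin could also read initial_segmentation_path and refine it.
    seg = np.zeros_like(img, dtype=np.uint8)
    seg[img > img.mean()] = 1
    return seg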