def _run_composite_model(self, model, recalculate, model_names, apply_user_provided_initialization=False):
    with mot.configuration.config_context(RuntimeConfigurationAction(
            cl_environments=self._cl_runtime_info.cl_environments,
            load_balancer=self._cl_runtime_info.load_balancer)):
        if apply_user_provided_initialization:
            self._apply_user_provided_initialization_data(model)

        # use the user-specified optimizer if given, else the default optimizer for this model (cascade)
        optimizer = self._optimizer or get_optimizer_for_model(model_names)
        optimizer.set_cl_runtime_info(self._cl_runtime_info)

        fitter = SingleModelFit(model, self._input_data, self._output_folder, optimizer,
                                self._tmp_results_dir, recalculate=recalculate,
                                cascade_names=model_names)
        results = fitter.run()

        # reload the written result maps from disk, for use in subsequent cascade steps
        map_results = get_all_nifti_data(os.path.join(self._output_folder, model.name))
        return results, map_results

def combine(self):
    super(FittingProcessor, self).combine()

    for subdir in self._subdirs:
        self._combine_volumes(self._output_dir, self._tmp_storage_dir,
                              self._nifti_header, maps_subdir=subdir)

    return create_roi(get_all_nifti_data(self._output_dir), self._mask)

def sort_orientations(data_input, weight_names, extra_sortable_maps):
    """Sort the orientations of multi-direction models voxel-wise.

    This expects as input 3d/4d volumes. Do not use this with 2d arrays.

    This can be used to sort, for example, simulations of the BallStick_r3 model (with three Sticks).
    There is no voxel-wise order over Sticks since for the model they are all equal compartments.
    However, when using optimization or ARD with sampling, there is order within the compartments since the ARD is
    commonly placed on the second and third Sticks, meaning these Sticks and their corresponding orientations are
    compressed to zero if they are not supported. In that case, the Stick with the primary orientation of diffusion
    has to be the first.

    This method accepts as input results from (MDT) model fitting and is able to sort all the maps belonging to a
    given set of equal compartments per voxel.

    Example::

        sort_orientations('./output/BallStick_r3',
                          ['w_stick0.w', 'w_stick1.w', 'w_stick2.w'],
                          [['Stick0.theta', 'Stick1.theta', 'Stick2.theta'],
                           ['Stick0.phi', 'Stick1.phi', 'Stick2.phi'], ...])

    Args:
        data_input (str or dict): either a directory or a dictionary containing the maps
        weight_names (iterable of str): the names of the maps we use for sorting all other maps.
            These will be sorted as well.
        extra_sortable_maps (iterable of iterable): the list of additional maps to sort. Every element in the given
            list should be another list with the names of the maps. The length of each of these second-layer lists
            should match the length of ``weight_names``.

    Returns:
        dict: the sorted results in a new dictionary. This returns all input maps with some of them sorted.
    """
    if isinstance(data_input, string_types):
        input_maps = get_all_nifti_data(data_input)
        result_maps = input_maps
    else:
        input_maps = data_input
        result_maps = copy(input_maps)

    weight_names = list(weight_names)
    sortable_maps = copy(extra_sortable_maps)
    sortable_maps.append(weight_names)

    sort_index_matrix = create_sort_matrix([input_maps[k] for k in weight_names], reversed_sort=True)

    for sortable_map_names in sortable_maps:
        sorted_maps = dict(zip(sortable_map_names,
                               sort_volumes_per_voxel([input_maps[k] for k in sortable_map_names],
                                                      sort_index_matrix)))
        result_maps.update(sorted_maps)

    return result_maps

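# Illustrative call (a sketch following the docstring example above; the output path and the
# exact map names are assumptions based on a typical BallStick_r3 fit, not taken from this source):
#
#     sorted_maps = sort_orientations(
#         './output/subject_01/BallStick_r3',
#         ['w_stick0.w', 'w_stick1.w', 'w_stick2.w'],
#         [['Stick0.theta', 'Stick1.theta', 'Stick2.theta'],
#          ['Stick0.phi', 'Stick1.phi', 'Stick2.phi']])
#
# The returned dictionary contains every input map, with the weights and the listed theta/phi
# maps reordered per voxel by descending weight.
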
def run(self):
    """Fits the composite model and returns the results as ROI lists per map."""
    if not self.recalculate and model_output_exists(self._model, self._output_folder):
        maps = get_all_nifti_data(self._output_path)
        self._logger.info('Not recalculating {} model'.format(self._model.name))
        return create_roi(maps, self._input_data.mask)

    with per_model_logging_context(self._output_path):
        self._logger.info('Using MDT version {}'.format(__version__))
        self._logger.info('Preparing for model {0}'.format(self._model.name))
        self._logger.info('Current cascade: {0}'.format(self._cascade_names))

        self._model.set_input_data(self._input_data)

        if self.recalculate:
            if os.path.exists(self._output_path):
                list(map(os.remove, glob.glob(os.path.join(self._output_path, '*.nii*'))))

        if not os.path.exists(self._output_path):
            os.makedirs(self._output_path)

        with self._logging():
            tmp_dir = get_full_tmp_results_path(self._output_path, self._tmp_results_dir)
            self._logger.info('Saving temporary results in {}.'.format(tmp_dir))

            worker = FittingProcessor(self._optimizer, self._model, self._input_data.mask,
                                      self._input_data.nifti_header, self._output_path,
                                      tmp_dir, self.recalculate)

            processing_strategy = get_processing_strategy('optimization')
            results = processing_strategy.process(worker)

        self._write_protocol(self._model.get_input_data().protocol)

    return results

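# Note on the fitting flow (derived from the methods in this extract): run() constructs a
# FittingProcessor and hands it to the processing strategy, which optimizes the model in
# (possibly chunked) batches while writing temporary results to ``tmp_dir``; the processor's
# combine() shown earlier then merges those temporary volumes into the final output directory
# and returns the maps as ROI lists.
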
def create_signal_estimates(model, input_data, parameters):
    """Create the signal estimates for your estimated model parameters.

    This function is typically used to obtain signal estimates from optimization results.

    This function evaluates the model as it is used in model fitting and sampling. That is, it includes the
    gradient deviations (if set in the input data) and loads all static and fixed parameter maps.

    Args:
        model (str or model): the model or the name of the model to use for estimating the signals
        input_data (mdt.utils.MRIInputData): the input data object, we will set this to the model
        parameters (str or dict): either a directory name or a dictionary containing the optimization results.
            Each element is assumed to be a 4d volume with the voxels we are using for the simulations.

    Returns:
        ndarray: the 4d array with the signal estimates per voxel
    """
    if isinstance(model, string_types):
        model = get_model(model)()

    model.set_input_data(input_data)

    if isinstance(parameters, string_types):
        parameters = get_all_nifti_data(parameters)

    parameters = create_roi(parameters, input_data.mask)
    parameters = model.param_dict_to_array(parameters)

    build_model = model.build()

    if parameters.shape[0] != build_model.get_nmr_problems():
        raise ValueError('The number of voxels in the parameters does not match those in the model.')

    calculator = CalculateModelEstimates()
    results = calculator.calculate(build_model, parameters)  # reuse the model built above

    return restore_volumes(results, input_data.mask)
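
# Hedged usage sketch (not part of this source; ``mdt.load_input_data`` and the file paths are
# assumptions used only for illustration):
#
#     import mdt
#     input_data = mdt.load_input_data('data.nii.gz', 'protocol.prtcl', 'mask.nii.gz')
#     estimates = create_signal_estimates('BallStick_r1', input_data,
#                                         './output/subject_01/BallStick_r1')
#
# ``estimates`` is then a 4d volume with one estimated signal per protocol observation per voxel,
# which can be compared against the measured data to inspect the quality of the fit.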