def _process(self, roi_indices, next_indices=None):
    # Build the model for the given region-of-interest voxels and encode the
    # initial parameters into the optimization space.
    build_model = self._model.build(roi_indices)
    codec = self._model.get_parameter_codec()

    x0 = codec.encode(build_model.get_initial_parameters(), build_model.get_kernel_data())

    cl_runtime_info = CLRuntimeInfo()

    self._logger.info('Starting minimization')
    self._logger.info('Using MOT version {}'.format(mot.__version__))
    self._logger.info('We will use a {} precision float type for the calculations.'.format(
        'double' if cl_runtime_info.double_precision else 'single'))
    for env in cl_runtime_info.get_cl_environments():
        self._logger.info('Using device \'{}\'.'.format(str(env)))
    self._logger.info('Using compile flags: {}'.format(cl_runtime_info.get_compile_flags()))

    if self._optimizer_options:
        self._logger.info('We will use the optimizer {} with optimizer settings {}'.format(
            self._method, self._optimizer_options))
    else:
        self._logger.info('We will use the optimizer {} with default settings.'.format(self._method))

    # Wrap the objective function such that it decodes the parameters before evaluation.
    objective_func = wrap_objective_function(
        build_model.get_objective_function(), codec.get_decode_function(), x0.shape[1])

    # Run the optimization routine in MOT.
    results = minimize(objective_func, x0, method=self._method,
                       nmr_observations=build_model.get_nmr_observations(),
                       cl_runtime_info=cl_runtime_info,
                       data=build_model.get_kernel_data(),
                       options=self._optimizer_options)

    self._logger.info('Finished optimization')
    self._logger.info('Starting post-processing')

    # Decode the optimized parameters back to model space and compute the output maps.
    x_final = codec.decode(results['x'], build_model.get_kernel_data())

    results = build_model.get_post_optimization_output(x_final, results['status'])
    results.update({self._used_mask_name: np.ones(roi_indices.shape[0], dtype=bool)})

    self._logger.info('Finished post-processing')

    self._write_output_recursive(results, roi_indices)
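# Hedged sketch of the module-level imports this method appears to rely on.
# The exact import paths are assumptions and may differ between MOT/MDT
# versions; wrap_objective_function, self._model, self._logger and the other
# attributes are assumed to be provided elsewhere in the enclosing module/class.
import numpy as np

import mot
from mot.configuration import CLRuntimeInfo
from mot.optimize import minimize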
When optimized the parameters should all be equal to 1.
"""

# How many Rosenbrock dimensions/parameters we want to fit and sample
nmr_params = 2

# How many unique instances of the Rosenbrock function
nmr_problems = 10000


## Optimization ##

# The optimization starting points
x0 = np.ones((nmr_problems, nmr_params)) * 3

# Minimize the parameters of the model given the starting points.
opt_output = minimize(get_objective_function(nmr_params), x0, options={'patience': 5})

# Print the output
print(opt_output['x'])


## Sampling ##

# Create an instance of the sample routine we want to use.
sampler = AdaptiveMetropolisWithinGibbs(
    get_log_likelihood_function(nmr_params),
    get_log_prior_function(nmr_params),
    x0,
    np.ones_like(x0)  # The initial proposal standard deviations
)

# Sample each Rosenbrock instance
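# Hedged continuation: the example is truncated above. Based on the sampler
# constructed there, a plausible next step is shown below; the sample()
# signature and the get_samples() accessor are assumptions about MOT's
# sampling API and may need adjustment for your version.
sampling_output = sampler.sample(10000, thinning=1, burnin=20)

# Inspect the drawn samples; for the Rosenbrock function the per-problem
# posterior means should lie near 1.
samples = sampling_output.get_samples()
print(samples.shape)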
be perfect for every simulated distribution. In general though, fit results should match the ground truth.
"""

# The number of unique distributions, this is typically very large
nmr_simulations = 1000

# How many data points per distribution, this is typically small
nmr_datapoints = 25

# Generate a range of parameters, basically the ground truth
shape = np.random.uniform(0.1, 10, nmr_simulations)
scale = np.random.uniform(0.1, 5, nmr_simulations)

# Generate some random locations on those simulated distributions
gamma_random = np.zeros((nmr_simulations, nmr_datapoints))
for i in range(nmr_datapoints):
    gamma_random[:, i] = np.random.gamma(shape, scale)

# The optimization starting points for shape and scale
x0 = np.ones((nmr_simulations, 2))

# Minimize the parameters of the model given the starting points.
opt_output = minimize(get_objective_function(nmr_datapoints), x0,
                      data=Struct({'gamma_random': Array(gamma_random)}, 'optimization_data'))

# Print the ground truth, the fitted parameters, and the absolute differences
print(np.column_stack([shape, scale]))
print(opt_output.x)
print(np.abs(opt_output.x - np.column_stack([shape, scale])))
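# Optional follow-up (a sketch, not part of the original example): summarize
# how well the fitted shape and scale parameters recover the simulated ground
# truth across all distributions, using plain NumPy on the arrays defined above.
ground_truth = np.column_stack([shape, scale])
abs_error = np.abs(opt_output.x - ground_truth)
print('mean absolute error (shape, scale):', abs_error.mean(axis=0))
print('median absolute error (shape, scale):', np.median(abs_error, axis=0))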