def param_num(spins=None):
    """Determine the number of parameters in the model.

    @keyword spins: The list of spin data containers for the block.
    @type spins:    list of SpinContainer instances
    @return:        The number of model parameters.
    @rtype:         int
    """
    # The R2eff model - the parameter count simply scales with the spin count.
    if cdp.model_type == 'R2eff':
        spin_num = count_spins(spins)

        # Two parameters per spin for exponential curves (with clustering),
        # one per spin for fixed relaxation time period experiments.
        if has_exponential_exp_type():
            return 2 * spin_num
        return 1 * spin_num

    # Only the selected spins of the cluster contribute to the counts below.
    selected = [spin for spin in spins if spin.select]

    # Cluster sanity check - every selected spin must carry the same number of parameters.
    for spin in selected:
        if len(spin.params) != len(spins[0].params):
            raise RelaxError("The number of parameters for each spin in the cluster are not the same.")

    # The parameter count accumulator.
    num = 0

    # R10 parameters - one per experiment type and spectrometer frequency combination.
    for spin in selected:
        for param in spin.params:
            if param in ['r1']:
                for exp_type, frq in loop_exp_frq():
                    num += 1

    # R20 parameters - one per experiment type and spectrometer frequency combination.
    for spin in selected:
        for param in spin.params:
            if param in PARAMS_R20:
                for exp_type, frq in loop_exp_frq():
                    num += 1

    # Spin specific parameters - counted once for every selected spin.
    spin_params = ['phi_ex', 'phi_ex_B', 'phi_ex_C', 'padw2', 'dw', 'dwH']
    for spin in selected:
        for param in spin.params:
            if param in spin_params:
                num += 1

    # All remaining (non spin-specific) parameters - counted for a single spin only.
    all_params = ['r1'] + PARAMS_R20 + spin_params
    if selected:
        for param in selected[0].params:
            if param not in all_params:
                num += 1

    # Return the total.
    return num
def run(self, processor, completed):
    """Set up and perform the optimisation."""
    # Determine once whether a grid search was requested.
    grid_search = search('^[Gg]rid', self.min_algor)

    # Printout for the spin block being optimised.
    if self.verbosity >= 1:
        # Extra vertical space before the individual spin block section.
        prespace = 2
        if self.verbosity >= 2:
            prespace += 2
        subsection(file=sys.stdout, text="Fitting to the spin block %s"%self.spin_ids, prespace=prespace)

        # Report the unconstrained grid size.
        if grid_search:
            grid_size = 1
            for inc in self.inc:
                grid_size = mul(grid_size, inc)
            print("Unconstrained grid search size: %s (constraints may decrease this size).\n" % grid_size)

    # Set up the target function to minimise.
    model = Dispersion(model=self.spins[0].model, num_params=self.param_num, num_spins=count_spins(self.spins), num_frq=len(self.fields), exp_types=self.exp_types, values=self.values, errors=self.errors, missing=self.missing, frqs=self.frqs, frqs_H=self.frqs_H, cpmg_frqs=self.cpmg_frqs, spin_lock_nu1=self.spin_lock_nu1, chemical_shifts=self.chemical_shifts, offset=self.offsets, tilt_angles=self.tilt_angles, r1=self.r1, relax_times=self.relax_times, scaling_matrix=self.scaling_matrix, r1_fit=self.r1_fit)

    # Grid search optimisation.
    if grid_search:
        opt_results = grid(func=model.func, args=(), num_incs=self.inc, lower=self.lower, upper=self.upper, A=self.A, b=self.b, verbosity=self.verbosity)

        # Unpack - the grid search supplies no gradient or Hessian call counts.
        param_vector, chi2, iter_count, warning = opt_results
        f_count = iter_count
        g_count = 0.0
        h_count = 0.0

    # Standard minimisation.
    else:
        opt_results = generic_minimise(func=model.func, args=(), x0=self.param_vector, min_algor=self.min_algor, min_options=self.min_options, func_tol=self.func_tol, grad_tol=self.grad_tol, maxiter=self.max_iterations, A=self.A, b=self.b, full_output=True, print_flag=self.verbosity)

        # Nothing to report when the minimisation failed to return results.
        if opt_results is None:
            return
        param_vector, chi2, iter_count, f_count, g_count, h_count, warning = opt_results

    # Report the optimised, descaled parameter values.
    if self.verbosity:
        print("\nOptimised parameter values:")
        for index, value in enumerate(param_vector):
            print("%-20s %25.15f" % (self.param_names[index], value*self.scaling_matrix[index, index]))

    # Package the results and send them back to the master.
    processor.return_object(Disp_result_command(processor=processor, memo_id=self.memo_id, param_vector=param_vector, chi2=chi2, iter_count=iter_count, f_count=f_count, g_count=g_count, h_count=h_count, warning=warning, missing=self.missing, back_calc=model.get_back_calc(), completed=False))