Code example #1
# Imports (relax module paths assumed from the relax trunk layout).
from collections import OrderedDict
from pipe_control.mol_res_spin import generate_spin_string, spin_loop
from specific_analyses.relax_disp.data import loop_exp_frq_offset_point, return_param_key_from_data

# Minimisation and Monte Carlo settings used later in the original script.
min_algor = 'Newton'
min_options = ()
sim_boot = 6000
scaling_list = [1.0, 1.0]

# Initialise the data dictionary.
my_dic = OrderedDict()

# Get the data.
for cur_spin, mol_name, resi, resn, spin_id in spin_loop(full_info=True, return_id=True, skip_desel=True):
    # Add key to dic.
    my_dic[spin_id] = OrderedDict()

    # Generate spin string.
    spin_string = generate_spin_string(spin=cur_spin, mol_name=mol_name, res_num=resi, res_name=resn)

    for exp_type, frq, offset, point, ei, mi, oi, di in loop_exp_frq_offset_point(return_indices=True):
        # Generate the param_key.
        param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

        # Add key to dic.
        my_dic[spin_id][param_key] = OrderedDict()

        # Get the value.
        r2eff = getattr(cur_spin, 'r2eff')[param_key]
        my_dic[spin_id][param_key]['r2eff'] = r2eff
        #r2eff_err = getattr(cur_spin, 'r2eff_err')[param_key]
        i0 = getattr(cur_spin, 'i0')[param_key]
        #i0_err = getattr(cur_spin, 'i0_err')[param_key]
        my_dic[spin_id][param_key]['i0'] = i0
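
The snippet above builds a nested ordered dictionary of the form my_dic[spin_id][param_key] = {'r2eff': ..., 'i0': ...}. A minimal, self-contained sketch of walking such a structure outside of relax (the spin ID and parameter key below are made up for illustration):

from collections import OrderedDict

# Hypothetical data, mimicking the structure built above.
my_dic = OrderedDict()
my_dic[':1@N'] = OrderedDict()
my_dic[':1@N']['sq_cpmg_599.89_0.0_50.0'] = OrderedDict([('r2eff', 10.3), ('i0', 120000.0)])

# Walk the nested dictionary, spin by spin and dispersion point by point.
for spin_id, point_dic in my_dic.items():
    for param_key, values in point_dic.items():
        print("%s  %s  r2eff=%.3f  i0=%.1f" % (spin_id, param_key, values['r2eff'], values['i0']))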
Code example #2
File: optimisation.py  Project: pombredanne/relax
def calculate_r2eff():
    """Calculate the R2eff values for fixed relaxation time period data."""

    # Data checks.
    check_exp_type()
    check_disp_points()
    check_exp_type_fixed_time()

    # Printouts.
    print("Calculating the R2eff/R1rho values for fixed relaxation time period data.")

    # Loop over the spins.
    for spin, mol_name, resi, resn, spin_id in spin_loop(full_info=True, return_id=True, skip_desel=True):
        # Spin ID printout.
        print("Spin '%s'." % spin_id)

        # Skip spins which have no data.
        if not hasattr(spin, 'peak_intensity'):
            continue

        # Initialise the data structures.
        if not hasattr(spin, 'r2eff'):
            spin.r2eff = {}
        if not hasattr(spin, 'r2eff_err'):
            spin.r2eff_err = {}

        # Loop over all the data.
        for exp_type, frq, offset, point, time in loop_exp_frq_offset_point_time():
            # The three keys.
            ref_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # Check for missing data.
            missing = False
            for i in range(len(ref_keys)):
                if ref_keys[i] not in spin.peak_intensity:
                    missing = True
            for i in range(len(int_keys)):
                if int_keys[i] not in spin.peak_intensity:
                    missing = True
            if missing:
                continue

            # Average the reference intensity data and errors.
            ref_intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            ref_intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time, error=True)

            # Average the intensity data and errors.
            intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True)

            # Check for math domain errors: a zero reference intensity, or a non-positive intensity ratio for the log.
            if ref_intensity == 0.0:
                skip_data = True
            elif float(intensity) / ref_intensity <= 0.0:
                skip_data = True
            else:
                skip_data = False

            if skip_data:
                spin_string = generate_spin_string(spin=spin, mol_name=mol_name, res_num=resi, res_name=resn)
                msg = "Math log(I / I_ref) domain error for spin '%s' in R2eff value calculation for fixed relaxation time period data.  I=%3.3f, I_ref=%3.3f.  The point is skipped." % (spin_string, intensity, ref_intensity)
                warn(RelaxWarning("%s" % msg))
                point_info = "This happened for '%s' at %3.1f MHz, for offset=%3.1f ppm and dispersion point %3.1f Hz and time %1.2f s.\n" % (exp_type, frq/1E6, offset, point, time)
                print(point_info)
            else:
                # Calculate the R2eff value.
                spin.r2eff[param_key] = calc_two_point_r2eff(relax_time=time, I_ref=ref_intensity, I=intensity)

                # Calculate the R2eff error.
                spin.r2eff_err[param_key] = calc_two_point_r2eff_err(relax_time=time, I_ref=ref_intensity, I=intensity, I_ref_err=ref_intensity_err, I_err=intensity_err)
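
The calc_two_point_r2eff() and calc_two_point_r2eff_err() functions used above are not shown here. For fixed relaxation time data they reduce to the standard two-point relation R2eff = -ln(I / I_ref) / T and its first-order error propagation; a minimal sketch under that assumption (the actual relax implementation may differ in detail):

from math import log, sqrt

def two_point_r2eff(relax_time, I_ref, I):
    # R2eff from a single reference/dispersion intensity pair, assuming
    # R2eff = -ln(I / I_ref) / T for a fixed relaxation period T.
    return -log(I / I_ref) / relax_time

def two_point_r2eff_err(relax_time, I_ref, I, I_ref_err, I_err):
    # Propagate the two intensity errors through the log-ratio.
    return sqrt((I_ref_err / I_ref)**2 + (I_err / I)**2) / relax_time

# Example: a 40 ms relaxation period with the intensity decayed to ~67%.
print(two_point_r2eff(0.04, 100000.0, 67032.0))   # ~10.0 s^-1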
Code example #3
File: 4_inspect_results.py  Project: tlinnet/relax
    if model == MODEL_R2EFF:
        continue

    # Switch to pipe.
    pipe_name = '%s - relax_disp' % (model)
    pipes.append(pipe_name)
    pipe.switch(pipe_name=pipe_name)
    print("\nModel: %s" % (model))

    # Loop over the spins.
    for cur_spin, mol_name, resi, resn, spin_id in spin_loop(full_info=True,
                                                             return_id=True,
                                                             skip_desel=True):
        # Generate spin string.
        spin_string = generate_spin_string(spin=cur_spin,
                                           mol_name=mol_name,
                                           res_num=resi,
                                           res_name=resn)

        # Loop over the parameters.
        print("\nOptimised parameters for spin: %s" % (spin_string))
        for param in cur_spin.params + ['chi2']:
            # Get the value.
            if param in ['r1', 'r2']:
                for exp_type, frq, ei, mi in loop_exp_frq(return_indices=True):
                    # Generate the R20 key.
                    r20_key = generate_r20_key(exp_type=exp_type, frq=frq)

                    # Get the value.
                    value = getattr(cur_spin, param)[r20_key]

                    # Print value.
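
The example above is cut off right before the value is printed. A hypothetical continuation of the innermost loop (the format string is illustrative and not taken from 4_inspect_results.py):

# Hypothetical printout inside the loop_exp_frq() loop; the real script's format may differ.
print("%-10s %-30s %10.5f" % (param, r20_key, value))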
Code example #4
File: optimisation.py  Project: tlinnet/relax
def calculate_r2eff():
    """Calculate the R2eff values for fixed relaxation time period data."""

    # Data checks.
    check_exp_type()
    check_disp_points()
    check_exp_type_fixed_time()

    # Printouts.
    print("Calculating the R2eff/R1rho values for fixed relaxation time period data.")

    # Loop over the spins.
    for spin, mol_name, resi, resn, spin_id in spin_loop(full_info=True, return_id=True, skip_desel=True):
        # Spin ID printout.
        print("Spin '%s'." % spin_id)

        # Skip spins which have no data.
        if not hasattr(spin, 'peak_intensity'):
            continue

        # Initialise the data structures.
        if not hasattr(spin, 'r2eff'):
            spin.r2eff = {}
        if not hasattr(spin, 'r2eff_err'):
            spin.r2eff_err = {}

        # Loop over all the data.
        for exp_type, frq, offset, point, time in loop_exp_frq_offset_point_time():
            # The three keys.
            ref_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # Check for missing data.
            missing = False
            for i in range(len(ref_keys)):
                if ref_keys[i] not in spin.peak_intensity:
                    missing = True
            for i in range(len(int_keys)):
                if int_keys[i] not in spin.peak_intensity:
                    missing = True
            if missing:
                continue

            # Average the reference intensity data and errors.
            ref_intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            ref_intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time, error=True)

            # Average the intensity data and errors.
            intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True)

            # Check for math domain errors: a zero reference intensity, or a non-positive intensity ratio for the log.
            if ref_intensity == 0.0:
                skip_data = True
            elif float(intensity) / ref_intensity <= 0.0:
                skip_data = True
            else:
                skip_data = False

            if skip_data:
                spin_string = generate_spin_string(spin=spin, mol_name=mol_name, res_num=resi, res_name=resn)
                msg = "Math log(I / I_ref) domain error for spin '%s' in R2eff value calculation for fixed relaxation time period data.  I=%3.3f, I_ref=%3.3f.  The point is skipped." % (spin_string, intensity, ref_intensity)
                warn(RelaxWarning("%s" % msg))
                point_info = "This happened for '%s' at %3.1f MHz, for offset=%3.1f ppm and dispersion point %3.1f Hz and time %1.2f s.\n" % (exp_type, frq/1E6, offset, point, time)
                print(point_info)
            else:
                # Calculate the R2eff value.
                spin.r2eff[param_key] = calc_two_point_r2eff(relax_time=time, I_ref=ref_intensity, I=intensity)

                # Calculate the R2eff error.
                spin.r2eff_err[param_key] = calc_two_point_r2eff_err(relax_time=time, I_ref=ref_intensity, I=intensity, I_ref_err=ref_intensity_err, I_err=intensity_err)
Code example #5
def estimate_r2eff(method='minfx',
                   min_algor='simplex',
                   c_code=True,
                   constraints=False,
                   chi2_jacobian=False,
                   spin_id=None,
                   ftol=1e-15,
                   xtol=1e-15,
                   maxfev=10000000,
                   factor=100.0,
                   verbosity=1):
    """Estimate r2eff and errors by exponential curve fitting with scipy.optimize.leastsq or minfx.

    THIS IS ONLY FOR TESTING.

    scipy.optimize.leastsq is a wrapper around MINPACK's lmdif and lmder algorithms.

    MINPACK is a FORTRAN90 library which solves systems of nonlinear equations, or carries out the least squares minimization of the residual of a set of linear or nonlinear equations.

    Errors are calculated by taking the square root of the reported covariance.

    This can be a huge time-saving step when performing model fitting in R1rho.
    Errors of R2eff values are normally estimated by time-consuming Monte Carlo simulations.

    The initial guess for the starting parameters, x0 = [r2eff_est, i0_est], is obtained by converting the exponential curve to a linear problem.
    The initial guess is then found by linear least squares of: ln(Intensity[j]) = ln(i0) - time[j] * r2eff.


    @keyword method:            The method to minimise and estimate errors.  Options are: 'minfx' or 'scipy.optimize.leastsq'.
    @type method:               string
    @keyword min_algor:         The minimisation algorithm.
    @type min_algor:            string
    @keyword c_code:            Whether to optimise with the C code.
    @type c_code:               bool
    @keyword constraints:       Whether constraints should be used.
    @type constraints:          bool
    @keyword chi2_jacobian:     Whether the chi2 Jacobian should be used.
    @type chi2_jacobian:        bool
    @keyword spin_id:           The spin identification string.
    @type spin_id:              str
    @keyword ftol:              The function tolerance for the relative error desired in the sum of squares, passed to leastsq.
    @type ftol:                 float
    @keyword xtol:              The error tolerance for the relative error desired in the approximate solution, passed to leastsq.
    @type xtol:                 float
    @keyword maxfev:            The maximum number of function evaluations, passed to leastsq.  If zero, then 100*(N+1) is the maximum number of function calls.  N is the number of elements in x0=[r2eff, i0].
    @type maxfev:               int
    @keyword factor:            The initial step bound, passed to leastsq.  It determines the initial step bound (factor * ||diag * x||).  Should be in the interval (0.1, 100).
    @type factor:               float
    @keyword verbosity:         The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:            int
    """

    # Perform checks.
    check_model_type(model=MODEL_R2EFF)

    # Check that the C modules have been compiled.
    if not C_module_exp_fn and method == 'minfx':
        raise RelaxError(
            "Relaxation curve fitting is not available.  Try compiling the C modules on your platform."
        )

    # Initialise the Exp class and set the scipy.optimize.leastsq settings.
    E = Exp(verbosity=verbosity)
    E.set_settings_leastsq(ftol=ftol, xtol=xtol, maxfev=maxfev, factor=factor)

    # Check if intensity errors have already been calculated by the user.
    precalc = True
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(
            selection=spin_id, full_info=True, return_id=True,
            skip_desel=True):
        # No peak intensity error structure.
        if not hasattr(cur_spin, 'peak_intensity_err'):
            precalc = False
            break

        # Determine if a spectrum ID is missing from the list.
        for id in cdp.spectrum_ids:
            if id not in cur_spin.peak_intensity_err:
                precalc = False
                break

    # Loop over the spins.
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(
            selection=spin_id, full_info=True, return_id=True,
            skip_desel=True):
        # Generate spin string.
        spin_string = generate_spin_string(spin=cur_spin,
                                           mol_name=mol_name,
                                           res_num=resi,
                                           res_name=resn)

        # Print information.
        if E.verbosity >= 1:
            # Individual spin block section.
            top = 2
            if E.verbosity >= 2:
                top += 2
            subsection(file=sys.stdout,
                       text="Fitting with %s to: %s" % (method, spin_string),
                       prespace=top)
            if method == 'minfx':
                subsection(
                    file=sys.stdout,
                    text=
                    "min_algor='%s', c_code=%s, constraints=%s, chi2_jacobian?=%s"
                    % (min_algor, c_code, constraints, chi2_jacobian),
                    prespace=0)

        # Loop over each spectrometer frequency and dispersion point.
        for exp_type, frq, offset, point, ei, mi, oi, di in loop_exp_frq_offset_point(
                return_indices=True):
            # The parameter key.
            param_key = return_param_key_from_data(exp_type=exp_type,
                                                   frq=frq,
                                                   offset=offset,
                                                   point=point)

            # The peak intensities, errors and times.
            values = []
            errors = []
            times = []
            for time in loop_time(exp_type=exp_type,
                                  frq=frq,
                                  offset=offset,
                                  point=point):
                values.append(
                    average_intensity(spin=cur_spin,
                                      exp_type=exp_type,
                                      frq=frq,
                                      offset=offset,
                                      point=point,
                                      time=time))
                errors.append(
                    average_intensity(spin=cur_spin,
                                      exp_type=exp_type,
                                      frq=frq,
                                      offset=offset,
                                      point=point,
                                      time=time,
                                      error=True))
                times.append(time)

            # Convert to numpy array.
            values = asarray(values)
            errors = asarray(errors)
            times = asarray(times)

            # Initialise data.
            E.setup_data(values=values, errors=errors, times=times)

            # Get the result based on method.
            if method == 'scipy.optimize.leastsq':
                # Acquire results.
                results = minimise_leastsq(E=E)

            elif method == 'minfx':
                # Set settings.
                E.set_settings_minfx(min_algor=min_algor,
                                     c_code=c_code,
                                     chi2_jacobian=chi2_jacobian,
                                     constraints=constraints)

                # Acquire results.
                results = minimise_minfx(E=E)
            else:
                raise RelaxError(
                    "Method for minimisation not known. Try setting: method='scipy.optimize.leastsq'."
                )

            # Unpack results
            param_vector, param_vector_error, chi2, iter_count, f_count, g_count, h_count, warning = results

            # Extract values.
            r2eff = param_vector[0]
            i0 = param_vector[1]
            r2eff_err = param_vector_error[0]
            i0_err = param_vector_error[1]

            # Disassemble the parameter vector.
            disassemble_param_vector(param_vector=param_vector,
                                     spins=[cur_spin],
                                     key=param_key)

            # Errors.
            if not hasattr(cur_spin, 'r2eff_err'):
                setattr(cur_spin, 'r2eff_err',
                        deepcopy(getattr(cur_spin, 'r2eff')))
            if not hasattr(cur_spin, 'i0_err'):
                setattr(cur_spin, 'i0_err', deepcopy(getattr(cur_spin, 'i0')))

            # Set error.
            cur_spin.r2eff_err[param_key] = r2eff_err
            cur_spin.i0_err[param_key] = i0_err

            # Chi-squared statistic.
            cur_spin.chi2 = chi2

            # Function evaluation count.
            cur_spin.f_count = f_count

            # Warning.
            cur_spin.warning = warning

            # Print information.
            print_strings = []
            if E.verbosity >= 1:
                # Add print strings.
                point_info = "%s at %3.1f MHz, for offset=%3.3f ppm and dispersion point %-5.1f, with %i time points." % (
                    exp_type, frq / 1E6, offset, point, len(times))
                print_strings.append(point_info)

                par_info = "r2eff=%3.3f r2eff_err=%3.4f, i0=%6.1f, i0_err=%3.4f, chi2=%3.3f.\n" % (
                    r2eff, r2eff_err, i0, i0_err, chi2)
                print_strings.append(par_info)

                if E.verbosity >= 2:
                    time_info = ', '.join(map(str, times))
                    print_strings.append('For time array: ' + time_info +
                                         '.\n\n')

            # Print info
            if len(print_strings) > 0:
                for print_string in print_strings:
                    print(print_string),
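
The docstring of estimate_r2eff() describes finding the initial guess x0 = [r2eff_est, i0_est] by linearising the decay, ln(I[j]) = ln(i0) - time[j] * r2eff. A self-contained numpy sketch of that linear least-squares step (independent of relax's own implementation):

import numpy as np

def linear_initial_guess(times, values):
    # Estimate [r2eff, i0] from a straight-line fit to ln(I) = ln(i0) - t * r2eff.
    times = np.asarray(times, dtype=float)
    log_i = np.log(np.asarray(values, dtype=float))
    slope, intercept = np.polyfit(times, log_i, 1)
    return [-slope, np.exp(intercept)]

# Example: a noise-free decay with r2eff = 12 s^-1 and i0 = 1e5.
t = [0.0, 0.02, 0.04, 0.08, 0.12]
i = [1e5 * np.exp(-12.0 * x) for x in t]
print(linear_initial_guess(t, i))   # approximately [12.0, 100000.0]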
Code example #6
def estimate_r2eff_err(spin_id=None, epsrel=0.0, verbosity=1):
    """This will estimate the R2eff and i0 errors from the covariance matrix Qxx.  Qxx is calculated from the Jacobian matrix and the optimised parameters.

    @keyword spin_id:       The spin identification string.
    @type spin_id:          str
    @keyword epsrel:        Any columns of R which satisfy |R_{kk}| <= epsrel |R_{11}| are considered linearly-dependent and are excluded from the covariance matrix, where the corresponding rows and columns of the covariance matrix are set to zero.
    @type epsrel:           float
    @keyword verbosity:     The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:        int
    """

    # Check that the C modules have been compiled.
    if not C_module_exp_fn:
        raise RelaxError(
            "Relaxation curve fitting is not available.  Try compiling the C modules on your platform."
        )

    # Perform checks.
    check_model_type(model=MODEL_R2EFF)

    # Loop over the spins.
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(
            selection=spin_id, full_info=True, return_id=True,
            skip_desel=True):
        # Generate spin string.
        spin_string = generate_spin_string(spin=cur_spin,
                                           mol_name=mol_name,
                                           res_num=resi,
                                           res_name=resn)

        # Raise an error if the spin has not been optimised.
        if not (hasattr(cur_spin, 'r2eff') and hasattr(cur_spin, 'i0')):
            raise RelaxError(
                "Spin %s does not contain optimised 'r2eff' and 'i0' values.  Try executing: minimise.execute(min_algor='Newton', constraints=False)"
                % (spin_string))

        # Raise a warning if the gradient count is 0, as this could point to a lack of prior minimisation.
        if hasattr(cur_spin, 'g_count'):
            if getattr(cur_spin, 'g_count') == 0.0:
                text = "Spin %s contains a gradient count of 0.0.  Is the R2eff parameter optimised?  Try executing: minimise.execute(min_algor='Newton', constraints=False)" % (
                    spin_string)
                warn(RelaxWarning("%s." % text))

        # Print information.
        if verbosity >= 1:
            # Individual spin block section.
            top = 2
            if verbosity >= 2:
                top += 2
            subsection(file=sys.stdout,
                       text="Estimating R2eff error for spin: %s" %
                       spin_string,
                       prespace=top)

        # Loop over each spectrometer frequency and dispersion point.
        for exp_type, frq, offset, point, ei, mi, oi, di in loop_exp_frq_offset_point(
                return_indices=True):
            # The parameter key.
            param_key = return_param_key_from_data(exp_type=exp_type,
                                                   frq=frq,
                                                   offset=offset,
                                                   point=point)

            # Extract values.
            r2eff = getattr(cur_spin, 'r2eff')[param_key]
            i0 = getattr(cur_spin, 'i0')[param_key]

            # Pack data
            param_vector = [r2eff, i0]

            # The peak intensities, errors and times.
            values = []
            errors = []
            times = []
            for time in loop_time(exp_type=exp_type,
                                  frq=frq,
                                  offset=offset,
                                  point=point):
                values.append(
                    average_intensity(spin=cur_spin,
                                      exp_type=exp_type,
                                      frq=frq,
                                      offset=offset,
                                      point=point,
                                      time=time))
                errors.append(
                    average_intensity(spin=cur_spin,
                                      exp_type=exp_type,
                                      frq=frq,
                                      offset=offset,
                                      point=point,
                                      time=time,
                                      error=True))
                times.append(time)

            # Convert to numpy array.
            values = asarray(values)
            errors = asarray(errors)
            times = asarray(times)

            # Initialise data in C code.
            scaling_list = [1.0, 1.0]
            model = Relax_fit_opt(model='exp',
                                  num_params=len(param_vector),
                                  values=values,
                                  errors=errors,
                                  relax_times=times,
                                  scaling_matrix=scaling_list)

            # Use the direct Jacobian from function.
            jacobian_matrix_exp = transpose(
                asarray(model.jacobian(param_vector)))
            weights = 1. / errors**2

            # Get the covariance matrix.
            pcov = multifit_covar(J=jacobian_matrix_exp, weights=weights)

            # To compute one standard deviation errors on the parameters, take the square root of the diagonal covariance.
            param_vector_error = sqrt(diag(pcov))

            # Extract values.
            r2eff_err, i0_err = param_vector_error

            # Copy the r2eff dictionary to the r2eff_err dictionary, as they share the same keys.
            if not hasattr(cur_spin, 'r2eff_err'):
                setattr(cur_spin, 'r2eff_err',
                        deepcopy(getattr(cur_spin, 'r2eff')))
            if not hasattr(cur_spin, 'i0_err'):
                setattr(cur_spin, 'i0_err', deepcopy(getattr(cur_spin, 'i0')))

            # Set error.
            cur_spin.r2eff_err[param_key] = r2eff_err
            cur_spin.i0_err[param_key] = i0_err

            # Get other relevant information.
            chi2 = getattr(cur_spin, 'chi2')

            # Print information.
            print_strings = []
            if verbosity >= 1:
                # Add print strings.
                point_info = "%s at %3.1f MHz, for offset=%3.3f ppm and dispersion point %-5.1f, with %i time points." % (
                    exp_type, frq / 1E6, offset, point, len(times))
                print_strings.append(point_info)

                par_info = "r2eff=%3.3f r2eff_err=%3.4f, i0=%6.1f, i0_err=%3.4f, chi2=%3.3f.\n" % (
                    r2eff, r2eff_err, i0, i0_err, chi2)
                print_strings.append(par_info)

                if verbosity >= 2:
                    time_info = ', '.join(map(str, times))
                    print_strings.append('For time array: ' + time_info +
                                         '.\n\n')

            # Print info
            if len(print_strings) > 0:
                for print_string in print_strings:
                    print(print_string),
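
The multifit_covar() call above turns the exponential model's Jacobian and the intensity errors into the covariance matrix Qxx. Under the standard weighted least-squares assumption this is Qxx = (J^T W J)^-1 with W = diag(1 / sigma^2); a minimal numpy sketch of that calculation (the real relax function may additionally handle scaling and near-singular columns via epsrel):

import numpy as np

def covariance_from_jacobian(J, errors):
    # Qxx = (J^T W J)^-1, with J of shape (n_points, n_params) evaluated at
    # the optimised parameters and W = diag(1 / sigma^2).
    J = np.asarray(J, dtype=float)
    weights = 1.0 / np.asarray(errors, dtype=float)**2
    hessian_approx = J.T.dot(weights[:, None] * J)
    return np.linalg.inv(hessian_approx)

# One standard deviation parameter errors then follow as:
#     param_vector_error = np.sqrt(np.diag(covariance_from_jacobian(J, errors)))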
Code example #7
File: estimate_r2eff.py  Project: pombredanne/relax
def estimate_r2eff_err(spin_id=None, epsrel=0.0, verbosity=1):
    """This will estimate the R2eff and i0 errors from the covariance matrix Qxx.  Qxx is calculated from the Jacobian matrix and the optimised parameters.

    @keyword spin_id:       The spin identification string.
    @type spin_id:          str
    @keyword epsrel:        Any columns of R which satisfy |R_{kk}| <= epsrel |R_{11}| are considered linearly-dependent and are excluded from the covariance matrix, where the corresponding rows and columns of the covariance matrix are set to zero.
    @type epsrel:           float
    @keyword verbosity:     The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:        int
    """

    # Check that the C modules have been compiled.
    if not C_module_exp_fn:
        raise RelaxError("Relaxation curve fitting is not available.  Try compiling the C modules on your platform.")

    # Perform checks.
    check_model_type(model=MODEL_R2EFF)

    # Loop over the spins.
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(selection=spin_id, full_info=True, return_id=True, skip_desel=True):
        # Generate spin string.
        spin_string = generate_spin_string(spin=cur_spin, mol_name=mol_name, res_num=resi, res_name=resn)

        # Raise an error if the spin has not been optimised.
        if not (hasattr(cur_spin, 'r2eff') and hasattr(cur_spin, 'i0')):
            raise RelaxError("Spin %s does not contain optimised 'r2eff' and 'i0' values.  Try executing: minimise.execute(min_algor='Newton', constraints=False)" % (spin_string))

        # Raise a warning if the gradient count is 0, as this could point to a lack of prior minimisation.
        if hasattr(cur_spin, 'g_count'):
            if getattr(cur_spin, 'g_count') == 0.0:
                text = "Spin %s contains a gradient count of 0.0.  Is the R2eff parameter optimised?  Try executing: minimise.execute(min_algor='Newton', constraints=False)" % (spin_string)
                warn(RelaxWarning("%s." % text))

        # Print information.
        if verbosity >= 1:
            # Individual spin block section.
            top = 2
            if verbosity >= 2:
                top += 2
            subsection(file=sys.stdout, text="Estimating R2eff error for spin: %s"%spin_string, prespace=top)

        # Loop over each spectrometer frequency and dispersion point.
        for exp_type, frq, offset, point, ei, mi, oi, di in loop_exp_frq_offset_point(return_indices=True):
            # The parameter key.
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # Extract values.
            r2eff = getattr(cur_spin, 'r2eff')[param_key]
            i0 = getattr(cur_spin, 'i0')[param_key]

            # Pack data
            param_vector = [r2eff, i0]

            # The peak intensities, errors and times.
            values = []
            errors = []
            times = []
            for time in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point):
                values.append(average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time))
                errors.append(average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True))
                times.append(time)

            # Convert to numpy array.
            values = asarray(values)
            errors = asarray(errors)
            times = asarray(times)

            # Initialise data in C code.
            scaling_list = [1.0, 1.0]
            model = Relax_fit_opt(model='exp', num_params=len(param_vector), values=values, errors=errors, relax_times=times, scaling_matrix=scaling_list)

            # Use the direct Jacobian from function.
            jacobian_matrix_exp = transpose(asarray( model.jacobian(param_vector) ) )
            weights = 1. / errors**2

            # Get the covariance matrix.
            pcov = multifit_covar(J=jacobian_matrix_exp, weights=weights)

            # To compute one standard deviation errors on the parameters, take the square root of the diagonal covariance.
            param_vector_error = sqrt(diag(pcov))

            # Extract values.
            r2eff_err, i0_err = param_vector_error

            # Copy the r2eff dictionary to the r2eff_err dictionary, as they share the same keys.
            if not hasattr(cur_spin, 'r2eff_err'):
                setattr(cur_spin, 'r2eff_err', deepcopy(getattr(cur_spin, 'r2eff')))
            if not hasattr(cur_spin, 'i0_err'):
                setattr(cur_spin, 'i0_err', deepcopy(getattr(cur_spin, 'i0')))

            # Set error.
            cur_spin.r2eff_err[param_key] = r2eff_err
            cur_spin.i0_err[param_key] = i0_err

            # Get other relevant information.
            chi2 = getattr(cur_spin, 'chi2')

            # Print information.
            print_strings = []
            if verbosity >= 1:
                # Add print strings.
                point_info = "%s at %3.1f MHz, for offset=%3.3f ppm and dispersion point %-5.1f, with %i time points." % (exp_type, frq/1E6, offset, point, len(times))
                print_strings.append(point_info)

                par_info = "r2eff=%3.3f r2eff_err=%3.4f, i0=%6.1f, i0_err=%3.4f, chi2=%3.3f.\n" % ( r2eff, r2eff_err, i0, i0_err, chi2)
                print_strings.append(par_info)

                if verbosity >= 2:
                    time_info = ', '.join(map(str, times))
                    print_strings.append('For time array: '+time_info+'.\n\n')

            # Print info
            if len(print_strings) > 0:
                for print_string in print_strings:
                    print(print_string),
Code example #8
File: estimate_r2eff.py  Project: pombredanne/relax
def estimate_r2eff(method='minfx', min_algor='simplex', c_code=True, constraints=False, chi2_jacobian=False, spin_id=None, ftol=1e-15, xtol=1e-15, maxfev=10000000, factor=100.0, verbosity=1):
    """Estimate r2eff and errors by exponential curve fitting with scipy.optimize.leastsq or minfx.

    THIS IS ONLY FOR TESTING.

    scipy.optimize.leastsq is a wrapper around MINPACK's lmdif and lmder algorithms.

    MINPACK is a FORTRAN90 library which solves systems of nonlinear equations, or carries out the least squares minimization of the residual of a set of linear or nonlinear equations.

    Errors are calculated by taking the square root of the reported covariance.

    This can be a huge time-saving step when performing model fitting in R1rho.
    Errors of R2eff values are normally estimated by time-consuming Monte Carlo simulations.

    The initial guess for the starting parameters, x0 = [r2eff_est, i0_est], is obtained by converting the exponential curve to a linear problem.
    The initial guess is then found by linear least squares of: ln(Intensity[j]) = ln(i0) - time[j] * r2eff.


    @keyword method:            The method to minimise and estimate errors.  Options are: 'minfx' or 'scipy.optimize.leastsq'.
    @type method:               string
    @keyword min_algor:         The minimisation algorithm.
    @type min_algor:            string
    @keyword c_code:            Whether to optimise with the C code.
    @type c_code:               bool
    @keyword constraints:       Whether constraints should be used.
    @type constraints:          bool
    @keyword chi2_jacobian:     Whether the chi2 Jacobian should be used.
    @type chi2_jacobian:        bool
    @keyword spin_id:           The spin identification string.
    @type spin_id:              str
    @keyword ftol:              The function tolerance for the relative error desired in the sum of squares, passed to leastsq.
    @type ftol:                 float
    @keyword xtol:              The error tolerance for the relative error desired in the approximate solution, passed to leastsq.
    @type xtol:                 float
    @keyword maxfev:            The maximum number of function evaluations, passed to leastsq.  If zero, then 100*(N+1) is the maximum number of function calls.  N is the number of elements in x0=[r2eff, i0].
    @type maxfev:               int
    @keyword factor:            The initial step bound, passed to leastsq.  It determines the initial step bound (factor * ||diag * x||).  Should be in the interval (0.1, 100).
    @type factor:               float
    @keyword verbosity:         The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:            int
    """

    # Perform checks.
    check_model_type(model=MODEL_R2EFF)

    # Check that the C modules have been compiled.
    if not C_module_exp_fn and method == 'minfx':
        raise RelaxError("Relaxation curve fitting is not available.  Try compiling the C modules on your platform.")

    # Initialise the Exp class and set the scipy.optimize.leastsq settings.
    E = Exp(verbosity=verbosity)
    E.set_settings_leastsq(ftol=ftol, xtol=xtol, maxfev=maxfev, factor=factor)

    # Check if intensity errors have already been calculated by the user.
    precalc = True
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(selection=spin_id, full_info=True, return_id=True, skip_desel=True):
        # No peak intensity error structure.
        if not hasattr(cur_spin, 'peak_intensity_err'):
            precalc = False
            break

        # Determine if a spectrum ID is missing from the list.
        for id in cdp.spectrum_ids:
            if id not in cur_spin.peak_intensity_err:
                precalc = False
                break

    # Loop over the spins.
    for cur_spin, mol_name, resi, resn, cur_spin_id in spin_loop(selection=spin_id, full_info=True, return_id=True, skip_desel=True):
        # Generate spin string.
        spin_string = generate_spin_string(spin=cur_spin, mol_name=mol_name, res_num=resi, res_name=resn)

        # Print information.
        if E.verbosity >= 1:
            # Individual spin block section.
            top = 2
            if E.verbosity >= 2:
                top += 2
            subsection(file=sys.stdout, text="Fitting with %s to: %s"%(method, spin_string), prespace=top)
            if method == 'minfx':
                subsection(file=sys.stdout, text="min_algor='%s', c_code=%s, constraints=%s, chi2_jacobian?=%s"%(min_algor, c_code, constraints, chi2_jacobian), prespace=0)

        # Loop over each spectrometer frequency and dispersion point.
        for exp_type, frq, offset, point, ei, mi, oi, di in loop_exp_frq_offset_point(return_indices=True):
            # The parameter key.
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # The peak intensities, errors and times.
            values = []
            errors = []
            times = []
            for time in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point):
                values.append(average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time))
                errors.append(average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True))
                times.append(time)

            # Convert to numpy array.
            values = asarray(values)
            errors = asarray(errors)
            times = asarray(times)

            # Initialise data.
            E.setup_data(values=values, errors=errors, times=times)

            # Get the result based on method.
            if method == 'scipy.optimize.leastsq':
                # Acquire results.
                results = minimise_leastsq(E=E)

            elif method == 'minfx':
                # Set settings.
                E.set_settings_minfx(min_algor=min_algor, c_code=c_code, chi2_jacobian=chi2_jacobian, constraints=constraints)

                # Acquire results.
                results = minimise_minfx(E=E)
            else:
                raise RelaxError("Method for minimisation not known. Try setting: method='scipy.optimize.leastsq'.")

            # Unpack results
            param_vector, param_vector_error, chi2, iter_count, f_count, g_count, h_count, warning = results

            # Extract values.
            r2eff = param_vector[0]
            i0 = param_vector[1]
            r2eff_err = param_vector_error[0]
            i0_err = param_vector_error[1]

            # Disassemble the parameter vector.
            disassemble_param_vector(param_vector=param_vector, spins=[cur_spin], key=param_key)

            # Errors.
            if not hasattr(cur_spin, 'r2eff_err'):
                setattr(cur_spin, 'r2eff_err', deepcopy(getattr(cur_spin, 'r2eff')))
            if not hasattr(cur_spin, 'i0_err'):
                setattr(cur_spin, 'i0_err', deepcopy(getattr(cur_spin, 'i0')))

            # Set error.
            cur_spin.r2eff_err[param_key] = r2eff_err
            cur_spin.i0_err[param_key] = i0_err

            # Chi-squared statistic.
            cur_spin.chi2 = chi2

            # Function evaluation count.
            cur_spin.f_count = f_count

            # Warning.
            cur_spin.warning = warning

            # Print information.
            print_strings = []
            if E.verbosity >= 1:
                # Add print strings.
                point_info = "%s at %3.1f MHz, for offset=%3.3f ppm and dispersion point %-5.1f, with %i time points." % (exp_type, frq/1E6, offset, point, len(times))
                print_strings.append(point_info)

                par_info = "r2eff=%3.3f r2eff_err=%3.4f, i0=%6.1f, i0_err=%3.4f, chi2=%3.3f.\n" % ( r2eff, r2eff_err, i0, i0_err, chi2)
                print_strings.append(par_info)

                if E.verbosity >= 2:
                    time_info = ', '.join(map(str, times))
                    print_strings.append('For time array: '+time_info+'.\n\n')

            # Print info
            if len(print_strings) > 0:
                for print_string in print_strings:
                    print(print_string),
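
Both estimate_r2eff() variants fit the two-parameter exponential I(t) = i0 * exp(-r2eff * t) by minimising a chi-squared statistic against the averaged peak intensities. A minimal sketch of the target function being minimised, assuming the usual chi2 = sum(((I - I_model) / sigma)^2) definition:

import numpy as np

def exp_model(params, times):
    # Two-parameter exponential decay, params = [r2eff, i0].
    r2eff, i0 = params
    return i0 * np.exp(-r2eff * np.asarray(times, dtype=float))

def chi2(params, times, values, errors):
    # Chi-squared of the exponential model against the measured intensities.
    residuals = (np.asarray(values, dtype=float) - exp_model(params, times)) / np.asarray(errors, dtype=float)
    return float(np.sum(residuals**2))

# Example evaluation at a trial parameter vector.
t = [0.0, 0.04, 0.08]
i = [100000.0, 61878.0, 38289.0]
err = [1500.0, 1500.0, 1500.0]
print(chi2([12.0, 100000.0], t, i, err))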