Code Example #1
File: auto_rx_base.py    Project: pombredanne/relax
    def sync_ds(self, upload=False):
        """Synchronise the analysis frame and the relax data store, both ways.

        This method allows the frame information to be uploaded into the relax data store, or for the information in the relax data store to be downloaded by the frame.

        @keyword upload:    A flag which if True will cause the frame to send data to the relax data store.  If False, data will be downloaded from the relax data store to update the frame.
        @type upload:       bool
        """

        # The frequency.
        if upload:
            self.data.frq = gui_to_str(self.field_nmr_frq.GetValue())
        else:
            self.field_nmr_frq.SetValue(str_to_gui(self.data.frq))

        # The grid incs.
        if upload:
            self.data.grid_inc = gui_to_int(self.grid_inc.GetValue())
        elif hasattr(self.data, 'grid_inc'):
            self.grid_inc.SetValue(int(self.data.grid_inc))

        # The MC sim number.
        if upload:
            self.data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())
        elif hasattr(self.data, 'mc_sim_num'):
            self.mc_sim_num.SetValue(int(self.data.mc_sim_num))

        # The results directory.
        if upload:
            self.data.save_dir = gui_to_str(self.field_results_dir.GetValue())
        else:
            self.field_results_dir.SetValue(str_to_gui(self.data.save_dir))
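
These sync_ds methods lean on relax's GUI string conversion helpers (gui_to_str, gui_to_int, str_to_gui and relatives) to translate between wx widget values and plain Python types. For readers unfamiliar with relax, the following is a rough, self-contained sketch of what such converters do; it is an approximation, not the project's actual implementation.

    # Approximate stand-ins for the converters used above.  Assumption:
    # an empty GUI field maps to None, and None maps back to an empty string.
    def gui_to_str(value):
        """Convert a wx field value to a Python string, or None if empty."""
        if value in (None, ''):
            return None
        return str(value)

    def gui_to_int(value):
        """Convert a wx field value to an int, or None if empty or invalid."""
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    def str_to_gui(value):
        """Convert a Python value into a string suitable for a wx field."""
        if value is None:
            return ''
        return str(value)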
Code Example #2
    def sync_ds(self, upload=False):
        """Synchronise the analysis frame and the relax data store, both ways.

        This method allows the frame information to be uploaded into the relax data store, or for the information in the relax data store to be downloaded by the frame.

        @keyword upload:    A flag which if True will cause the frame to send data to the relax data store.  If False, data will be downloaded from the relax data store to update the frame.
        @type upload:       bool
        """

        # The numeric solution only flag.
        if upload:
            self.data.numeric_only = self.numeric_only.GetValue()
        elif hasattr(self.data, 'numeric_only'):
            self.numeric_only.SetValue(bool(self.data.numeric_only))

        # The grid incs.
        if upload:
            self.data.grid_inc = gui_to_int(self.grid_inc.GetValue())
        elif hasattr(self.data, 'grid_inc'):
            self.grid_inc.SetValue(int(self.data.grid_inc))

        # The MC sim number.
        if upload:
            self.data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())
        elif hasattr(self.data, 'mc_sim_num'):
            self.mc_sim_num.SetValue(int(self.data.mc_sim_num))

        # The All model MC sim flag.
        if upload:
            self.data.mc_sim_all_models = self.mc_sim_all_models.GetValue()
        elif hasattr(self.data, 'mc_sim_all_models'):
            self.mc_sim_all_models.SetValue(bool(self.data.mc_sim_all_models))

        # The insignificance level.
        if upload:
            self.data.insignificance = self.insignificance.GetValue()
            try:
                self.data.insignificance = gui_to_float(self.data.insignificance)
            except:
                pass
        elif hasattr(self.data, 'insignificance'):
            self.insignificance.SetValue(float_to_gui(self.data.insignificance))

        # The results directory.
        if upload:
            self.data.save_dir = gui_to_str(self.field_results_dir.GetValue())
        else:
            self.field_results_dir.SetValue(str_to_gui(self.data.save_dir))

        # The previous run results directory.
        if upload:
            self.data.pre_run_dir = gui_to_str(self.field_pre_run_dir.GetValue())
        elif hasattr(self.data, 'pre_run_dir'):
            self.field_pre_run_dir.SetValue(str_to_gui(self.data.pre_run_dir))

        # The models to use.
        if upload:
            self.data.disp_models = self.model_field.GetValue()
        else:
            self.model_field.set_value(self.data.disp_models)
Code Example #3
File: free_file_format.py    Project: bopopescu/relax
    def save(self, event):
        """Save the free file format widget contents into the relax data store.

        @param event:   The wx event.
        @type event:    wx event
        """

        # The default GUI element.
        if self.element_type == 'default':
            # Get the column numbers.
            ds.relax_gui.free_file_format.spin_id_col = gui_to_int(
                self.spin_id_col.GetValue())
            ds.relax_gui.free_file_format.mol_name_col = gui_to_int(
                self.mol_name_col.GetValue())
            ds.relax_gui.free_file_format.res_num_col = gui_to_int(
                self.res_num_col.GetValue())
            ds.relax_gui.free_file_format.res_name_col = gui_to_int(
                self.res_name_col.GetValue())
            ds.relax_gui.free_file_format.spin_num_col = gui_to_int(
                self.spin_num_col.GetValue())
            ds.relax_gui.free_file_format.spin_name_col = gui_to_int(
                self.spin_name_col.GetValue())

            # The data and error.
            if hasattr(self, 'data_col'):
                ds.relax_gui.free_file_format.data_col = gui_to_int(
                    self.data_col.GetValue())
            if hasattr(self, 'error_col'):
                ds.relax_gui.free_file_format.error_col = gui_to_int(
                    self.error_col.GetValue())

            # The column separator.
            ds.relax_gui.free_file_format.sep = str(self.sep.GetValue())
            if ds.relax_gui.free_file_format.sep == 'white space':
                ds.relax_gui.free_file_format.sep = None

        # The mini GUI element.
        elif self.element_type == 'mini':
            # Get the current values.
            settings = self.GetValue()

            # Store the values.
            ds.relax_gui.free_file_format.spin_id_col = settings['spin_id_col']
            ds.relax_gui.free_file_format.mol_name_col = settings[
                'mol_name_col']
            ds.relax_gui.free_file_format.res_num_col = settings['res_num_col']
            ds.relax_gui.free_file_format.res_name_col = settings[
                'res_name_col']
            ds.relax_gui.free_file_format.spin_num_col = settings[
                'spin_num_col']
            ds.relax_gui.free_file_format.spin_name_col = settings[
                'spin_name_col']
            if self.data_cols:
                ds.relax_gui.free_file_format.data_col = settings['data_col']
                ds.relax_gui.free_file_format.error_col = settings['error_col']
            ds.relax_gui.free_file_format.sep = settings['sep']
Code Example #4
File: auto_rx_base.py    Project: pombredanne/relax
    def assemble_data(self):
        """Assemble the data required for the auto-analysis.

        See the docstring for auto_analyses.relax_fit for details.  All data is taken from the relax data store, so data upload from the GUI to there must have been previously performed.

        @return:    A container with all the data required for the auto-analysis.
        @rtype:     class instance, list of str
        """

        # The data container.
        data = Container()
        missing = []

        # The pipe name and bundle.
        data.pipe_name = self.data.pipe_name
        data.pipe_bundle = self.data.pipe_bundle

        # The frequency.
        frq = gui_to_str(self.field_nmr_frq.GetValue())
        if frq == None:
            missing.append('NMR frequency')

        # File root.
        data.file_root = '%s.%s' % (self.analysis_type, frq)

        # Check if sequence data is loaded
        if not exists_mol_res_spin_data():
            missing.append("Sequence data")

        # Spectral data.
        if not hasattr(cdp, 'spectrum_ids') or len(cdp.spectrum_ids) < 3:
            missing.append("Spectral data")

        # Increment size.
        data.inc = gui_to_int(self.grid_inc.GetValue())

        # The number of Monte Carlo simulations to be used for error analysis at the end of the analysis.
        data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())

        # Results directory.
        data.save_dir = self.data.save_dir

        # Return the container and list of missing data.
        return data, missing
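
The docstring notes that all data is taken from the relax data store, so sync_ds(upload=True) must run first. A hedged sketch of how a frame could chain the two methods when the user starts the analysis is given below; the execute, show_missing and launch_analysis names are placeholders, not relax's actual API.

    # Hypothetical start-button handler (method and helper names are
    # placeholders, not relax's actual API).
    def execute(self, event):
        """Push the GUI values to the data store, validate, then run."""
        # GUI -> relax data store.
        self.sync_ds(upload=True)

        # Gather everything the auto-analysis needs.
        data, missing = self.assemble_data()

        # Abort and report if anything essential is absent.
        if missing:
            self.show_missing(missing)
            return

        # Otherwise hand the container over to the analysis.
        self.launch_analysis(data)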
Code Example #5
File: auto_model_free.py    Project: bopopescu/relax
    def sync_ds(self, upload=False):
        """Synchronise the analysis frame and the relax data store, both ways.

        This method allows the frame information to be uploaded into the relax data store, or for the information in the relax data store to be downloaded by the frame.

        @keyword upload:    A flag which if True will cause the frame to send data to the relax data store.  If False, data will be downloaded from the relax data store to update the frame.
        @type upload:       bool
        """

        # The local tau_m models to use.
        if upload:
            self.data.local_tm_models = self.local_tm_model_field.GetValue()
        else:
            self.local_tm_model_field.set_value(self.data.local_tm_models)

        # The model-free models to use.
        if upload:
            self.data.mf_models = self.mf_model_field.GetValue()
        else:
            self.mf_model_field.set_value(self.data.mf_models)

        # The grid incs.
        if upload:
            self.data.grid_inc = gui_to_int(self.grid_inc.GetValue())
        elif hasattr(self.data, 'grid_inc'):
            self.grid_inc.SetValue(int(self.data.grid_inc))

        # The MC sim number.
        if upload:
            self.data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())
        elif hasattr(self.data, 'mc_sim_num'):
            self.mc_sim_num.SetValue(int(self.data.mc_sim_num))

        # The results directory.
        if upload:
            self.data.save_dir = str(self.field_results_dir.GetValue())
        else:
            self.field_results_dir.SetValue(str_to_gui(self.data.save_dir))

        # Maximum iterations.
        if upload:
            self.data.max_iter = gui_to_int(self.max_iter.GetValue())
        else:
            self.max_iter.SetValue(int(self.data.max_iter))
Code Example #6
File: free_file_format.py    Project: bopopescu/relax
    def GetValue(self):
        """Return the free file format settings as a keyword dictionary.

        @return:    The dictionary of free file format settings.
        @rtype:     dict
        """

        # Initialise.
        settings = {}

        # The default GUI element.
        if self.element_type == 'default':
            # Get the column numbers.
            settings['spin_id_col'] = gui_to_int(self.spin_id_col.GetValue())
            settings['mol_name_col'] = gui_to_int(self.mol_name_col.GetValue())
            settings['res_num_col'] = gui_to_int(self.res_num_col.GetValue())
            settings['res_name_col'] = gui_to_int(self.res_name_col.GetValue())
            settings['spin_num_col'] = gui_to_int(self.spin_num_col.GetValue())
            settings['spin_name_col'] = gui_to_int(
                self.spin_name_col.GetValue())
            if self.data_cols:
                settings['data_col'] = gui_to_int(self.data_col.GetValue())
                settings['error_col'] = gui_to_int(self.error_col.GetValue())

            # The column separator.
            settings['sep'] = str(self.sep.GetValue())
            if settings['sep'] == 'white space':
                settings['sep'] = None

        # The mini GUI element.
        elif self.element_type == 'mini':
            # Convert the values.
            values = self.from_string(string=gui_to_str(self.field.GetValue()))

            # Store them.
            settings['spin_id_col'] = values[0]
            settings['mol_name_col'] = values[1]
            settings['res_num_col'] = values[2]
            settings['res_name_col'] = values[3]
            settings['spin_num_col'] = values[4]
            settings['spin_name_col'] = values[5]
            if self.data_cols:
                settings['data_col'] = values[6]
                settings['error_col'] = values[7]
                settings['sep'] = values[8]
            else:
                settings['sep'] = values[6]

        # Return the settings.
        return settings
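
The keys of the returned dictionary (spin_id_col, mol_name_col, ..., data_col, error_col, sep) match the column keywords used by relax's file-reading user functions, so the dictionary is typically unpacked straight into such a call. The snippet below only illustrates one way a caller might post-process the settings; active_columns is not a relax function.

    # Illustration only: keep the column keywords that are actually set,
    # e.g. before unpacking them into a file-reading call.
    def active_columns(settings):
        """Return only the *_col entries that are not None."""
        return {key: val for key, val in settings.items()
                if key.endswith('_col') and val is not None}

    # Example with hand-written values (not from a real GUI session):
    example = {'spin_id_col': None, 'mol_name_col': None, 'res_num_col': 1,
               'res_name_col': 2, 'spin_num_col': None, 'spin_name_col': 3,
               'data_col': 4, 'error_col': 5, 'sep': None}
    print(active_columns(example))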
Code Example #7
    def GetValue(self):
        """Return the free file format settings as a keyword dictionary.

        @return:    The dictionary of free file format settings.
        @rtype:     dict
        """

        # Initialise.
        settings = {}

        # The default GUI element.
        if self.element_type == 'default':
            # Get the column numbers.
            settings['spin_id_col'] =   gui_to_int(self.spin_id_col.GetValue())
            settings['mol_name_col'] =  gui_to_int(self.mol_name_col.GetValue())
            settings['res_num_col'] =   gui_to_int(self.res_num_col.GetValue())
            settings['res_name_col'] =  gui_to_int(self.res_name_col.GetValue())
            settings['spin_num_col'] =  gui_to_int(self.spin_num_col.GetValue())
            settings['spin_name_col'] = gui_to_int(self.spin_name_col.GetValue())
            if self.data_cols:
                settings['data_col'] =  gui_to_int(self.data_col.GetValue())
                settings['error_col'] = gui_to_int(self.error_col.GetValue())

            # The column separator.
            settings['sep'] = str(self.sep.GetValue())
            if settings['sep'] == 'white space':
                settings['sep'] = None

        # The mini GUI element.
        elif self.element_type == 'mini':
            # Convert the values.
            values = self.from_string(string=gui_to_str(self.field.GetValue()))

            # Store them.
            settings['spin_id_col'] = values[0]
            settings['mol_name_col'] = values[1]
            settings['res_num_col'] = values[2]
            settings['res_name_col'] = values[3]
            settings['spin_num_col'] = values[4]
            settings['spin_name_col'] = values[5]
            if self.data_cols:
                settings['data_col'] = values[6]
                settings['error_col'] = values[7]
                settings['sep'] = values[8]
            else:
                settings['sep'] = values[6]

        # Return the settings.
        return settings
Code Example #8
    def save(self, event):
        """Save the free file format widget contents into the relax data store.

        @param event:   The wx event.
        @type event:    wx event
        """

        # The default GUI element.
        if self.element_type == 'default':
            # Get the column numbers.
            ds.relax_gui.free_file_format.spin_id_col =   gui_to_int(self.spin_id_col.GetValue())
            ds.relax_gui.free_file_format.mol_name_col =  gui_to_int(self.mol_name_col.GetValue())
            ds.relax_gui.free_file_format.res_num_col =   gui_to_int(self.res_num_col.GetValue())
            ds.relax_gui.free_file_format.res_name_col =  gui_to_int(self.res_name_col.GetValue())
            ds.relax_gui.free_file_format.spin_num_col =  gui_to_int(self.spin_num_col.GetValue())
            ds.relax_gui.free_file_format.spin_name_col = gui_to_int(self.spin_name_col.GetValue())

            # The data and error.
            if hasattr(self, 'data_col'):
                ds.relax_gui.free_file_format.data_col = gui_to_int(self.data_col.GetValue())
            if hasattr(self, 'error_col'):
                ds.relax_gui.free_file_format.error_col = gui_to_int(self.error_col.GetValue())

            # The column separator.
            ds.relax_gui.free_file_format.sep = str(self.sep.GetValue())
            if ds.relax_gui.free_file_format.sep == 'white space':
                ds.relax_gui.free_file_format.sep = None

        # The mini GUI element.
        elif self.element_type == 'mini':
            # Get the current values.
            settings = self.GetValue()

            # Store the values.
            ds.relax_gui.free_file_format.spin_id_col =   settings['spin_id_col']
            ds.relax_gui.free_file_format.mol_name_col =  settings['mol_name_col']
            ds.relax_gui.free_file_format.res_num_col =   settings['res_num_col']
            ds.relax_gui.free_file_format.res_name_col =  settings['res_name_col']
            ds.relax_gui.free_file_format.spin_num_col =  settings['spin_num_col']
            ds.relax_gui.free_file_format.spin_name_col = settings['spin_name_col']
            if self.data_cols:
                ds.relax_gui.free_file_format.data_col = settings['data_col']
                ds.relax_gui.free_file_format.error_col = settings['error_col']
            ds.relax_gui.free_file_format.sep = settings['sep']
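
save() covers the GUI-to-data-store direction only. Refilling the widgets from ds.relax_gui.free_file_format would look roughly like the sketch below for the 'default' element type; the load method name is an assumption and int_to_gui is assumed to be the integer counterpart of str_to_gui, so treat this as a reconstruction rather than relax's actual code.

    # Hedged sketch of the reverse operation (the method name and the
    # int_to_gui converter are assumptions).
    def load(self):
        """Fill the widgets from the stored free file format settings."""
        # Only the full GUI element is covered in this sketch.
        if self.element_type != 'default':
            return

        # The column number fields.
        store = ds.relax_gui.free_file_format
        self.spin_id_col.SetValue(int_to_gui(store.spin_id_col))
        self.mol_name_col.SetValue(int_to_gui(store.mol_name_col))
        self.res_num_col.SetValue(int_to_gui(store.res_num_col))
        self.res_name_col.SetValue(int_to_gui(store.res_name_col))
        self.spin_num_col.SetValue(int_to_gui(store.spin_num_col))
        self.spin_name_col.SetValue(int_to_gui(store.spin_name_col))

        # The separator, mapping None back to the 'white space' choice.
        sep = store.sep if store.sep is not None else 'white space'
        self.sep.SetValue(str_to_gui(sep))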
Code Example #9
File: auto_relax_disp.py    Project: pombredanne/relax
    def assemble_data(self):
        """Assemble the data required for the Auto_noe class.

        @return:    A container with all the data required for the auto-analysis, the missing list, and a list of models that don't match the experiment types.
        @rtype:     class instance, list of str, list of str
        """

        # The data container.
        data = Container()
        missing = []
        model_mismatch = []

        # The pipe name and bundle.
        data.pipe_name = self.data.pipe_name
        data.pipe_bundle = self.data.pipe_bundle

        # Results directories.
        data.save_dir = self.data.save_dir
        data.pre_run_dir = gui_to_str(self.field_pre_run_dir.GetValue())

        # Check if sequence data is loaded
        if not exists_mol_res_spin_data():
            missing.append("Sequence data")

        # Spin variables.
        for spin, spin_id in spin_loop(return_id=True, skip_desel=True):
            # The message skeleton.
            msg = "Spin '%s' - %s (try the %s user function)." % (spin_id, "%s", "%s")

            # Test if the nuclear isotope type has been set.
            if not hasattr(spin, 'isotope') or spin.isotope == None:
                missing.append(msg % ("nuclear isotope data", "spin.isotope"))

        # Spectral data.
        if not hasattr(cdp, 'spectrum_ids') or len(cdp.spectrum_ids) < 2:
            missing.append("Spectral data")

        # The dispersion models.
        data.models = self.model_field.GetValue()

        # Invalid models.
        for model in data.models:
            # Invalid CPMG models.
            if model != MODEL_NOREX and model in MODEL_LIST_CPMG and not has_cpmg_exp_type():
                model_mismatch.append([model, 'CPMG'])

            # Invalid R1rho models.
            if model != MODEL_NOREX and model in MODEL_LIST_R1RHO and not has_r1rho_exp_type():
                model_mismatch.append([model, 'R1rho'])

        # The R1 parameter fitting flag.
        data.r1_fit = self.r1_fit.GetValue()

        # The numeric only solution.
        data.numeric_only = self.numeric_only.GetValue()

        # Increment size.
        data.inc = gui_to_int(self.grid_inc.GetValue())

        # The number of Monte Carlo simulations to be used for error analysis at the end of the analysis.
        data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())
        data.exp_mc_sim_num = gui_to_int(self.exp_mc_sim_num.GetValue())
        data.mc_sim_all_models = self.mc_sim_all_models.GetValue()

        # The insignificance level.
        data.insignificance = self.insignificance.GetValue()
        try:
            data.insignificance = gui_to_float(data.insignificance)
        except:
            missing.append("The insignificance level must be a number.")

        # Optimisation precision.
        data.opt_func_tol = self.opt_func_tol
        data.opt_max_iterations = self.opt_max_iterations

        # Return the container, the list of missing data, and any models that don't match the experiment types.
        return data, missing, model_mismatch
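
Compared with the relaxation-fit version above, this assemble_data additionally returns model_mismatch, a list of [model, experiment type] pairs for dispersion models that cannot be used with the loaded data. A purely illustrative helper (not part of relax) for turning that list into a warning might look like this:

    # Illustrative only, not part of relax: build a readable warning from
    # the [model, experiment type] pairs returned in model_mismatch.
    def format_model_mismatch(model_mismatch):
        """Return one warning line per mismatched dispersion model."""
        lines = []
        for model, exp_type in model_mismatch:
            lines.append("The '%s' model requires %s-type data, which has "
                         "not been loaded." % (model, exp_type))
        return "\n".join(lines)

    # Example usage with made-up input:
    print(format_model_mismatch([['CR72', 'CPMG'], ['TP02', 'R1rho']]))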
Code Example #10
File: auto_model_free.py    Project: bopopescu/relax
    def assemble_data(self):
        """Assemble the data required for the auto-analysis.

        See the docstring for auto_analyses.dauvergne_protocol for details.  All data is taken from the relax data store, so data upload from the GUI to there must have been previously performed.

        @return:    A container with all the data required for the auto-analysis.
        @rtype:     class instance, list of str
        """

        # The data container.
        data = Container()
        missing = []

        # The pipe name and bundle.
        data.pipe_name = self.data.pipe_name
        data.pipe_bundle = self.data.pipe_bundle

        # The model-free models (do not change these unless absolutely necessary).
        data.local_tm_models = self.local_tm_model_field.GetValue()
        data.mf_models = self.mf_model_field.GetValue()

        # Automatic looping over all rounds until convergence (must be a boolean value of True or False).
        data.conv_loop = True

        # Increment size.
        data.inc = gui_to_int(self.grid_inc.GetValue())
        if hasattr(self.data, 'diff_tensor_grid_inc'):
            data.diff_tensor_grid_inc = self.data.diff_tensor_grid_inc
        else:
            data.diff_tensor_grid_inc = {
                'sphere': 11,
                'prolate': 11,
                'oblate': 11,
                'ellipsoid': 6
            }

        # The number of Monte Carlo simulations to be used for error analysis at the end of the analysis.
        data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())

        # Number of maximum iterations.
        data.max_iter = self.data.max_iter

        # Results directory.
        data.save_dir = self.data.save_dir

        # Check if sequence data is loaded
        if not exists_mol_res_spin_data():
            missing.append("Sequence data")

        # Relaxation data.
        if not hasattr(cdp, 'ri_ids') or len(cdp.ri_ids) == 0:
            missing.append("Relaxation data")

        # Insufficient data.
        if hasattr(cdp, 'ri_ids') and len(cdp.ri_ids) <= 3:
            missing.append(
                "Insufficient relaxation data, 4 or more data sets are essential for the execution of the dauvergne_protocol auto-analysis. Check that you have entered data for a least two spectrometer fields."
            )

        # Interatomic data containers.
        if not hasattr(cdp, 'interatomic') or len(cdp.interatomic) == 0:
            missing.append(
                "Interatomic data (for the dipole-dipole interaction)")

        # Get the mode.
        mode = gui_to_str(self.mode.GetValue())

        # Solve for all global models.
        if mode == 'Fully automated':
            # The global model list.
            data.global_models = [
                'local_tm', 'sphere', 'prolate', 'oblate', 'ellipsoid', 'final'
            ]

        # Any global model selected.
        else:
            data.global_models = [mode]

        # Check for vectors.
        vector_check = False
        if 'prolate' in data.global_models or 'oblate' in data.global_models or 'ellipsoid' in data.global_models:
            vector_check = True

        # Spin variables.
        for spin, spin_id in spin_loop(return_id=True):
            # Skip deselected spins.
            if not spin.select:
                continue

            # The message skeleton.
            msg = "Spin '%s' - %s (try the %s user function)." % (spin_id,
                                                                  "%s", "%s")

            # Test if the nuclear isotope type has been set.
            if not hasattr(spin, 'isotope') or spin.isotope == None:
                missing.append(msg % ("nuclear isotope data", "spin.isotope"))

            # Test if the CSA value has been set for the heteronuclei.
            if (hasattr(spin, 'isotope') and spin.isotope
                    in ['15N', '13C']) and (not hasattr(spin, 'csa')
                                            or spin.csa == None):
                missing.append(msg % ("CSA data", "value.set"))

        # Interatomic data container variables.
        for interatom in interatomic_loop():
            # Get the spin containers.
            spin1 = return_spin(spin_hash=interatom._spin_hash1)
            spin2 = return_spin(spin_hash=interatom._spin_hash2)

            # Skip deselected spins.
            if not spin1.select:
                continue
            if not spin2.select:
                continue

            # The message skeleton.
            msg = "Spin pair '%s' and '%s' - %s (try the %s user function)." % (
                interatom.spin_id1, interatom.spin_id2, "%s", "%s")

            # Test if the interatomic distance has been set.
            if not hasattr(interatom, 'r') or interatom.r == None:
                missing.append(msg % ("bond length data", "value.set"))

            # Test if the unit vectors have been loaded.
            if vector_check and (not hasattr(interatom, 'vector')
                                 or interatom.vector is None):
                missing.append(msg %
                               ("unit vectors", "interatom.unit_vectors"))

        # Return the container and list of missing data.
        return data, missing
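
Each assemble_data method starts by instantiating Container and then attaches arbitrary attributes to it. Conceptually this is just an empty attribute bag; a minimal stand-in (relax's own class may add more functionality on top of this) would be:

    # Minimal stand-in for the Container used above.
    class Container(object):
        """An empty object used purely as a named attribute bag."""

    # Usage mirroring assemble_data (the values here are examples only):
    data = Container()
    data.pipe_name = 'mf'
    data.max_iter = 30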
Code Example #11
    def sync_ds(self, upload=False):
        """Synchronise the analysis frame and the relax data store, both ways.

        This method allows the frame information to be uploaded into the relax data store, or for the information in the relax data store to be downloaded by the frame.

        @keyword upload:    A flag which if True will cause the frame to send data to the relax data store.  If False, data will be downloaded from the relax data store to update the frame.
        @type upload:       bool
        """

        # The R1 parameter fitting flag.
        if upload:
            self.data.r1_fit = self.r1_fit.GetValue()
        elif hasattr(self.data, 'r1_fit'):
            self.r1_fit.SetValue(bool(self.data.r1_fit))

        # The numeric solution only flag.
        if upload:
            self.data.numeric_only = self.numeric_only.GetValue()
        elif hasattr(self.data, 'numeric_only'):
            self.numeric_only.SetValue(bool(self.data.numeric_only))

        # The grid incs.
        if upload:
            self.data.grid_inc = gui_to_int(self.grid_inc.GetValue())
        elif hasattr(self.data, 'grid_inc'):
            if self.data.grid_inc == None:
                self.data.grid_inc = default_grid_inc
            self.grid_inc.SetValue(int(self.data.grid_inc))

        # The MC sim number.
        if upload:
            self.data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())
        elif hasattr(self.data, 'mc_sim_num'):
            if self.data.mc_sim_num == None:
                self.data.mc_sim_num = default_mc_sim_num
            self.mc_sim_num.SetValue(int(self.data.mc_sim_num))

        # The EXP MC sim number.
        if upload:
            self.data.exp_mc_sim_num = gui_to_int(self.exp_mc_sim_num.GetValue())
        elif hasattr(self.data, 'exp_mc_sim_num'):
            if self.data.exp_mc_sim_num == None:
                self.data.exp_mc_sim_num = default_exp_mc_sim_num
            self.exp_mc_sim_num.SetValue(int(self.data.exp_mc_sim_num))

        # The All model MC sim flag.
        if upload:
            self.data.mc_sim_all_models = self.mc_sim_all_models.GetValue()
        elif hasattr(self.data, 'mc_sim_all_models'):
            self.mc_sim_all_models.SetValue(bool(self.data.mc_sim_all_models))

        # The insignificance level.
        if upload:
            self.data.insignificance = self.insignificance.GetValue()
            try:
                self.data.insignificance = gui_to_float(self.data.insignificance)
            except:
                pass
        elif hasattr(self.data, 'insignificance'):
            self.insignificance.SetValue(float_to_gui(self.data.insignificance))

        # The results directory.
        if upload:
            self.data.save_dir = gui_to_str(self.field_results_dir.GetValue())
        else:
            self.field_results_dir.SetValue(str_to_gui(self.data.save_dir))

        # The previous run results directory.
        if upload:
            self.data.pre_run_dir = gui_to_str(self.field_pre_run_dir.GetValue())
        elif hasattr(self.data, 'pre_run_dir'):
            self.field_pre_run_dir.SetValue(str_to_gui(self.data.pre_run_dir))

        # The models to use.
        if upload:
            self.data.disp_models = self.model_field.GetValue()
        else:
            self.model_field.set_value(self.data.disp_models)
Code Example #12
    def assemble_data(self):
        """Assemble the data required for the auto-analysis.

        See the docstring for auto_analyses.dauvergne_protocol for details.  All data is taken from the relax data store, so data upload from the GUI to there must have been previously performed.

        @return:    A container with all the data required for the auto-analysis.
        @rtype:     class instance, list of str
        """

        # The data container.
        data = Container()
        missing = []

        # The pipe name and bundle.
        data.pipe_name = self.data.pipe_name
        data.pipe_bundle = self.data.pipe_bundle

        # The model-free models (do not change these unless absolutely necessary).
        data.local_tm_models = self.local_tm_model_field.GetValue()
        data.mf_models = self.mf_model_field.GetValue()

        # Automatic looping over all rounds until convergence (must be a boolean value of True or False).
        data.conv_loop = True

        # Increment size.
        data.inc = gui_to_int(self.grid_inc.GetValue())
        if hasattr(self.data, 'diff_tensor_grid_inc'):
            data.diff_tensor_grid_inc = self.data.diff_tensor_grid_inc
        else:
            data.diff_tensor_grid_inc = {'sphere': 11, 'prolate': 11, 'oblate': 11, 'ellipsoid': 6}

        # The number of Monte Carlo simulations to be used for error analysis at the end of the analysis.
        data.mc_sim_num = gui_to_int(self.mc_sim_num.GetValue())

        # Number of maximum iterations.
        data.max_iter = self.data.max_iter

        # Results directory.
        data.save_dir = self.data.save_dir

        # Check if sequence data is loaded
        if not exists_mol_res_spin_data():
            missing.append("Sequence data")

        # Relaxation data.
        if not hasattr(cdp, 'ri_ids') or len(cdp.ri_ids) == 0:
            missing.append("Relaxation data")

        # Insufficient data.
        if hasattr(cdp, 'ri_ids') and len(cdp.ri_ids) <= 3:
            missing.append("Insufficient relaxation data, 4 or more data sets are essential for the execution of the dauvergne_protocol auto-analysis. Check that you have entered data for a least two spectrometer fields.")

        # Interatomic data containers.
        if not hasattr(cdp, 'interatomic') or len(cdp.interatomic) == 0:
            missing.append("Interatomic data (for the dipole-dipole interaction)")

        # Get the mode.
        mode = gui_to_str(self.mode.GetValue())

        # Solve for all global models.
        if mode == 'Fully automated':
            # The global model list.
            data.global_models = ['local_tm', 'sphere', 'prolate', 'oblate', 'ellipsoid', 'final']

        # Any global model selected.
        else:
            data.global_models = [mode]

        # Check for vectors.
        vector_check = False
        if 'prolate' in data.global_models or 'oblate' in data.global_models or 'ellipsoid' in data.global_models:
            vector_check = True

        # Spin variables.
        for spin, spin_id in spin_loop(return_id=True):
            # Skip deselected spins.
            if not spin.select:
                continue

            # The message skeleton.
            msg = "Spin '%s' - %s (try the %s user function)." % (spin_id, "%s", "%s")

            # Test if the nuclear isotope type has been set.
            if not hasattr(spin, 'isotope') or spin.isotope == None:
                missing.append(msg % ("nuclear isotope data", "spin.isotope"))

            # Test if the CSA value has been set for the heteronuclei.
            if (hasattr(spin, 'isotope') and spin.isotope in ['15N', '13C']) and (not hasattr(spin, 'csa') or spin.csa == None):
                missing.append(msg % ("CSA data", "value.set"))

        # Interatomic data container variables.
        for interatom in interatomic_loop():
            # Get the spin containers.
            spin1 = return_spin(interatom.spin_id1)
            spin2 = return_spin(interatom.spin_id2)

            # Skip deselected spins.
            if not spin1.select:
                continue
            if not spin2.select:
                continue

            # The message skeleton.
            msg = "Spin pair '%s' and '%s' - %s (try the %s user function)." % (interatom.spin_id1, interatom.spin_id2, "%s", "%s")

            # Test if the interatomic distance has been set.
            if not hasattr(interatom, 'r') or interatom.r == None:
                missing.append(msg % ("bond length data", "value.set"))

            # Test if the unit vectors have been loaded.
            if vector_check and (not hasattr(interatom, 'vector') or interatom.vector == None):
                missing.append(msg % ("unit vectors", "interatom.unit_vectors"))

        # Return the container and list of missing data.
        return data, missing