Example #1
    def get_state(self):
        """
            Returns an object with the state of the interface
        """
        m = self.get_editing_state()
        state = DataSeries(data_class=REFLDataSets)
        state_list = []
        
        # Common Q binning
        q_min = float(self._summary.q_min_edit.text())
        q_step = float(self._summary.q_step_edit.text())
        if self._summary.log_scale_chk.isChecked():
            q_step = -q_step
            
        # Angle offset
        if hasattr(m, "angle_offset"):
            angle_offset = float(self._summary.angle_offset_edit.text())
            angle_offset_error = float(self._summary.angle_offset_error_edit.text())
                
        for i in range(self._summary.angle_list.count()):
            data = self._summary.angle_list.item(i).data(QtCore.Qt.UserRole).toPyObject()
            # Overwrite Q binning with the common binning
            data.q_min = q_min
            data.q_step = q_step
        
            # Overwrite angle offset
            if hasattr(data, "angle_offset"):
                data.angle_offset = angle_offset
                data.angle_offset_error = angle_offset_error

            state_list.append(data)
        state.data_sets = state_list
        
        return state
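
The state object returned above is meant to round-trip through XML. A minimal usage sketch, assuming DataSeries exposes a to_xml() counterpart to the from_xml() call shown in Example #3 (the widget variable and file name are hypothetical):

    # Hypothetical: persist the interface state produced by get_state()
    state = widget.get_state()
    with open("reduction_template.xml", "w") as fd:
        fd.write(state.to_xml())  # assumes a to_xml() serializer exists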
Example #2
    def __init__(self,
                 parent=None,
                 state=None,
                 settings=None,
                 name="REFL",
                 data_proxy=None):
        super(DataReflWidget, self).__init__(parent,
                                             state,
                                             settings,
                                             data_proxy=data_proxy)

        class SummaryFrame(QtGui.QFrame,
                           ui.reflectometer.ui_data_refl_simple.Ui_Frame):
            def __init__(self, parent=None):
                QtGui.QFrame.__init__(self, parent)
                self.setupUi(self)

        self.short_name = name
        self._settings.instrument_name = name

        self._summary = SummaryFrame(self)
        self.initialize_content()
        self._layout.addWidget(self._summary)

        if state is not None:
            self.set_state(state)
        else:
            self.set_state(DataSeries(data_class=REFLDataSets))
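
A minimal sketch of embedding the widget in a host application, assuming Qt is available; GeneralSettings is a hypothetical stand-in for the settings object the real application passes in:

    # Hypothetical embedding of the widget (GeneralSettings is a placeholder)
    import sys
    app = QtGui.QApplication(sys.argv)
    widget = DataReflWidget(settings=GeneralSettings(), name="REFL")
    widget.show()
    sys.exit(app.exec_())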
Example #3
    def _read_template(self, sequence_number):
        """
            Read template from file.
            @param sequence_number: the ID of the data set within the sequence of runs
        """
        template_file = self.getProperty("TemplateFile").value
        with open(template_file, "r") as fd:
            xml_str = fd.read()
        s = DataSeries()
        s.from_xml(xml_str)

        if len(s.data_sets) >= sequence_number:
            data_set = s.data_sets[sequence_number - 1]
        elif len(s.data_sets) > 0:
            data_set = s.data_sets[0]
        else:
            raise RuntimeError("Invalid reduction template")

        self.data_series_template = s

        return data_set
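
The lookup is 1-based and degrades gracefully: a sequence number past the end of the series falls back to the first entry, and only an empty series is an error. The same selection rule in isolation, as a standalone sketch:

    # Standalone sketch of the selection rule used by _read_template()
    def _select_data_set(data_sets, sequence_number):
        if len(data_sets) >= sequence_number:
            return data_sets[sequence_number - 1]  # sequence_number is 1-based
        elif len(data_sets) > 0:
            return data_sets[0]                    # fall back to the first entry
        raise RuntimeError("Invalid reduction template")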
Example #4
    def get_state(self):
        """
            Returns an object with the state of the interface
        """
        m = self.get_editing_state()
        state = DataSeries(data_class=REFMDataSets)
        state_list = []

        # Common Q binning
        q_min = float(self._summary.q_min_edit.text())
        q_step = float(self._summary.q_step_edit.text())
        q_bins = int(math.ceil(float(self._summary.q_step_edit.text())))

        # Angle offset
        if hasattr(m, "angle_offset"):
            angle_offset = float(self._summary.angle_offset_edit.text())
            angle_offset_error = float(self._summary.angle_offset_error_edit.text())

        for i in range(self._summary.angle_list.count()):
            data = self._summary.angle_list.item(i).data(QtCore.Qt.UserRole)
            # Overwrite Q binning with the common binning
            data.q_min = q_min
            data.q_step = q_step

            # Overwrite angle offset
            if hasattr(data, "angle_offset"):
                data.angle_offset = angle_offset
                data.angle_offset_error = angle_offset_error

            if hasattr(data, "q_bins"):
                data.q_bins = q_bins
                data.q_log = self._summary.log_scale_chk.isChecked()

            if hasattr(data, "output_dir"):
                data.output_dir = self._summary.outdir_edit.text()

            state_list.append(data)
        state.data_sets = state_list

        return state
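
The hasattr() guards let the same get_state() shape serve data classes with different attribute sets (REFL versus REFM here): fields such as q_bins and output_dir are copied only onto data sets that actually define them. A minimal illustration of the pattern (the Dummy class is hypothetical):

    # Hypothetical illustration of the hasattr-guard pattern
    class Dummy(object):
        angle_offset = 0.0          # defined, so the guard lets it be overwritten

    d = Dummy()
    if hasattr(d, "angle_offset"):
        d.angle_offset = 0.016      # executes: the attribute exists
    if hasattr(d, "q_bins"):
        d.q_bins = 100              # skipped: Dummy defines no q_bins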
Example #5
    def get_state(self):
        """
            Returns an object with the state of the interface
        """
        m = self.get_editing_state()
        state = DataSeries(data_class=REFLDataSets)
        state_list = []

        # Common Q binning
        q_min = float(self._summary.q_min_edit.text())
        q_step = float(self._summary.q_step_edit.text())
        if self._summary.log_scale_chk.isChecked():
            q_step = -q_step

        # Scaling factor file
        # data.scaling_factor_file_flag = self._summary.use_sf_config_switch.isChecked()

        # Angle offset
        if hasattr(m, "angle_offset"):
            angle_offset = float(self._summary.angle_offset_edit.text())
            angle_offset_error = float(
                self._summary.angle_offset_error_edit.text())

        for i in range(self._summary.angle_list.count()):
            data = self._summary.angle_list.item(i).data(QtCore.Qt.UserRole)
            # Overwrite Q binning with the common binning
            data.q_min = q_min
            data.q_step = q_step

            # Overwrite angle offset
            if hasattr(data, "angle_offset"):
                data.angle_offset = angle_offset
                data.angle_offset_error = angle_offset_error

            state_list.append(data)
        state.data_sets = state_list

        return state
Example #6
    def __init__(self, parent=None, state=None, settings=None, name="REFM", data_proxy=None):
        super(DataReflWidget, self).__init__(parent, state, settings, data_proxy=data_proxy)

        class SummaryFrame(QtGui.QFrame, ui.reflectometer.ui_refm_reduction.Ui_Frame):
            def __init__(self, parent=None):
                QtGui.QFrame.__init__(self, parent)
                self.setupUi(self)

        self.short_name = name
        self._settings.instrument_name = name

        self._summary = SummaryFrame(self)
        self.initialize_content()
        self._layout.addWidget(self._summary)

        self._detector_distance = 1.0
        self._sangle_parameter = 0.0

        if state is not None:
            self.set_state(state)
        else:
            self.set_state(DataSeries(data_class=REFMDataSets))
Example #7
    def _create_template(self, run_number, first_run_of_set, sequence_number):
        """
            Create a new template according to the meta-data
            @param run_number: run number according to the data file name
            @param first_run_of_set: first run in the sequence (sequence ID)
            @param sequence_number: the ID of the data set within the sequence of runs
        """
        # If a template file already exists, load it and only overwrite
        # the part we are dealing with here.
        template_file = self._get_output_template_path(first_run_of_set)
        if os.path.isfile(template_file):
            with open(template_file, "r") as fd:
                xml_str = fd.read()
            s = DataSeries()
            s.from_xml(xml_str)
        else:
            s = DataSeries()

        # Now we have an initial template
        self.data_series_template = s

        # Get the TOF range
        tof_range = self._get_tof_range()

        # TODO: sync up names with new DAS
        # Get information from meta-data
        meta_data_run = self.event_data.getRun()
        incident_medium = self._read_property(meta_data_run, "incident_medium", "medium")
        q_min = self._read_property(meta_data_run, "output_q_min", 0.001)
        q_step = -abs(self._read_property(meta_data_run, "output_q_step", 0.02))
        dQ_constant = self._read_property(meta_data_run, "dq_constant", 0.004)
        dQ_slope = self._read_property(meta_data_run, "dq_slope", 0.02)
        angle_offset = self._read_property(meta_data_run, "angle_offset", 0.016)
        angle_offset_err = self._read_property(meta_data_run, "angle_offset_error", 0.001)
        sf_file = self._read_property(meta_data_run, "scaling_factor_file", "/SNS/REF_L/shared/sf.txt")
        if len(sf_file.strip()) == 0:
            logger.error("No scaling factor file supplied")

        def _new_data_set():
            d = DataSets()
            d.NormFlag = True
            d.DataBackgroundFlag = True
            d.data_x_range_flag = True
            d.norm_x_range_flag = True
            d.DataTofRange = tof_range
            d.NormBackgroundFlag = True
            d.slits_width_flag = True
            d.incident_medium_list = [incident_medium]
            d.incident_medium_index_selected = 0
            d.angle_offset = angle_offset
            d.angle_offset_error = angle_offset_err

            d.q_min = q_min
            d.q_step = q_step
            d.fourth_column_dq0 = dQ_constant
            d.fourth_column_dq_over_q = dQ_slope
            d.scaling_factor_file = sf_file
            return d

        # Copy over the existing series, up to the point we are at
        new_data_sets = []
        # First, copy over the entries in the existing template, up to the point previous to the current point
        for i in range(min(int(run_number) - int(first_run_of_set), len(s.data_sets))):
            sequence_id = int(first_run_of_set) + i
            logger.information("Copying %s" % sequence_id)
            d = s.data_sets[i]
            d.data_files = [sequence_id]
            new_data_sets.append(d)

        running_id = len(new_data_sets)
        # Pad the items between what we have and the current point
        for i in range(running_id, int(run_number) - int(first_run_of_set) + 1):
            sequence_id = int(first_run_of_set) + i
            logger.information("Adding %s" % sequence_id)
            d = _new_data_set()
            d.data_files = [sequence_id]
            new_data_sets.append(d)

        self.data_series_template.data_sets = new_data_sets

        data_set = self.data_series_template.data_sets[sequence_number - 1]

        # Find direct beam peaks
        self._get_direct_beam(meta_data_run, data_set)

        return data_set
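
The two loops at the end rebuild the series up to the current run: existing template entries are copied first, then freshly defaulted entries are padded in so the series always covers runs first_run_of_set through run_number. A worked sketch of the arithmetic, with hypothetical numbers:

    # Hypothetical numbers illustrating the copy/pad arithmetic
    first_run_of_set, run_number = 1000, 1003
    existing = 2                                            # len(s.data_sets)
    copied = min(run_number - first_run_of_set, existing)   # 2 entries: runs 1000, 1001
    padded = list(range(copied, run_number - first_run_of_set + 1))
    # padded == [2, 3], i.e. new defaults for runs 1002 and 1003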
Example #8
    def _create_template(self, run_number, first_run_of_set, sequence_number):
        """
            Create a new template according to the meta-data
            @param run_number: run number according to the data file name
            @param first_run_of_set: first run in the sequence (sequence ID)
            @param sequence_number: the ID of the data set within the sequence of runs
        """
        # If a template file already exists, load it and only overwrite
        # the part we are dealing with here.
        template_file = self._get_output_template_path(first_run_of_set)
        if os.path.isfile(template_file):
            logger.notice("Loading existing template: %s" % template_file)
            with open(template_file, "r") as fd:
                xml_str = fd.read()
            s = DataSeries()
            s.from_xml(xml_str)
        else:
            s = DataSeries()

        # Now we have an initial template
        self.data_series_template = s

        # Get the TOF range
        tof_range = self._get_tof_range()

        # Get information from meta-data
        meta_data_run = self.event_data.getRun()
        _incident_medium = self.getProperty("IncidentMedium").value
        incident_medium = self._read_property(meta_data_run, "incident_medium",
                                              _incident_medium, is_string=True)

        q_min = self._read_property(meta_data_run, "output_q_min", 0.001)
        q_step = -abs(self._read_property(meta_data_run, "output_q_step", 0.02))
        dQ_constant = self._read_property(meta_data_run, "dq_constant", 0.004)
        dQ_slope = self._read_property(meta_data_run, "dq_slope", 0.02)
        angle_offset = self._read_property(meta_data_run, "angle_offset", 0.016)
        angle_offset_err = self._read_property(meta_data_run, "angle_offset_error", 0.001)

        _primary_range = self.getProperty("PrimaryFractionRange").value
        _primary_min = int(_primary_range[0])
        _primary_max = int(_primary_range[1])
        # The DAS logs are all stored as floats, but we are expecting an integer
        primary_min = math.trunc(float(self._read_property(meta_data_run, "primary_range_min", _primary_min)))
        primary_max = math.trunc(float(self._read_property(meta_data_run, "primary_range_max", _primary_max)))

        _sf_file = self.getProperty("ScalingFactorFile").value
        sf_file = self._read_property(meta_data_run, "scaling_factor_file",
                                      _sf_file, is_string=True)

        def _new_data_set():
            d = DataSets()
            d.NormFlag = True
            d.DataBackgroundFlag = True
            d.data_x_range_flag = True
            d.norm_x_range_flag = True
            d.DataTofRange = tof_range
            d.NormBackgroundFlag = True
            d.slits_width_flag = True
            d.incident_medium_list = [incident_medium]
            d.incident_medium_index_selected = 0
            d.angle_offset = angle_offset
            d.angle_offset_error = angle_offset_err
            d.clocking_from = primary_min
            d.clocking_to = primary_max
            d.q_min = q_min
            d.q_step = q_step
            d.fourth_column_dq0 = dQ_constant
            d.fourth_column_dq_over_q = dQ_slope
            d.scaling_factor_file = sf_file
            return d

        # Copy over the existing series, up to the point we are at
        new_data_sets = []
        # First, copy over the entries in the existing template,
        # up to the point previous to the current point
        for i in range(min(int(run_number) - int(first_run_of_set), len(s.data_sets))):
            sequence_id = int(first_run_of_set) + i
            logger.information("Copying %s" % sequence_id)
            d = s.data_sets[i]
            d.data_files = [sequence_id]
            new_data_sets.append(d)

        running_id = len(new_data_sets)
        # Pad the items between what we have and the current point
        for i in range(running_id, int(run_number) - int(first_run_of_set) + 1):
            sequence_id = int(first_run_of_set) + i
            logger.information("Adding %s" % sequence_id)
            d = _new_data_set()
            d.data_files = [sequence_id]
            new_data_sets.append(d)

        self.data_series_template.data_sets = new_data_sets

        data_set = self.data_series_template.data_sets[sequence_number - 1]

        # Find direct beam peaks
        self._get_direct_beam(meta_data_run, data_set)

        return data_set
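
The defaults here come from algorithm properties rather than hard-coded values, with _read_property consulting the DAS sample logs first; its implementation is not shown in these examples. A plausible sketch of such a helper, assuming Mantid's Run.hasProperty()/Run.getProperty() API (the details are guesses, not the project's actual code):

    # Plausible sketch only -- not the project's actual implementation
    def _read_property(self, run, name, default, is_string=False):
        if run.hasProperty(name):
            value = run.getProperty(name).value
            # Time-series logs come back as arrays; keep the latest entry
            if hasattr(value, "__len__") and not isinstance(value, str):
                value = value[-1]
            return str(value) if is_string else value
        return default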
Example #9
    def __init__(self):
        from reduction_gui.reduction.reflectometer.refl_data_script import DataSets as REFLDataSets
        from reduction_gui.reduction.reflectometer.refl_data_series import DataSeries
        self._state = DataSeries(data_class=REFLDataSets)