Exemplo n.º 1
0
 def loader(runs):
     """Load *runs* with load_files using the enclosing scope's reduction settings."""
     return load_files(runs, ipf_filename, minimum_spectrum, maximum_spectrum,
                       load_logs=load_logs, load_opts=load_opts)
    def _load_and_scale_container(self, scale_factor, load_opts):
        """
        Load the container file, if one was specified, and multiply it by
        *scale_factor* when that factor is not 1.
        """
        if self._container_data_files is None:
            return

        loaded_workspaces, _ = load_files(self._container_data_files,
                                          self._ipf_filename,
                                          self._spectra_range[0],
                                          self._spectra_range[1],
                                          sum_files=True,
                                          load_logs=self._load_logs,
                                          load_opts=load_opts)
        self._container_workspace = loaded_workspaces[0]

        # A factor of exactly 1.0 would be a no-op, so skip the Scale call
        if scale_factor != 1.0:
            Scale(InputWorkspace=self._container_workspace,
                  OutputWorkspace=self._container_workspace,
                  Factor=scale_factor,
                  Operation='Multiply')
    def _load_and_scale_container(self, scale_factor, load_opts):
        """
        Load the container file, if one was specified, and multiply it by
        *scale_factor* when that factor is not 1.
        """
        if self._container_data_files is None:
            return

        container_wss, _ = load_files(self._container_data_files,
                                      self._ipf_filename,
                                      self._spectra_range[0],
                                      self._spectra_range[1],
                                      sum_files=True,
                                      load_logs=self._load_logs,
                                      load_opts=load_opts)
        self._container_workspace = container_wss[0]

        # Only scale when the factor would actually change the data
        if scale_factor != 1.0:
            Scale(InputWorkspace=self._container_workspace,
                  OutputWorkspace=self._container_workspace,
                  Factor=scale_factor,
                  Operation='Multiply')
 def loader(runs):
     """Delegate to load_files with the surrounding reduction configuration."""
     return load_files(runs,
                       ipf_filename,
                       minimum_spectrum,
                       maximum_spectrum,
                       load_logs=load_logs,
                       load_opts=load_opts)
Exemplo n.º 5
0
    def PyExec(self):
        """
        Run the indirect inelastic reduction: load the raw files, process each
        workspace's monitor, optionally subtract a flat background and divide
        by a calibration workspace, convert to energy transfer, rebin, apply
        detailed-balance and scale corrections, group the spectra and finally
        group the renamed result workspaces into the output group workspace.
        """
        from IndirectReductionCommon import (
            load_files, get_multi_frame_rebin, identify_bad_detectors,
            unwrap_monitor, process_monitor_efficiency, scale_monitor,
            scale_detectors, rebin_reduction, group_spectra, fold_chopped,
            rename_reduction)

        self._setup()
        load_prog = Progress(self, start=0.0, end=0.10, nreports=2)
        load_prog.report('loading files')
        self._workspace_names, self._chopped_data = load_files(
            self._data_files, self._ipf_filename, self._spectra_range[0],
            self._spectra_range[1], self._sum_files, self._load_logs)
        load_prog.report('files loaded')

        # The processing loop accounts for the 0.1 - 0.9 span of the progress bar
        process_prog = Progress(self,
                                start=0.1,
                                end=0.9,
                                nreports=len(self._workspace_names))
        for c_ws_name in self._workspace_names:
            process_prog.report('processing workspace' + c_ws_name)
            # Multi-frame (chopped) data is loaded as a WorkspaceGroup
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(
                c_ws_name, self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                # Set Efixed if given to algorithm
                if self._efixed != Property.EMPTY_DBL:
                    SetInstrumentParameter(Workspace=ws_name,
                                           ComponentName=self._analyser,
                                           ParameterName='Efixed',
                                           ParameterType='Number',
                                           Value=str(self._efixed))

                # Monitors are loaded alongside each workspace under this name
                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): presumably unwrap_monitor returns True when it has
                # already converted the monitor units itself — confirm against
                # IndirectReductionCommon before relying on this.
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Do background removal if a range was provided
                if self._background_range is not None:
                    ConvertToDistribution(Workspace=ws_name)
                    CalculateFlatBackground(InputWorkspace=ws_name,
                                            OutputWorkspace=ws_name,
                                            StartX=self._background_range[0],
                                            EndX=self._background_range[1],
                                            Mode='Mean')
                    ConvertFromDistribution(Workspace=ws_name)

                # Divide by the calibration workspace if one was provided
                if self._calibration_ws is not None:
                    # Crop the calibration to the same spectra range as the data
                    index_min = self._calibration_ws.getIndexFromSpectrumNumber(
                        int(self._spectra_range[0]))
                    index_max = self._calibration_ws.getIndexFromSpectrumNumber(
                        int(self._spectra_range[1]))

                    CropWorkspace(InputWorkspace=self._calibration_ws,
                                  OutputWorkspace=self._calibration_ws,
                                  StartWorkspaceIndex=index_min,
                                  EndWorkspaceIndex=index_max)

                    Divide(LHSWorkspace=ws_name,
                           RHSWorkspace=self._calibration_ws,
                           OutputWorkspace=ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Indirect')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to energy
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='DeltaE',
                             EMode='Indirect')
                CorrectKiKf(InputWorkspace=ws_name,
                            OutputWorkspace=ws_name,
                            EMode='Indirect')

                # Handle rebinning
                rebin_reduction(ws_name, self._rebin_string, rebin_string_2,
                                num_bins)

                # Detailed balance
                if self._detailed_balance != Property.EMPTY_DBL:
                    # NOTE(review): 11.606 is presumably the K-per-meV conversion
                    # constant (1 meV ~ 11.606 K) — confirm the intended units of
                    # the DetailedBalance property.
                    corr_factor = 11.606 / (2 * self._detailed_balance)
                    ExponentialCorrection(InputWorkspace=ws_name,
                                          OutputWorkspace=ws_name,
                                          C0=1.0,
                                          C1=corr_factor,
                                          Operation='Multiply')

                # Scale
                if self._scale_factor != 1.0:
                    Scale(InputWorkspace=ws_name,
                          OutputWorkspace=ws_name,
                          Factor=self._scale_factor,
                          Operation='Multiply')

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors=masked_detectors,
                              method=self._grouping_method,
                              group_file=self._grouping_map_file,
                              group_ws=self._grouping_ws)

            if self._fold_multiple_frames and is_multi_frame:
                fold_chopped(c_ws_name)

            # Convert to output units if needed
            if self._output_x_units != 'DeltaE':
                ConvertUnits(InputWorkspace=c_ws_name,
                             OutputWorkspace=c_ws_name,
                             EMode='Indirect',
                             Target=self._output_x_units)

        # Rename output workspaces
        output_workspace_names = [
            rename_reduction(ws_name, self._sum_files)
            for ws_name in self._workspace_names
        ]

        summary_prog = Progress(self, start=0.9, end=1.0, nreports=4)

        # Group result workspaces
        summary_prog.report('grouping workspaces')
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', mtd[self._output_ws])

        summary_prog.report('Algorithm complete')
    def PyExec(self):
        """
        Run the elastic diffraction reduction: load the sample file ranges,
        apply the calibration, optionally subtract a scaled empty container
        and divide by matching vanadium runs (rebinned to a common binning,
        with zeros replaced to avoid division blow-ups), process the monitors,
        convert to dSpacing, rebin, group the spectra and group the renamed
        results into the output workspace.
        """

        from IndirectReductionCommon import (
            get_multi_frame_rebin, identify_bad_detectors, unwrap_monitor,
            process_monitor_efficiency, scale_monitor, scale_detectors,
            rebin_reduction, group_spectra, fold_chopped, rename_reduction)

        self._setup()

        # VESUVIO needs extra loader options (par file, foil mode, monitors)
        load_opts = dict()
        if self._instrument_name == 'VESUVIO':
            load_opts['InstrumentParFile'] = self._par_filename
            load_opts['Mode'] = 'FoilOut'
            load_opts['LoadMonitors'] = True

        self._workspace_names, self._chopped_data = load_file_ranges(
            self._data_files,
            self._ipf_filename,
            self._spectra_range[0],
            self._spectra_range[1],
            sum_files=self._sum_files,
            load_logs=self._load_logs,
            load_opts=load_opts)

        # applies the changes in the provided calibration file
        self._apply_calibration()
        # Load container if run is given
        self._load_and_scale_container(self._container_scale_factor, load_opts)

        # Load vanadium runs if given
        if self._vanadium_runs:
            # NOTE(review): this unpacks three return values from load_files
            # while other call sites in this file unpack two — confirm the
            # expected return arity for this usage.
            self._vanadium_ws, _, _ = load_files(self._vanadium_runs,
                                                 self._ipf_filename,
                                                 self._spectra_range[0],
                                                 self._spectra_range[1],
                                                 load_logs=self._load_logs,
                                                 load_opts=load_opts)

            # Each sample run is divided by the vanadium run at the same index,
            # so there must be at least as many vanadium runs as sample runs
            if len(self._workspace_names) > len(self._vanadium_runs):
                raise RuntimeError(
                    "There cannot be more sample runs than vanadium runs.")

        for index, c_ws_name in enumerate(self._workspace_names):
            # Multi-frame (chopped) data is loaded as a WorkspaceGroup
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(
                c_ws_name, self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                monitor_ws_name = ws_name + '_mon'

                # Subtract empty container if there is one
                if self._container_workspace is not None:
                    Minus(LHSWorkspace=ws_name,
                          RHSWorkspace=self._container_workspace,
                          OutputWorkspace=ws_name)

                if self._vanadium_ws:
                    van_ws_name = self._vanadium_ws[index]
                    van_ws = mtd[van_ws_name]
                    if self._container_workspace is not None:
                        cont_ws = mtd[self._container_workspace]

                        # Rebin whichever of vanadium/container has the finer
                        # binning so the subtraction operands match
                        if van_ws.blocksize() > cont_ws.blocksize():
                            RebinToWorkspace(
                                WorkspaceToRebin=van_ws_name,
                                WorkspaceToMatch=self._container_workspace,
                                OutputWorkspace=van_ws_name)
                        elif cont_ws.blocksize() > van_ws.blocksize():
                            RebinToWorkspace(
                                WorkspaceToRebin=self._container_workspace,
                                WorkspaceToMatch=van_ws_name,
                                OutputWorkspace=self._container_workspace)

                        Minus(LHSWorkspace=van_ws_name,
                              RHSWorkspace=self._container_workspace,
                              OutputWorkspace=van_ws_name)

                    # Same binning-match dance between sample and vanadium
                    if mtd[ws_name].blocksize() > van_ws.blocksize():
                        RebinToWorkspace(WorkspaceToRebin=ws_name,
                                         WorkspaceToMatch=van_ws_name,
                                         OutputWorkspace=ws_name)
                    elif van_ws.blocksize() > mtd[ws_name].blocksize():
                        RebinToWorkspace(WorkspaceToRebin=van_ws_name,
                                         WorkspaceToMatch=ws_name,
                                         OutputWorkspace=van_ws_name)

                    # Replace (near-)zero vanadium bins with a small positive
                    # value so the subsequent Divide does not blow up
                    replacement_value = 0.1 * find_minimum_non_zero_y_in_workspace(
                        van_ws)
                    logger.information(
                        'Replacing zeros in {0} with {1}.'.format(
                            van_ws_name, replacement_value))
                    ReplaceSpecialValues(
                        InputWorkspace=van_ws_name,
                        SmallNumberThreshold=0.0000001,
                        SmallNumberValue=replacement_value,
                        OutputWorkspace=self._replace_zeros_name)

                    Divide(LHSWorkspace=ws_name,
                           RHSWorkspace=self._replace_zeros_name,
                           OutputWorkspace=ws_name,
                           AllowDifferentNumberSpectra=True)

                    DeleteWorkspace(self._replace_zeros_name)

                # Process monitor
                # NOTE(review): presumably unwrap_monitor returns True when it
                # already converted the monitor units — confirm.
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name, self._rebin_string, rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors=masked_detectors,
                              method=self._grouping_method,
                              group_ws=self._grouping_workspace)

            if is_multi_frame:
                fold_chopped(c_ws_name)

        # Remove the container workspaces
        if self._container_workspace is not None:
            self._delete_all([self._container_workspace])

        # Remove the vanadium workspaces
        if self._vanadium_ws:
            self._delete_all(self._vanadium_ws)

        # Rename output workspaces
        output_workspace_names = [
            rename_reduction(ws_name, self._sum_files)
            for ws_name in self._workspace_names
        ]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
Exemplo n.º 7
0
    def PyExec(self):
        """
        Run the elastic diffraction reduction: load the sample files, apply
        the calibration, optionally subtract a scaled empty container,
        process the monitors, convert to dSpacing, rebin, group the spectra
        and group the renamed result workspaces into the output workspace.
        """
        from IndirectReductionCommon import (
            get_multi_frame_rebin, identify_bad_detectors, unwrap_monitor,
            process_monitor_efficiency, scale_monitor, scale_detectors,
            rebin_reduction, group_spectra, fold_chopped, rename_reduction)

        self._setup()

        # VESUVIO requires the foil-out loading mode
        load_opts = dict()
        if self._instrument_name == 'VESUVIO':
            load_opts['Mode'] = 'FoilOut'

        self._workspace_names, self._chopped_data = load_files(
            self._data_files,
            self._ipf_filename,
            self._spectra_range[0],
            self._spectra_range[1],
            sum_files=self._sum_files,
            load_logs=self._load_logs,
            load_opts=load_opts)

        # applies the changes in the provided calibration file
        self._apply_calibration()
        # Load container if run is given
        self._load_and_scale_container(self._container_scale_factor, load_opts)

        for c_ws_name in self._workspace_names:
            # Multi-frame (chopped) data is loaded as a WorkspaceGroup
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(
                c_ws_name, self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                # Subtract empty container if there is one
                if self._container_workspace is not None:
                    Minus(LHSWorkspace=ws_name,
                          RHSWorkspace=self._container_workspace,
                          OutputWorkspace=ws_name)

                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): presumably unwrap_monitor returns True when it
                # already converted the monitor units — confirm.
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name, self._rebin_string, rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name, masked_detectors, self._grouping_method)

            if is_multi_frame:
                fold_chopped(c_ws_name)

        # Remove the container workspaces (and its loaded monitor workspace)
        if self._container_workspace is not None:
            DeleteWorkspace(self._container_workspace)
            DeleteWorkspace(self._container_workspace + '_mon')

        # Rename output workspaces
        output_workspace_names = [
            rename_reduction(ws_name, self._sum_files)
            for ws_name in self._workspace_names
        ]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
    def PyExec(self):
        """
        Execute the algorithm in diffraction-only mode.

        Loads the sample, vanadium and (optional) container runs, subtracts
        the scaled container from each sample, buckets samples and vanadium
        by dRange, averages runs sharing a dRange, focusses both maps,
        divides sample by vanadium, merges the per-dRange samples into one
        workspace and rescales bins covered by overlapping dRanges.
        """

        # Load all sample, vanadium files
        ipf_file_name = 'OSIRIS_diffraction_diffonly_Parameters.xml'
        sample_ws_names, _ = load_files(self._sample_runs,
                                        ipf_file_name,
                                        self._spec_min,
                                        self._spec_max,
                                        load_logs=self._load_logs)
        vanadium_ws_names, _ = load_files(self._vanadium_runs,
                                          ipf_file_name,
                                          self._spec_min,
                                          self._spec_max,
                                          load_logs=self._load_logs)
        container_ws_names = []

        # Load the container run
        if self._container_files:
            container_ws_names, _ = load_files(self._container_files,
                                               ipf_file_name,
                                               self._spec_min,
                                               self._spec_max,
                                               load_logs=self._load_logs)

            for container in container_ws_names:

                # Scale the container run if required
                if self._container_scale_factor != 1.0:
                    Scale(InputWorkspace=container,
                          OutputWorkspace=container,
                          Factor=self._container_scale_factor,
                          Operation='Multiply')

        # Add the sample workspaces to the dRange to sample map
        # NOTE(review): pairs sample[idx] with container[idx] — assumes the
        # container list has at least as many entries as the sample list;
        # confirm against the property validation.
        self._sam_ws_map = DRangeToWorkspaceMap()
        for idx in range(len(sample_ws_names)):
            sample = sample_ws_names[idx]

            if container_ws_names:
                container = container_ws_names[idx]
                RebinToWorkspace(WorkspaceToRebin=container,
                                 WorkspaceToMatch=sample,
                                 OutputWorkspace=container)

                Minus(LHSWorkspace=sample,
                      RHSWorkspace=container,
                      OutputWorkspace=sample)

            self._sam_ws_map.addWs(sample)

        # Add the vanadium workspaces to the dRange to vanadium map
        self._van_ws_map = DRangeToWorkspaceMap()
        for van in vanadium_ws_names:
            self._van_ws_map.addWs(van)

        # Finished with container now so delete it
        for container in container_ws_names:
            DeleteWorkspace(container)
            DeleteWorkspace(container + "_mon")

        # Check to make sure that there are corresponding vanadium files with the same DRange for each sample file.
        for d_range in self._sam_ws_map.getMap():
            if d_range not in self._van_ws_map.getMap():
                raise RuntimeError("There is no van file that covers the " +
                                   str(d_range) + " DRange.")

        # Average together any sample workspaces with the same DRange.
        # This will mean our map of DRanges to list of workspaces becomes a map
        # of DRanges, each to a *single* workspace.
        temp_sam_map = DRangeToWorkspaceMap()
        for d_range, ws_list in self._sam_ws_map.getMap().items():
            temp_sam_map.setItem(d_range, average_ws_list(ws_list))
        self._sam_ws_map = temp_sam_map

        # Now do the same to the vanadium workspaces.
        temp_van_map = DRangeToWorkspaceMap()
        for d_range, ws_list in self._van_ws_map.getMap().items():
            temp_van_map.setItem(d_range, average_ws_list(ws_list))
        self._van_ws_map = temp_van_map

        # Run necessary algorithms on BOTH the Vanadium and Sample workspaces.
        for d_range, wrksp in list(self._sam_ws_map.getMap().items()) + list(
                self._van_ws_map.getMap().items()):
            self.log().information('Wrksp:' + str(wrksp) + ' Cal:' +
                                   str(self._cal))
            NormaliseByCurrent(InputWorkspace=wrksp, OutputWorkspace=wrksp)
            AlignDetectors(InputWorkspace=wrksp,
                           OutputWorkspace=wrksp,
                           CalibrationFile=self._cal)
            DiffractionFocussing(InputWorkspace=wrksp,
                                 OutputWorkspace=wrksp,
                                 GroupingFileName=self._cal)
            # Keep only the bins inside this workspace's own dRange
            CropWorkspace(InputWorkspace=wrksp,
                          OutputWorkspace=wrksp,
                          XMin=d_range[0],
                          XMax=d_range[1])

        # Divide all sample files by the corresponding vanadium files.
        for sam_ws, van_ws in zip(self._sam_ws_map.getMap().values(),
                                  self._van_ws_map.getMap().values()):
            sam_ws, van_ws = self._rebin_to_smallest(sam_ws, van_ws)
            Divide(LHSWorkspace=sam_ws,
                   RHSWorkspace=van_ws,
                   OutputWorkspace=sam_ws)
            # Vanadium zeros produce NaN/inf in the quotient; zero them out
            ReplaceSpecialValues(InputWorkspace=sam_ws,
                                 OutputWorkspace=sam_ws,
                                 NaNValue=0.0,
                                 InfinityValue=0.0)

        # Create a list of sample workspace NAMES, since we need this for MergeRuns.
        samWsNamesList = list(self._sam_ws_map.getMap().values())

        if len(samWsNamesList) > 1:
            # Merge the sample files into one.
            MergeRuns(InputWorkspaces=samWsNamesList,
                      OutputWorkspace=self._output_ws_name)
            for name in samWsNamesList:
                DeleteWorkspace(Workspace=name)
                DeleteWorkspace(Workspace=name + "_mon")
        else:
            RenameWorkspace(InputWorkspace=samWsNamesList[0],
                            OutputWorkspace=self._output_ws_name)

        result = mtd[self._output_ws_name]

        # Create scalar data to cope with where merge has combined overlapping data.
        intersections = get_intersection_of_ranges(
            list(self._sam_ws_map.getMap().keys()))

        # Bins whose centre lies in an overlap were summed twice by MergeRuns,
        # so mark them with divisor 2; everything else with 1
        dataX = result.dataX(0)
        dataY = []
        dataE = []
        for i in range(0, len(dataX) - 1):
            x_val = (dataX[i] + dataX[i + 1]) / 2.0
            if is_in_ranges(intersections, x_val):
                dataY.append(2)
                dataE.append(2)
            else:
                dataY.append(1)
                dataE.append(1)

        # apply scalar data to result workspace
        for i in range(0, result.getNumberHistograms()):
            resultY = result.dataY(i)
            resultE = result.dataE(i)

            resultY = resultY / dataY
            resultE = resultE / dataE

            result.setY(i, resultY)
            result.setE(i, resultE)

        # Delete all workspaces we've created, except the result.
        for wrksp in self._van_ws_map.getMap().values():
            DeleteWorkspace(Workspace=wrksp)
            DeleteWorkspace(Workspace=wrksp + "_mon")

        self.setProperty("OutputWorkspace", result)
Exemplo n.º 9
0
    def PyExec(self):
        """
        Run the elastic diffraction reduction for foil-out data: load the
        files (with par-file instrument options), process each workspace's
        monitor, convert to dSpacing, rebin, group the spectra and group the
        renamed result workspaces into the output workspace.
        """
        from IndirectReductionCommon import (load_files,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction)

        self._setup()

        # Foil-out loading with the instrument parameter file
        load_opts = dict()
        load_opts['Mode'] = 'FoilOut'
        load_opts['InstrumentParFile'] = self._par_filename

        prog_reporter = Progress(self, start=0.0, end=1.0, nreports=1)

        prog_reporter.report("Loading Files")

        self._workspace_names, self._chopped_data = load_files(self._data_files,
                                                               ipf_filename=self._ipf_filename,
                                                               spec_min=self._spectra_range[0],
                                                               spec_max=self._spectra_range[1],
                                                               sum_files=self._sum_files,
                                                               load_opts=load_opts)


        # Re-scale the progress bar: one report per loaded workspace
        prog_reporter.resetNumSteps(self._workspace_names.__len__(), 0.0, 1.0)

        for c_ws_name in self._workspace_names:
            # Multi-frame (chopped) data is loaded as a WorkspaceGroup
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): presumably unwrap_monitor returns True when it
                # already converted the monitor units — confirm.
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors,
                              self._grouping_method)


            if is_multi_frame:
                fold_chopped(c_ws_name)

            prog_reporter.report()

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
Exemplo n.º 10
0
    def PyExec(self) -> None:
        """
        Execute the indirect inelastic reduction.

        Loads the sample runs, then for each resulting workspace: unwraps and
        normalises by the monitor, optionally removes a flat background and
        divides by a calibration workspace, converts to energy transfer
        (DeltaE) with a ki/kf correction, rebins, applies detailed-balance
        and scale corrections, and groups spectra.  The reduced workspaces
        are then renamed, optionally saved, grouped into the output
        workspace property and optionally plotted.
        """
        from IndirectReductionCommon import (load_files,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction,
                                             save_reduction,
                                             plot_reduction)

        self._setup()
        load_prog = Progress(self, start=0.0, end=0.10, nreports=2)
        load_prog.report('loading files')
        self._workspace_names, self._chopped_data = load_files(self._data_files,
                                                               self._ipf_filename,
                                                               self._spectra_range[0],
                                                               self._spectra_range[1],
                                                               self._sum_files,
                                                               self._load_logs)
        load_prog.report('files loaded')

        process_prog = Progress(self, start=0.1, end=0.9, nreports=len(self._workspace_names))
        for c_ws_name in self._workspace_names:
            process_prog.report('processing workspace' + c_ws_name)
            # A WorkspaceGroup here indicates chopped (multi-frame) data
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            # Bad detectors are identified once per run, from the first frame
            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                # Set Efixed if given to algorithm (EMPTY_DBL means unset)
                if self._efixed != Property.EMPTY_DBL:
                    SetInstrumentParameter(Workspace=ws_name,
                                           ComponentName=self._analyser,
                                           ParameterName='Efixed',
                                           ParameterType='Number',
                                           Value=str(self._efixed))

                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): assumes unwrap_monitor returns True when the
                # monitor was already converted to wavelength — confirm in
                # IndirectReductionCommon
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Do background removal if a range was provided
                if self._background_range is not None:
                    ConvertToDistribution(Workspace=ws_name)
                    CalculateFlatBackground(InputWorkspace=ws_name,
                                            OutputWorkspace=ws_name,
                                            StartX=self._background_range[0],
                                            EndX=self._background_range[1],
                                            Mode='Mean')
                    ConvertFromDistribution(Workspace=ws_name)

                # Divide by the calibration workspace if one was provided
                if self._calibration_ws is not None:
                    index_min = self._calibration_ws.getIndexFromSpectrumNumber(int(self._spectra_range[0]))
                    index_max = self._calibration_ws.getIndexFromSpectrumNumber(int(self._spectra_range[1]))

                    # NOTE(review): this crops the calibration workspace in
                    # place on every pass of the loop — verify the spectrum
                    # indices remain valid after the first crop
                    CropWorkspace(InputWorkspace=self._calibration_ws,
                                  OutputWorkspace=self._calibration_ws,
                                  StartWorkspaceIndex=index_min,
                                  EndWorkspaceIndex=index_max)

                    Divide(LHSWorkspace=ws_name,
                           RHSWorkspace=self._calibration_ws,
                           OutputWorkspace=ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Indirect')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to energy
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='DeltaE',
                             EMode='Indirect')
                CorrectKiKf(InputWorkspace=ws_name,
                            OutputWorkspace=ws_name,
                            EMode='Indirect')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Detailed balance
                # presumably 11.606 is the K-per-meV conversion factor
                # (1 meV ~ 11.6 K) — confirm against instrument docs
                if self._detailed_balance != Property.EMPTY_DBL:
                    corr_factor = 11.606 / (2 * self._detailed_balance)
                    ExponentialCorrection(InputWorkspace=ws_name,
                                          OutputWorkspace=ws_name,
                                          C0=1.0,
                                          C1=corr_factor,
                                          Operation='Multiply')

                # Scale
                if self._scale_factor != 1.0:
                    Scale(InputWorkspace=ws_name,
                          OutputWorkspace=ws_name,
                          Factor=self._scale_factor,
                          Operation='Multiply')

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors=masked_detectors,
                              method=self._grouping_method,
                              group_file=self._grouping_map_file,
                              group_ws=self._grouping_ws)

            # Recombine the individual frames if requested
            if self._fold_multiple_frames and is_multi_frame:
                fold_chopped(c_ws_name)

            # Convert to output units if needed
            if self._output_x_units != 'DeltaE':
                ConvertUnits(InputWorkspace=c_ws_name,
                             OutputWorkspace=c_ws_name,
                             EMode='Indirect',
                             Target=self._output_x_units)

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        summary_prog = Progress(self, start=0.9, end=1.0, nreports=4)

        # Save result workspaces
        if self._save_formats is not None:
            summary_prog.report('saving')
            save_reduction(output_workspace_names,
                           self._save_formats,
                           self._output_x_units)

        # Group result workspaces
        summary_prog.report('grouping workspaces')
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', mtd[self._output_ws])

        # Plot result workspaces
        if self._plot_type != 'None':
            summary_prog.report('Plotting')
            for ws_name in mtd[self._output_ws].getNames():
                plot_reduction(ws_name, self._plot_type)
        summary_prog.report('Algorithm complete')
    def PyExec(self) -> None:
        """
        Execute the indirect diffraction reduction.

        Loads the sample runs (plus optional container and vanadium runs),
        applies the calibration, subtracts the scaled container, divides by
        the matching vanadium, normalises by the monitor, converts to
        dSpacing, rebins, groups spectra and groups the results into the
        output workspace property.
        """
        # BUGFIX: load_files and load_file_ranges were used below but never
        # imported, raising NameError at runtime.
        from IndirectReductionCommon import (load_files,
                                             load_file_ranges,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction)

        self._setup()

        # Extra loader options required for VESUVIO data
        load_opts = dict()
        if self._instrument_name == 'VESUVIO':
            load_opts['InstrumentParFile'] = self._par_filename
            load_opts['Mode'] = 'FoilOut'
            load_opts['LoadMonitors'] = True

        self._workspace_names, self._chopped_data = load_file_ranges(self._data_files,
                                                                     self._ipf_filename,
                                                                     self._spectra_range[0],
                                                                     self._spectra_range[1],
                                                                     sum_files=self._sum_files,
                                                                     load_logs=self._load_logs,
                                                                     load_opts=load_opts)

        # applies the changes in the provided calibration file
        self._apply_calibration()
        # Load container if run is given
        self._load_and_scale_container(self._container_scale_factor, load_opts)

        # Load vanadium runs if given
        if self._vanadium_runs:
            self._vanadium_ws, _ = load_files(self._vanadium_runs,
                                              self._ipf_filename,
                                              self._spectra_range[0],
                                              self._spectra_range[1],
                                              load_logs=self._load_logs,
                                              load_opts=load_opts)

            # Each sample run is divided by the vanadium run at the same index
            if len(self._workspace_names) > len(self._vanadium_runs):
                raise RuntimeError("There cannot be more sample runs than vanadium runs.")

        for index, c_ws_name in enumerate(self._workspace_names):
            # A WorkspaceGroup here indicates chopped (multi-frame) data
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                monitor_ws_name = ws_name + '_mon'

                # Subtract empty container if there is one
                if self._container_workspace is not None:
                    Minus(LHSWorkspace=ws_name,
                          RHSWorkspace=self._container_workspace,
                          OutputWorkspace=ws_name)

                if self._vanadium_ws:
                    van_ws_name = self._vanadium_ws[index]
                    van_ws = mtd[van_ws_name]
                    if self._container_workspace is not None:
                        cont_ws = mtd[self._container_workspace]

                        # Rebin the coarser of vanadium/container onto the
                        # other before subtracting
                        if van_ws.blocksize() > cont_ws.blocksize():
                            RebinToWorkspace(WorkspaceToRebin=van_ws_name,
                                             WorkspaceToMatch=self._container_workspace,
                                             OutputWorkspace=van_ws_name)
                        elif cont_ws.blocksize() > van_ws.blocksize():
                            RebinToWorkspace(WorkspaceToRebin=self._container_workspace,
                                             WorkspaceToMatch=van_ws_name,
                                             OutputWorkspace=self._container_workspace)

                        Minus(LHSWorkspace=van_ws_name,
                              RHSWorkspace=self._container_workspace,
                              OutputWorkspace=van_ws_name)

                    # Match sample and vanadium binning before the division
                    if mtd[ws_name].blocksize() > van_ws.blocksize():
                        RebinToWorkspace(WorkspaceToRebin=ws_name,
                                         WorkspaceToMatch=van_ws_name,
                                         OutputWorkspace=ws_name)
                    elif van_ws.blocksize() > mtd[ws_name].blocksize():
                        RebinToWorkspace(WorkspaceToRebin=van_ws_name,
                                         WorkspaceToMatch=ws_name,
                                         OutputWorkspace=van_ws_name)

                    Divide(LHSWorkspace=ws_name,
                           RHSWorkspace=van_ws_name,
                           OutputWorkspace=ws_name,
                           AllowDifferentNumberSpectra=True)

                # Process monitor
                # NOTE(review): assumes unwrap_monitor returns True when the
                # monitor was already converted to wavelength — confirm in
                # IndirectReductionCommon
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors=masked_detectors,
                              method=self._grouping_method,
                              group_ws=self._grouping_workspace)

            if is_multi_frame:
                fold_chopped(c_ws_name)

        # Remove the container workspaces
        if self._container_workspace is not None:
            self._delete_all([self._container_workspace])

        # Remove the vanadium workspaces
        if self._vanadium_ws:
            self._delete_all(self._vanadium_ws)

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
Exemplo n.º 12
0
    def PyExec(self) -> None:
        """
        Execute the indirect inelastic reduction.

        Loads the sample runs, then for each resulting workspace: normalises
        by the monitor, optionally removes a flat background and divides by
        a calibration workspace, converts to energy transfer (DeltaE) with a
        ki/kf correction, rebins, applies detailed-balance and scale
        corrections, and groups spectra.  The reduced workspaces are then
        renamed, optionally saved, grouped into the output workspace
        property and optionally plotted.
        """
        from IndirectReductionCommon import (load_files,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction,
                                             save_reduction,
                                             plot_reduction)

        self._setup()
        self._workspace_names, self._chopped_data = load_files(self._data_files,
                                                               self._ipf_filename,
                                                               self._spectra_range[0],
                                                               self._spectra_range[1],
                                                               self._sum_files,
                                                               self._load_logs)

        for c_ws_name in self._workspace_names:
            # A WorkspaceGroup here indicates chopped (multi-frame) data
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): assumes unwrap_monitor returns True when the
                # monitor was already converted to wavelength — confirm in
                # IndirectReductionCommon
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Do background removal if a range was provided
                if self._background_range is not None:
                    ConvertToDistribution(Workspace=ws_name)
                    CalculateFlatBackground(InputWorkspace=ws_name,
                                            OutputWorkspace=ws_name,
                                            StartX=self._background_range[0],
                                            EndX=self._background_range[1],
                                            Mode='Mean')
                    ConvertFromDistribution(Workspace=ws_name)

                # Divide by the calibration workspace if one was provided
                if self._calibration_ws is not None:
                    Divide(LHSWorkspace=ws_name,
                           RHSWorkspace=self._calibration_ws,
                           OutputWorkspace=ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Indirect')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to energy
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='DeltaE',
                             EMode='Indirect')
                CorrectKiKf(InputWorkspace=ws_name,
                            OutputWorkspace=ws_name,
                            EMode='Indirect')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Detailed balance
                # presumably 11.606 is the K-per-meV conversion factor
                # (1 meV ~ 11.6 K) — confirm against instrument docs
                if self._detailed_balance is not None:
                    corr_factor = 11.606 / (2 * self._detailed_balance)
                    ExponentialCorrection(InputWorkspace=ws_name,
                                          OutputWorkspace=ws_name,
                                          C0=1.0,
                                          C1=corr_factor,
                                          Operation='Multiply')

                # Scale
                # BUGFIX: the Scale algorithm's property is 'InputWorkspace',
                # not 'InputWorkspaces'; the old keyword raised at runtime.
                if self._scale_factor != 1.0:
                    Scale(InputWorkspace=ws_name,
                          OutputWorkspace=ws_name,
                          Factor=self._scale_factor,
                          Operation='Multiply')

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors,
                              self._grouping_method,
                              self._grouping_map_file,
                              self._grouping_ws)

            # Recombine the individual frames if requested
            if self._fold_multiple_frames and is_multi_frame:
                fold_chopped(c_ws_name)

            # Convert to output units if needed
            if self._output_x_units != 'DeltaE':
                ConvertUnits(InputWorkspace=c_ws_name,
                             OutputWorkspace=c_ws_name,
                             EMode='Indirect',
                             Target=self._output_x_units)

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        # Save result workspaces
        if self._save_formats is not None:
            save_reduction(output_workspace_names,
                           self._save_formats,
                           self._output_x_units)

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)

        # Plot result workspaces
        if self._plot_type != 'None':
            for ws_name in mtd[self._output_ws].getNames():
                plot_reduction(ws_name, self._plot_type)
    def PyExec(self) -> None:
        """
        Execute the indirect diffraction reduction.

        Loads the sample runs (plus an optional scaled container run which
        is subtracted), normalises each workspace by the monitor, converts
        to dSpacing, rebins, groups spectra and groups the results into the
        output workspace property.
        """
        from IndirectReductionCommon import (load_files,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction)

        self._setup()

        # Extra loader options required for VESUVIO data
        load_opts = dict()
        if self._instrument_name == 'VESUVIO':
            load_opts['Mode'] = 'FoilOut'

        self._workspace_names, self._chopped_data = load_files(self._data_files,
                                                               self._ipf_filename,
                                                               self._spectra_range[0],
                                                               self._spectra_range[1],
                                                               sum_files=self._sum_files,
                                                               load_logs=self._load_logs,
                                                               load_opts=load_opts)

        # Load container if run is given
        if self._container_data_files is not None:
            # Container runs are always summed into a single workspace
            self._container_workspace, _ = load_files(self._container_data_files,
                                                      self._ipf_filename,
                                                      self._spectra_range[0],
                                                      self._spectra_range[1],
                                                      sum_files=True,
                                                      load_logs=self._load_logs,
                                                      load_opts=load_opts)
            self._container_workspace = self._container_workspace[0]

            # Scale container if factor is given
            if self._container_scale_factor != 1.0:
                Scale(InputWorkspace=self._container_workspace,
                      OutputWorkspace=self._container_workspace,
                      Factor=self._container_scale_factor,
                      Operation='Multiply')

        for c_ws_name in self._workspace_names:
            # A WorkspaceGroup here indicates chopped (multi-frame) data
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                # Subtract empty container if there is one
                if self._container_workspace is not None:
                    Minus(LHSWorkspace=ws_name,
                          RHSWorkspace=self._container_workspace,
                          OutputWorkspace=ws_name)

                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): assumes unwrap_monitor returns True when the
                # monitor was already converted to wavelength — confirm in
                # IndirectReductionCommon
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors,
                              self._grouping_method)

            if is_multi_frame:
                fold_chopped(c_ws_name)

        # Remove the container workspaces (including its monitor workspace)
        if self._container_workspace is not None:
            DeleteWorkspace(self._container_workspace)
            DeleteWorkspace(self._container_workspace + '_mon')

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
Exemplo n.º 14
0
    def PyExec(self) -> None:
        """
        Execute the (deprecated) VESUVIO diffraction reduction.

        Loads the sample runs with VESUVIO-specific loader options,
        normalises each workspace by the monitor, converts to dSpacing,
        rebins, groups spectra and groups the results into the output
        workspace property.
        """
        # BUGFIX: corrected the misspelling "depreciated" -> "deprecated"
        # in the user-facing deprecation warning.
        warnings.warn("This algorithm is deprecated (April-2017). Please use ISISIndirectDiffractionReduction")

        from IndirectReductionCommon import (load_files,
                                             get_multi_frame_rebin,
                                             identify_bad_detectors,
                                             unwrap_monitor,
                                             process_monitor_efficiency,
                                             scale_monitor,
                                             scale_detectors,
                                             rebin_reduction,
                                             group_spectra,
                                             fold_chopped,
                                             rename_reduction)

        self._setup()

        # Loader options specific to LoadVesuvio
        load_opts = dict()
        load_opts['Mode'] = 'FoilOut'
        load_opts['InstrumentParFile'] = self._par_filename
        # Tell LoadVesuvio to load the monitors and keep them in the output
        load_opts['LoadMonitors'] = True

        prog_reporter = Progress(self, start=0.0, end=1.0, nreports=1)

        prog_reporter.report("Loading Files")
        self._workspace_names, self._chopped_data = load_files(self._data_files,
                                                               ipf_filename=self._ipf_filename,
                                                               spec_min=self._spectra_range[0],
                                                               spec_max=self._spectra_range[1],
                                                               sum_files=self._sum_files,
                                                               load_opts=load_opts)

        # One progress step per loaded workspace
        prog_reporter.resetNumSteps(len(self._workspace_names), 0.0, 1.0)

        for c_ws_name in self._workspace_names:
            # A WorkspaceGroup here indicates chopped (multi-frame) data
            is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)

            # Get list of workspaces
            if is_multi_frame:
                workspaces = mtd[c_ws_name].getNames()
            else:
                workspaces = [c_ws_name]

            # Process rebinning for framed data
            rebin_string_2, num_bins = get_multi_frame_rebin(c_ws_name,
                                                             self._rebin_string)

            masked_detectors = identify_bad_detectors(workspaces[0])

            # Process workspaces
            for ws_name in workspaces:
                monitor_ws_name = ws_name + '_mon'

                # Process monitor
                # NOTE(review): assumes unwrap_monitor returns True when the
                # monitor was already converted to wavelength — confirm in
                # IndirectReductionCommon
                if not unwrap_monitor(ws_name):
                    ConvertUnits(InputWorkspace=monitor_ws_name,
                                 OutputWorkspace=monitor_ws_name,
                                 Target='Wavelength',
                                 EMode='Elastic')

                process_monitor_efficiency(ws_name)
                scale_monitor(ws_name)

                # Scale detector data by monitor intensities
                scale_detectors(ws_name, 'Elastic')

                # Remove the no longer needed monitor workspace
                DeleteWorkspace(monitor_ws_name)

                # Convert to dSpacing
                ConvertUnits(InputWorkspace=ws_name,
                             OutputWorkspace=ws_name,
                             Target='dSpacing',
                             EMode='Elastic')

                # Handle rebinning
                rebin_reduction(ws_name,
                                self._rebin_string,
                                rebin_string_2,
                                num_bins)

                # Group spectra
                group_spectra(ws_name,
                              masked_detectors,
                              self._grouping_method)

            if is_multi_frame:
                fold_chopped(c_ws_name)

            prog_reporter.report()

        # Rename output workspaces
        output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]

        # Group result workspaces
        GroupWorkspaces(InputWorkspaces=output_workspace_names,
                        OutputWorkspace=self._output_ws)

        self.setProperty('OutputWorkspace', self._output_ws)
Exemplo n.º 15
0
    def PyExec(self):
        """
        Execute the algorithm in diffraction-only mode.

        Pipeline:
          1. Load sample, vanadium and (optional) container runs and sort
             them into d-range -> workspace maps.
          2. Subtract the (optionally scaled) container from the samples.
          3. Calibrate the sample and vanadium maps, then divide sample
             by vanadium for each matching d-range.
          4. Merge the per-d-range results into one output workspace and
             rescale the regions where d-ranges overlapped.
        """

        # Load all sample, vanadium files
        ipf_file_name = 'OSIRIS_diffraction_diffonly_Parameters.xml'
        # NOTE(review): presumably strips monitor spectra at load time -- confirm
        load_opts = {"DeleteMonitors": True}

        sample_ws_names, _ = load_files(self._sample_runs,
                                        ipf_file_name,
                                        self._spec_min,
                                        self._spec_max,
                                        load_logs=self._load_logs,
                                        load_opts=load_opts)
        # Add the sample workspaces to the sample d-range map;
        # workspaces sharing a d-range are combined via rebin_and_average
        self._sam_ws_map.add_workspaces(
            [mtd[sample_ws_name] for sample_ws_name in sample_ws_names],
            rebin_and_average)

        vanadium_ws_names, _ = load_files(self._vanadium_runs,
                                          ipf_file_name,
                                          self._spec_min,
                                          self._spec_max,
                                          load_logs=self._load_logs,
                                          load_opts=load_opts)
        # Add the vanadium workspaces to the vanadium drange map
        self._van_ws_map.add_workspaces(
            [mtd[vanadium_ws_name] for vanadium_ws_name in vanadium_ws_names],
            rebin_and_average)

        # Load the container run
        if self._container_files:
            container_ws_names, _ = load_files(self._container_files,
                                               ipf_file_name,
                                               self._spec_min,
                                               self._spec_max,
                                               load_logs=self._load_logs,
                                               load_opts=load_opts)

            # Scale the container run if required
            if self._container_scale_factor != 1.0:
                self._con_ws_map.add_workspaces([
                    mtd[container_ws_name] * self._container_scale_factor
                    for container_ws_name in container_ws_names
                ], rebin_and_average)
            else:
                self._con_ws_map.add_workspaces([
                    mtd[container_ws_name]
                    for container_ws_name in container_ws_names
                ], rebin_and_average)

            # Subtract container from sample for each d-range present in both maps
            result_map = self._sam_ws_map.combine(self._con_ws_map,
                                                  rebin_and_subtract)
            self._delete_workspaces(container_ws_names)
        else:
            result_map = self._sam_ws_map

        # Run necessary algorithms on the Sample workspaces.
        self._calibrate_runs_in_map(result_map)

        # Run necessary algorithms on the Vanadium workspaces.
        self._calibrate_runs_in_map(self._van_ws_map)

        # Divide all sample files by the corresponding vanadium files.
        result_map = result_map.combine(self._van_ws_map, divide_workspace)

        # Workspaces in vanadium map are no longer in the ADS - can safely delete
        # vanadium workspaces in ADS.
        self._delete_workspaces(vanadium_ws_names)

        # Workspaces in sample map are no longer in the ADS - can safely delete
        # sample workspaces in the ADS.
        self._delete_workspaces(sample_ws_names)

        if len(result_map) > 1:
            # Workspaces must be added to the ADS, as there does not yet exist
            # a workspace list property (must be passed to merge runs by name).
            # NOTE(review): sample_ws_names are reused here purely as ADS name
            # slots for the per-d-range results; assumes result_map has no more
            # entries than there were sample runs -- confirm
            for sample_ws_name, sample_ws in zip(sample_ws_names,
                                                 result_map.values()):
                mtd.addOrReplace(sample_ws_name, sample_ws)

            # Merge the sample files into one.
            output_ws = MergeRuns(InputWorkspaces=sample_ws_names,
                                  OutputWorkspace="merged_sample_runs",
                                  StoreInADS=False,
                                  EnableLogging=False)
            self._delete_workspaces(sample_ws_names)
        elif len(result_map) == 1:
            output_ws = list(result_map.values())[0]
        else:
            # No d-range survived the combine steps: report which d-ranges were
            # found in each input set and abort without setting an output.
            logger.error("D-Ranges found in runs have no overlap:\n" +
                         "Found Sample D-Ranges: " +
                         ", ".join(map(str, self._sam_ws_map.keys())) + "\n" +
                         "Found Container D-Ranges: " +
                         ", ".join(map(str, self._con_ws_map.keys())) + "\n" +
                         "Found Vanadium D-Ranges: " +
                         ", ".join(map(str, self._van_ws_map.keys())))
            return

        if self._output_ws_name:
            mtd.addOrReplace(self._output_ws_name, output_ws)

        # Record which d-ranges contributed to the reduction in the sample logs
        d_ranges = result_map.keys()
        AddSampleLog(Workspace=output_ws,
                     LogName="D-Ranges",
                     LogText="D-Ranges used for reduction: " +
                     ", ".join(map(str, d_ranges)))

        # Create scalar data to cope with where merge has combined overlapping data.
        intersections = get_intersection_of_ranges(d_ranges)

        # Build per-bin divisors: 2 inside a d-range intersection (where the
        # merge combined two contributions), 1 elsewhere. Bin centre is used
        # to test range membership.
        data_x = output_ws.dataX(0)
        data_y = []
        data_e = []
        for i in range(0, len(data_x) - 1):
            x_val = (data_x[i] + data_x[i + 1]) / 2.0

            if is_in_ranges(intersections, x_val):
                data_y.append(2)
                data_e.append(2)
            else:
                data_y.append(1)
                data_e.append(1)

        # apply scalar data to result workspace
        for i in range(0, output_ws.getNumberHistograms()):
            result_y = output_ws.dataY(i)
            result_e = output_ws.dataE(i)

            result_y = result_y / data_y
            result_e = result_e / data_e

            output_ws.setY(i, result_y)
            output_ws.setE(i, result_e)

        self.setProperty("OutputWorkspace", output_ws)
Exemplo n.º 16
0
    def PyExec(self):
        """
        Execute the algorithm in diffraction-only mode.

        Pipeline:
          1. Load sample, vanadium and (optional) container runs.
          2. Scale the container (if requested) and subtract it from each
             sample run.
          3. Sort sample/vanadium workspaces into d-range maps, averaging
             workspaces that share a d-range.
          4. Normalise, align, focus and crop every sample and vanadium
             workspace (see the _calibrate_map helper).
          5. Divide sample by vanadium, merge all d-ranges into a single
             output workspace and rescale the overlap regions.
        """

        # Load all sample, vanadium files
        ipf_file_name = 'OSIRIS_diffraction_diffonly_Parameters.xml'
        # NOTE(review): presumably strips monitor spectra at load time -- confirm
        load_opts = {"DeleteMonitors": True}

        sample_ws_names, _ = load_files(self._sample_runs,
                                        ipf_file_name,
                                        self._spec_min,
                                        self._spec_max,
                                        load_logs=self._load_logs,
                                        load_opts=load_opts)

        vanadium_ws_names, _ = load_files(self._vanadium_runs,
                                          ipf_file_name,
                                          self._spec_min,
                                          self._spec_max,
                                          load_logs=self._load_logs,
                                          load_opts=load_opts)

        container_ws_names = []
        container_workspaces = []

        # Load the container run
        if self._container_files:
            container_ws_names, _ = load_files(self._container_files,
                                               ipf_file_name,
                                               self._spec_min,
                                               self._spec_max,
                                               load_logs=self._load_logs,
                                               load_opts=load_opts)

            # Scale the container run if required
            if self._container_scale_factor != 1.0:

                # Retrieve function pointers in advance to avoid expensive hash
                # function on each loop iteration (improves performance)
                scale_get_property, scale_set_property, scale_exec = self._init_child_algorithm(
                    "Scale")
                scale_set_property("Operation", "Multiply")

                # Scale every container workspace
                for container_ws_name in container_ws_names:
                    scale_set_property("InputWorkspace", container_ws_name)
                    scale_set_property("Factor", self._container_scale_factor)
                    scale_exec()
                    container_workspaces.append(
                        scale_get_property("OutputWorkspace").value)
            else:
                container_workspaces = container_ws_names

        # Initialize rebin algorithm and retrieve function pointers to improve performance
        rebin_get_property, rebin_set_property, rebin_exec \
            = self._init_child_algorithm("RebinToWorkspace")

        # Initialize minus algorithm and retrieve function pointers to improve performance
        minus_get_property, minus_set_property, minus_exec \
            = self._init_child_algorithm("Minus")

        # Add the sample workspaces to the dRange to sample map
        for idx, sample_ws_name in enumerate(sample_ws_names):

            if container_workspaces:
                # Rebin the container onto the sample binning, then subtract it
                rebin_set_property("WorkspaceToRebin",
                                   container_workspaces[idx])
                rebin_set_property("WorkspaceToMatch", sample_ws_name)
                rebin_exec()

                minus_set_property("LHSWorkspace", sample_ws_name)
                minus_set_property("RHSWorkspace",
                                   rebin_get_property("OutputWorkspace").value)
                minus_exec()
                sample_ws = minus_get_property("OutputWorkspace").value
            else:
                sample_ws = mtd[sample_ws_name]

            # Use a manually-specified d-range when one was provided for this
            # run index, otherwise let the map determine it.
            if self._man_d_range is not None and idx < len(self._man_d_range):
                self._sam_ws_map.add_ws(sample_ws, self._man_d_range[idx])
            else:
                self._sam_ws_map.add_ws(sample_ws)

        # Initialize delete workspace algorithm and retrieve function pointers
        # to improve performance
        _, delete_set_property, delete_exec \
            = self._init_child_algorithm("DeleteWorkspace")

        # Finished with container workspaces, so delete them
        for container_ws_name in container_ws_names:
            delete_set_property("Workspace", container_ws_name)
            delete_exec()

        # Add the vanadium workspaces to the vanadium drange map
        self._add_to_drange_map(vanadium_ws_names, self._van_ws_map)

        # Check to make sure that there are corresponding vanadium files with the same DRange for each sample file.
        for d_range in self._sam_ws_map:
            if d_range not in self._van_ws_map:
                raise RuntimeError("There is no van file that covers the " +
                                   str(d_range) + " DRange.")

        # Average together any sample workspaces with the same DRange.
        # This will mean our map of DRanges to list of workspaces becomes a map
        # of DRanges, each to a *single* workspace.
        self._sam_ws_map.average_across_dranges()

        # Now do the same to the vanadium workspaces
        self._van_ws_map.average_across_dranges()

        # Create NormaliseByCurrent algorithm and retrieve function pointers to improve performance
        normalise_get_property, normalise_set_property, normalise_exec \
            = self._init_child_algorithm("NormaliseByCurrent")

        # Create AlignDetectors algorithm and retrieve function pointers to improve performance
        align_get_property, align_set_property, align_exec \
            = self._init_child_algorithm("AlignDetectors")

        # Create DiffractionFocussing algorithm and retrieve function pointers to improve performance
        diff_focus_get_property, diff_focus_set_property, diff_focus_exec \
            = self._init_child_algorithm("DiffractionFocussing")

        # Create CropWorkspace algorithm and retrieve function pointers to improve performance
        crop_get_property, crop_set_property, crop_exec \
            = self._init_child_algorithm("CropWorkspace")

        def _calibrate_map(ws_map):
            # Normalise by current, align detectors, focus and crop to the
            # d-range for every workspace in ws_map, storing the calibrated
            # workspace back under its d-range key. Shared helper for the
            # sample and vanadium maps (previously duplicated loops).
            for d_range, wrksp in ws_map.items():
                normalise_set_property("InputWorkspace", wrksp)
                normalise_exec()

                align_set_property(
                    "InputWorkspace",
                    normalise_get_property("OutputWorkspace").value)
                align_set_property("CalibrationFile", self._cal)
                align_exec()

                diff_focus_set_property(
                    "InputWorkspace",
                    align_get_property("OutputWorkspace").value)
                diff_focus_set_property("GroupingFileName", self._cal)
                diff_focus_exec()

                crop_set_property(
                    "InputWorkspace",
                    diff_focus_get_property("OutputWorkspace").value)
                crop_set_property("XMin", d_range[0])
                crop_set_property("XMax", d_range[1])
                crop_exec()

                ws_map[d_range] = crop_get_property("OutputWorkspace").value

        # Run necessary algorithms on the Sample workspaces.
        _calibrate_map(self._sam_ws_map)

        # Run necessary algorithms on the Vanadium workspaces.
        _calibrate_map(self._van_ws_map)

        # Workspaces in vanadium map are no longer in the ADS - can safely delete
        # vanadium workspaces in ADS.
        self._delete_workspaces(vanadium_ws_names, delete_set_property,
                                delete_exec)

        # Divide all sample files by the corresponding vanadium files.
        divided = self._divide_all_by(self._sam_ws_map.values(),
                                      self._van_ws_map.values())

        # Workspaces must be added to the ADS, as there does not yet exist
        # a workspace list property (must be passed to merge runs by name).
        for sample_ws_name, sample_ws in zip(sample_ws_names, divided):
            mtd.addOrReplace(sample_ws_name, sample_ws)

        if len(divided) > 1:
            # Merge the sample files into one.
            merge_runs_alg = self.createChildAlgorithm("MergeRuns",
                                                       enableLogging=False)
            merge_runs_alg.setProperty("InputWorkspaces", sample_ws_names)
            merge_runs_alg.execute()
            output_ws = merge_runs_alg.getProperty("OutputWorkspace").value
        else:
            output_ws = divided[0]

        # Sample workspaces are now finished with and can be deleted
        # safely from the ADS.
        self._delete_workspaces(sample_ws_names, delete_set_property,
                                delete_exec)

        mtd.addOrReplace(self._output_ws_name, output_ws)

        # Record which d-ranges were used in the sample logs
        add_log_alg = self.createChildAlgorithm("AddSampleLog",
                                                enableLogging=False)
        add_log_alg.setProperty("Workspace", output_ws)
        add_log_alg.setProperty("LogName", "D-Ranges")
        add_log_alg.setProperty(
            "LogText",
            "D-Ranges used for reduction: " + self.getPropertyValue("Drange"))

        result = mtd[self._output_ws_name]

        # Create scalar data to cope with where merge has combined overlapping data.
        intersections = get_intersection_of_ranges(self._sam_ws_map.keys())

        # Build per-bin divisors: 2 inside a d-range intersection (where the
        # merge combined two contributions), 1 elsewhere. Bin centre is used
        # to test range membership.
        data_x = result.dataX(0)
        data_y = []
        data_e = []
        for i in range(0, len(data_x) - 1):
            x_val = (data_x[i] + data_x[i + 1]) / 2.0

            if is_in_ranges(intersections, x_val):
                data_y.append(2)
                data_e.append(2)
            else:
                data_y.append(1)
                data_e.append(1)

        # apply scalar data to result workspace
        for i in range(0, result.getNumberHistograms()):
            result_y = result.dataY(i)
            result_e = result.dataE(i)

            result_y = result_y / data_y
            result_e = result_e / data_e

            result.setY(i, result_y)
            result.setE(i, result_e)

        self.setProperty("OutputWorkspace", result)