Example 1
 def _groupDetectors(self, mainWS):
     """Group detectors with similar thetas."""
     instrument = mainWS.getInstrument()
     fileHandle, path = tempfile.mkstemp(suffix='.xml', prefix='grouping-{}-'.format(instrument.getName()))
     # We don't need the handle, just the path.
     os.close(fileHandle)
     angleStepProperty = self.getProperty(common.PROP_GROUPING_ANGLE_STEP)
     if angleStepProperty.isDefault:
         if instrument.hasParameter('natural-angle-step'):
             angleStep = instrument.getNumberParameter('natural-angle-step', recursive=False)[0]
             self._report.notice('Using grouping angle step of {} degrees from the IPF.'.format(angleStep))
         else:
             angleStep = 0.01
             self._report.notice('Using the default grouping angle step of {} degrees.'.format(angleStep))
     else:
         angleStep = angleStepProperty.value
     GenerateGroupingPowder(InputWorkspace=mainWS,
                            AngleStep=angleStep,
                            GroupingFilename=path,
                            GenerateParFile=False,
                            EnableLogging=self._subalgLogging)
     try:
         groupedWSName = self._names.withSuffix('grouped_detectors')
         groupedWS = GroupDetectors(InputWorkspace=mainWS,
                                    OutputWorkspace=groupedWSName,
                                    MapFile=path,
                                    KeepUngroupedSpectra=False,
                                    Behaviour='Average',
                                    EnableLogging=self._subalgLogging)
         self._cleanup.cleanup(mainWS)
         return groupedWS
     finally:
         os.remove(path)
Example 2
 def _group_detectors(input_ws, grouping_pattern):
     """Groups detectors of the input workspace according to the provided pattern and returns
     the name of the grouped workspace."""
     output_ws = f'{input_ws}_grouped'
     GroupDetectors(InputWorkspace=input_ws, OutputWorkspace=output_ws,
                    GroupingPattern=grouping_pattern, Behaviour='Average')
     return output_ws
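A hedged usage sketch for the helper above (the workspace name is hypothetical): Mantid's GroupingPattern syntax separates groups with ',', covers an inclusive range of workspace indices with '-', and combines an explicit pair with '+'.

 # Hypothetical usage: a workspace named 'sample' must already be in the ADS.
 # '0-63' groups workspace indices 0..63, '64+65' groups just those two;
 # with Behaviour='Average' each group is averaged rather than summed.
 grouped = _group_detectors('sample', '0-63,64+65')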
Example 3
    def fitted_in_dspacing(self,
                           fitted_in_tof: Union[str, Workspace2D],
                           workspace_with_instrument: Union[str, Workspace],
                           output_workspace: str,
                           dspacing_bin_width: float = 0.001,
                           grouping_workspace: Optional[str] = None) -> None:
        r"""
        Create one spectrum of fitted peaks in d-spacing for each pixel or group of detectors

        This algorithm will perform a unit conversion from TOF to d-spacing. The instrument geometry
        for the conversion is provided by the instrument embedded in `workspace_with_instrument`,
        instead of the instrument embedded in `fitted_in_tof`.

        @param fitted_in_tof : fitted spectra in TOF, one per pixel
        @param workspace_with_instrument : workspace providing the instrument with the desired geometry
        @param output_workspace : name of the workspace containing the spectra in d-spacing
        @param dspacing_bin_width : bin width for the spectra of `output_workspace`
        @param grouping_workspace: if provided, generate one spectrum in d-spacing for each of the
            groups specified in this grouping workspace.
        """
        CloneWorkspace(InputWorkspace=fitted_in_tof,
                       OutputWorkspace=output_workspace)
        CopyInstrumentParameters(InputWorkspace=workspace_with_instrument,
                                 OutputWorkspace=output_workspace)
        ConvertUnits(InputWorkspace=output_workspace,
                     OutputWorkspace=output_workspace,
                     Target='dSpacing',
                     Emode='Elastic')
        # Rebin spectra to common bin boundaries. This is required if grouping is desired
        peak_centers_reference = self.getProperty('PeakPositions').value
        dspacing_extra = 1.0  # 1 Angstrom
        dspacing_min = max(0, min(peak_centers_reference) - dspacing_extra)
        dspacing_max = max(peak_centers_reference) + dspacing_extra
        Rebin(InputWorkspace=output_workspace,
              OutputWorkspace=output_workspace,
              Params=[dspacing_min, dspacing_bin_width,
                      dspacing_max])  # bin width defaults to 0.001 Angstrom
        # Optional grouping into banks
        if grouping_workspace is not None:
            GroupDetectors(InputWorkspace=output_workspace,
                           OutputWorkspace=output_workspace,
                           CopyGroupingFromWorkspace=grouping_workspace)
            insert_bank_numbers(
                output_workspace,
                grouping_workspace)  # label each spectrum with the bank number
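A hedged usage sketch (workspace names are hypothetical): inside the same algorithm, after fitting peaks in TOF, the method could be called as

    # Hypothetical call; 'fitted_tof', 'calibrated' and 'groups' are assumed
    # to be workspaces created earlier by the algorithm.
    self.fitted_in_dspacing(fitted_in_tof='fitted_tof',
                            workspace_with_instrument='calibrated',
                            output_workspace='fitted_dsp',
                            grouping_workspace='groups')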
Example 4
 def _groupDetectors(self, mainWS, wsNames, wsCleanup, subalgLogging):
     """Group detectors with similar thetas."""
     import os
     import tempfile
     groups = _createDetectorGroups(mainWS)
     instrumentName = mainWS.getInstrument().getName()
     groupsXml = _detectorGroupsToXml(groups, instrumentName)
     fileHandle, path = tempfile.mkstemp(suffix='.xml', prefix='grouping-{}-'.format(instrumentName))
     # We only need the path; close the unused handle before writing the file.
     os.close(fileHandle)
     _writeXml(groupsXml, path)
     groupedWSName = wsNames.withSuffix('grouped_detectors')
     groupedWS = GroupDetectors(InputWorkspace=mainWS,
                                OutputWorkspace=groupedWSName,
                                MapFile=path,
                                KeepUngroupedSpectra=False,
                                Behaviour='Average',
                                EnableLogging=subalgLogging)
     wsCleanup.cleanup(mainWS)
     os.remove(path)
     return groupedWS
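The helpers _createDetectorGroups, _detectorGroupsToXml and _writeXml are not shown in this example. A minimal sketch of _writeXml, assuming the grouping has already been serialised to an XML string in Mantid's detector-grouping file format (the schema shown in the comment is an assumption, not part of the example):

 def _writeXml(xmlString, path):
     """Write a grouping XML string to the given path (sketch only)."""
     # A Mantid grouping file maps group names to detector IDs, roughly:
     # <detector-grouping>
     #   <group name="1"><detids val="1-128"/></group>
     # </detector-grouping>
     with open(path, 'w') as xmlFile:
         xmlFile.write(xmlString)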
Example 5
    def PyExec(self):
        _background = bool(self.getProperty("Background").value)
        _load_inst = bool(self.getProperty("LoadInstrument").value)
        _norm_current = bool(self.getProperty("NormaliseByCurrent").value)
        _detcal = bool(self.getProperty("DetCal").value)
        _masking = bool(self.getProperty("MaskFile").value)
        _grouping = bool(self.getProperty("GroupingFile").value)
        _anvred = bool(self.getProperty("SphericalAbsorptionCorrection").value)
        _SA_name = self.getPropertyValue("SolidAngleOutputWorkspace")
        _Flux_name = self.getPropertyValue("FluxOutputWorkspace")

        XMin = self.getProperty("MomentumMin").value
        XMax = self.getProperty("MomentumMax").value
        rebin_param = ','.join([str(XMin), str(XMax), str(XMax)])

        Load(Filename=self.getPropertyValue("Filename"),
             OutputWorkspace='__van',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)

        if _norm_current:
            NormaliseByCurrent(InputWorkspace='__van', OutputWorkspace='__van')

        if _background:
            Load(Filename=self.getProperty("Background").value,
                 OutputWorkspace='__bkg',
                 FilterByTofMin=self.getProperty("FilterByTofMin").value,
                 FilterByTofMax=self.getProperty("FilterByTofMax").value)
            if _norm_current:
                NormaliseByCurrent(InputWorkspace='__bkg',
                                   OutputWorkspace='__bkg')
            else:
                pc_van = mtd['__van'].run().getProtonCharge()
                pc_bkg = mtd['__bkg'].run().getProtonCharge()
                mtd['__bkg'] *= pc_van / pc_bkg
            mtd['__bkg'] *= self.getProperty('BackgroundScale').value
            Minus(LHSWorkspace='__van',
                  RHSWorkspace='__bkg',
                  OutputWorkspace='__van')
            DeleteWorkspace('__bkg')

        if _load_inst:
            LoadInstrument(Workspace='__van',
                           Filename=self.getProperty("LoadInstrument").value,
                           RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__van',
                           Filename=self.getProperty("DetCal").value)

        if _masking:
            LoadMask(Instrument=mtd['__van'].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
            MaskDetectors(Workspace='__van', MaskedWorkspace='__mask')
            DeleteWorkspace('__mask')

        ConvertUnits(InputWorkspace='__van',
                     OutputWorkspace='__van',
                     Target='Momentum')
        Rebin(InputWorkspace='__van',
              OutputWorkspace='__van',
              Params=rebin_param)
        CropWorkspace(InputWorkspace='__van',
                      OutputWorkspace='__van',
                      XMin=XMin,
                      XMax=XMax)

        if _anvred:
            AnvredCorrection(InputWorkspace='__van',
                             OutputWorkspace='__van',
                             LinearScatteringCoef=self.getProperty(
                                 "LinearScatteringCoef").value,
                             LinearAbsorptionCoef=self.getProperty(
                                 "LinearAbsorptionCoef").value,
                             Radius=self.getProperty("Radius").value,
                             OnlySphericalAbsorption='1',
                             PowerLambda='0')

        # Create solid angle
        Rebin(InputWorkspace='__van',
              OutputWorkspace=_SA_name,
              Params=rebin_param,
              PreserveEvents=False)

        # Create flux
        if _grouping:
            GroupDetectors(InputWorkspace='__van',
                           OutputWorkspace='__van',
                           MapFile=self.getProperty("GroupingFile").value)
        else:
            SumSpectra(InputWorkspace='__van', OutputWorkspace='__van')

        Rebin(InputWorkspace='__van',
              OutputWorkspace='__van',
              Params=rebin_param)
        flux = mtd['__van']
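        # rebin_param gives a single bin spanning [XMin, XMax], so readY(i)[0]
        # holds the total counts of spectrum i; dividing each event spectrum by
        # it (and its error) normalises the spectra before flux integration.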
        for i in range(flux.getNumberHistograms()):
            el = flux.getSpectrum(i)
            if flux.readY(i)[0] > 0:
                el.divide(flux.readY(i)[0], flux.readE(i)[0])
        SortEvents(InputWorkspace='__van', SortBy="X Value")
        IntegrateFlux(InputWorkspace='__van',
                      OutputWorkspace=_Flux_name,
                      NPoints=10000)
        DeleteWorkspace('__van')

        self.setProperty("SolidAngleOutputWorkspace", mtd[_SA_name])
        self.setProperty("FluxOutputWorkspace", mtd[_Flux_name])
Example 6
    def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace:
        """
        Load the data with given grouping
        """
        # grouping config
        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4
        number_of_runs = len(runs)

        x_dim = 480 * 8 // grouping
        y_dim = 512 // grouping

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs + 3)

        for n, run in enumerate(runs):
            progress.report('Loading: ' + run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512 * 480 * 8), dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank' + str(b + 1) + '_events/event_id'].value,
                                      minlength=512 * 480 * 8)
                bc = bc.reshape((480 * 8, 512))
                if grouping > 1:
                    # Sum every grouping x grouping block of pixels into one
                    # bin (equivalent to the explicit 2x2 / 4x4 index sums).
                    bc = bc.reshape(480 * 8 // grouping, grouping,
                                    512 // grouping, grouping).sum(axis=(1, 3))
                data_array[n] = bc
                s1_array.append(
                    f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'].value[0])
                duration_array.append(float(f['/entry/duration'].value[0]))
                run_number_array.append(float(f['/entry/run_number'].value[0]))
                monitor_count_array.append(
                    float(f['/entry/monitor1/total_counts'].value[0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace",
                                                 enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty(
            "Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim + 0.5, x_dim + 0.5,
                                                     number_of_runs + 0.5))
        createWS_alg.setProperty(
            "NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0],
                                 MetaDataOnly=True,
                                 EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                            'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                            'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        time_ns_array = _tmp_ws.run().startTime().totalNanoseconds() \
            + np.append(0, np.cumsum(duration_array) * 1e9)[:-1]

        try:
            ub_log = _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]
            ub = np.array(re.findall(r'-?\d+\.*\d*', ub_log),
                          dtype=float).reshape(3, 3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[1, 0, 0],
                              [0, np.cos(sgl), np.sin(sgl)],
                              [0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                              [-np.sin(sgu), np.cos(sgu), 0],
                              [0, 0, 1]])
            # Apply the goniometer tilts to the UB matrix
            UB = sgl_a.dot(sgu_a).dot(ub)
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                                       EnableLogging=False)

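            # Assign one group number per grouping x grouping block of pixels;
            # dataY of the grouping workspace holds the group id per detector.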
            group_number = 0
            for x in range(0, 480 * 8, grouping):
                for y in range(0, 512, grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y + i + (x + j) * 512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                                     CopyGroupingFromWorkspace=_tmp_group,
                                     EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws,
                              dEAnalysisMode='Elastic',
                              EnableLogging=False,
                              PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        add_time_series_property('s1',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, s1_array)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        add_time_series_property('duration',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, duration_array)
        outWS.getExperimentInfo(0).run().getProperty(
            'duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number',
                                                     run_number_array, True)
        add_time_series_property('monitor_count',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, monitor_count_array)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta,
                                                     True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal,
                                                     True)

        setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                                      enableLogging=False)
        setGoniometer_alg.setProperty("Workspace", outWS)
        setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
        setGoniometer_alg.setProperty("Average", False)
        setGoniometer_alg.execute()

        return outWS
Example 7
def create_group_from_string(input_workspace, grouping_string):
    from mantid.simpleapi import GroupDetectors
    return GroupDetectors(InputWorkspace=input_workspace,
                          Behaviour='Average',
                          SpectraList=get_group_from_string(grouping_string),
                          StoreInADS=False)
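get_group_from_string is not shown above. A minimal sketch of a compatible parser, assuming the grouping string is a comma-separated list of spectrum numbers and inclusive '-' ranges (the exact grammar is an assumption):

def get_group_from_string(grouping_string):
    # Sketch only: expands '1-3,5' into [1, 2, 3, 5].
    spectra = []
    for token in grouping_string.split(','):
        if '-' in token:
            first, last = (int(value) for value in token.split('-'))
            spectra.extend(range(first, last + 1))
        else:
            spectra.append(int(token))
    return spectra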
Example 8
 def PyExec(self):
     raw_ws = self.getProperty('InputWorkspace').value
     sample_geometry = self.getPropertyValue('SampleGeometry')
     sample_material = self.getPropertyValue('SampleMaterial')
     cal_file_name = self.getPropertyValue('CalFileName')
     SetSample(InputWorkspace=raw_ws,
               Geometry=sample_geometry,
               Material=sample_material)
     # find the closest monitor to the sample for incident spectrum
     raw_spec_info = raw_ws.spectrumInfo()
     incident_index = None
     for i in range(raw_spec_info.size()):
         if raw_spec_info.isMonitor(i):
             l2 = raw_spec_info.position(i)[2]
             if incident_index is None:
                 incident_index = i
             else:
                 if raw_spec_info.position(incident_index)[2] < l2 < 0:
                     incident_index = i
     monitor = ExtractSpectra(InputWorkspace=raw_ws, WorkspaceIndexList=[incident_index])
     monitor = ConvertUnits(InputWorkspace=monitor, Target="Wavelength")
     x_data = monitor.dataX(0)
     min_x = np.min(x_data)
     max_x = np.max(x_data)
     width_x = (max_x - min_x) / x_data.size
     fit_spectra = FitIncidentSpectrum(InputWorkspace=monitor,
                                       BinningForCalc=[min_x, 1 * width_x, max_x],
                                       BinningForFit=[min_x, 10 * width_x, max_x],
                                       FitSpectrumWith="CubicSpline")
     self_scattering_correction = CalculatePlaczekSelfScattering(InputWorkspace=raw_ws,
                                                                 IncidentSpectra=fit_spectra,
                                                                 ScaleByPackingFraction=False,
                                                                 Version=1)
     # Convert to Q
     self_scattering_correction = ConvertUnits(InputWorkspace=self_scattering_correction,
                                               Target="MomentumTransfer", EMode='Elastic')
     cal_workspace = LoadCalFile(InputWorkspace=self_scattering_correction,
                                 CalFileName=cal_file_name,
                                 Workspacename='cal_workspace',
                                 MakeOffsetsWorkspace=False,
                                 MakeMaskWorkspace=False,
                                 MakeGroupingWorkspace=True)
     ssc_min_x, ssc_max_x = float('inf'), float('-inf')
     for index in range(self_scattering_correction.getNumberHistograms()):
         spec_info = self_scattering_correction.spectrumInfo()
         if not spec_info.isMasked(index) and not spec_info.isMonitor(index):
             ssc_x_data = np.ma.masked_invalid(self_scattering_correction.dataX(index))
             if np.min(ssc_x_data) < ssc_min_x:
                 ssc_min_x = np.min(ssc_x_data)
             if np.max(ssc_x_data) > ssc_max_x:
                 ssc_max_x = np.max(ssc_x_data)
     ssc_width_x = (ssc_max_x - ssc_min_x) / ssc_x_data.size
     # TO DO: calculate rebin parameters per group
     # and run GroupDetectors on each separately
     self_scattering_correction = Rebin(InputWorkspace=self_scattering_correction,
                                        Params=[ssc_min_x, ssc_width_x, ssc_max_x],
                                        IgnoreBinErrors=True)
     self_scattering_correction = GroupDetectors(InputWorkspace=self_scattering_correction,
                                                 CopyGroupingFromWorkspace='cal_workspace_group')
     n_pixel = np.zeros(self_scattering_correction.getNumberHistograms())
     for i in range(cal_workspace.getNumberHistograms()):
         grouping = cal_workspace.dataY(i)
         if grouping[0] > 0:
             n_pixel[int(grouping[0] - 1)] += 1
     correction_ws = CreateWorkspace(DataY=n_pixel, DataX=[0, 1],
                                     NSpec=self_scattering_correction.getNumberHistograms())
     self_scattering_correction = Divide(LHSWorkspace=self_scattering_correction, RHSWorkspace=correction_ws)
     DeleteWorkspace('cal_workspace_group')
     DeleteWorkspace(correction_ws)
     DeleteWorkspace(fit_spectra)
     DeleteWorkspace(monitor)
     DeleteWorkspace(raw_ws)
     self.setProperty('OutputWorkspace', self_scattering_correction)
Example 9
def group_spectra(workspace_name, masked_detectors, method, group_file=None, group_ws=None):
    """
    Groups spectra in a given workspace according to the Workflow.GroupingMethod and
    Workflow.GroupingFile parameters and GroupingPolicy property.

    @param workspace_name Name of workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    """
    from mantid.simpleapi import (MaskDetectors, GroupDetectors)

    instrument = mtd[workspace_name].getInstrument()

    # If grouping as per the IPF is desired
    if method == 'IPF':
        # Get the grouping method from the parameter file
        try:
            grouping_method = instrument.getStringParameter('Workflow.GroupingMethod')[0]
        except IndexError:
            grouping_method = 'Individual'

    else:
        # Otherwise use the value of GroupingPolicy
        grouping_method = method

    logger.information('Grouping method for workspace %s is %s' % (workspace_name, grouping_method))

    if grouping_method == 'Individual':
        # Nothing to do here
        return

    elif grouping_method == 'All':
        # Get a list of all spectra minus those which are masked
        num_spec = mtd[workspace_name].getNumberHistograms()
        spectra_list = [spec for spec in range(0, num_spec) if spec not in masked_detectors]

        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       WorkspaceIndexList=spectra_list)

    elif grouping_method == 'File':
        # Get the filename for the grouping file
        if group_file is not None:
            grouping_file = group_file
        else:
            try:
                grouping_file = instrument.getStringParameter('Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError('Cannot get grouping file from properties or IPF.')

        # If the file is not found assume it is in the grouping files directory
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(config.getString('groupingFiles.directory'), grouping_file)

        # If it is still not found just give up
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' % grouping_file)

        # Mask detectors if required
        if len(masked_detectors) > 0:
            MaskDetectors(Workspace=workspace_name,
                          WorkspaceIndexList=masked_detectors)

        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       MapFile=grouping_file)

    elif grouping_method == 'Workspace':
        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       CopyGroupingFromWorkspace=group_ws)

    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' % (grouping_method, workspace_name))
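A hedged usage sketch (the workspace name is hypothetical): group every unmasked spectrum of a workspace already in the ADS into a single averaged spectrum.

# Hypothetical usage: 'sample_red' must already exist in the ADS.
group_spectra('sample_red', masked_detectors=[], method='All')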