def _group_spectra(self, ws):
     """
         Groups spectra by summing them in blocks and rebuilds the spectrum
         axis with the averaged axis value of each block.
         @param ws : the name of the input workspace
     """
     new_axis = []
     start_index = 0
     axis = mtd[ws].getAxis(1).extractValues()
     grouped = self._hide('grouped')
     name = grouped
     while start_index < len(axis):
         end = axis[start_index] + self._rebin_width
         end_index = np.argwhere(axis < end)[-1][0]
         SumSpectra(InputWorkspace=ws, OutputWorkspace=name,
                    StartWorkspaceIndex=int(start_index), EndWorkspaceIndex=int(end_index))
         count = end_index - start_index + 1
         Scale(InputWorkspace=name, OutputWorkspace=name, Factor=1./count)
         new_axis.append(np.sum(axis[start_index:end_index + 1]) / count)
         if name != grouped:
             AppendSpectra(InputWorkspace1=grouped, InputWorkspace2=name, OutputWorkspace=grouped)
             DeleteWorkspace(name)
         start_index = end_index + 1
         name = self._hide('ws_{0}'.format(start_index))
     spectrum_axis = NumericAxis.create(len(new_axis))
     for i in range(len(new_axis)):
         spectrum_axis.setValue(i, new_axis[i])
     mtd[grouped].replaceAxis(1, spectrum_axis)
     RenameWorkspace(InputWorkspace=grouped, OutputWorkspace=ws)
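For reference, a minimal self-contained sketch of the axis-replacement pattern this example relies on, assuming a standard Mantid environment; the workspace name and the axis values are purely illustrative.

from mantid.simpleapi import CreateWorkspace, mtd
from mantid.api import NumericAxis

# A toy workspace with three spectra and two points per spectrum.
CreateWorkspace(OutputWorkspace='demo_grouped',
                DataX=[0., 1.] * 3, DataY=[1., 2., 3., 4., 5., 6.], NSpec=3)
new_axis_values = [10.0, 20.0, 30.0]  # one value per spectrum
spectrum_axis = NumericAxis.create(len(new_axis_values))
for i, value in enumerate(new_axis_values):
    spectrum_axis.setValue(i, value)
# Axis index 1 is the vertical (spectrum) axis of a MatrixWorkspace.
mtd['demo_grouped'].replaceAxis(1, spectrum_axis)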
Example #3
 def _format_width_workspace(width_workspace, temperatures, run_numbers,
                             x_axis_is_temperature):
     number_of_temperatures = len(temperatures)
     axis = NumericAxis.create(number_of_temperatures)
     for index in range(number_of_temperatures):
         # The slice keeps only the last three digits of the run number, so a plot versus run number stays uncluttered when runs have five or more digits.
         value = float(
             temperatures[index]) if x_axis_is_temperature else float(
                 run_numbers[index][-3:])
         axis.setValue(index, value)
     width_workspace.replaceAxis(1, axis)
     width_workspace.setYUnitLabel("Temperature")
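A hedged usage sketch for the helper above, assuming it is in scope in a Mantid environment; the width workspace, temperatures and run numbers are invented for illustration.

from mantid.simpleapi import CreateWorkspace

# One spectrum per temperature point; the data values are arbitrary.
widths = CreateWorkspace(OutputWorkspace='demo_widths',
                         DataX=[0.1, 0.2] * 3,
                         DataY=[1.0, 1.1, 0.9, 1.0, 0.8, 0.9],
                         NSpec=3)
_format_width_workspace(widths,
                        temperatures=['10.0', '50.0', '100.0'],
                        run_numbers=['12345', '12346', '12347'],
                        x_axis_is_temperature=True)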
Example #4
def _create_workspace_for_group_plot(
        plot_type: SpectraSelection, workspaces: List[Workspace],
        plot_index: int, log_name: str,
        custom_log_values: List[float]) -> MatrixWorkspace:
    _validate_workspace_choices(workspaces, plot_index)

    number_of_workspaces = len(workspaces)

    first_ws = workspaces[0]
    first_blocksize = first_ws.blocksize()

    if plot_type == SpectraSelection.Contour:
        x_size = first_blocksize + 1
    else:
        x_size = first_blocksize

    matrix_ws = WorkspaceFactory.Instance().create(
        parent=first_ws,
        NVectors=number_of_workspaces,
        XLength=x_size,
        YLength=first_blocksize)

    matrix_ws.setYUnitLabel(first_ws.YUnitLabel())

    log_values = []
    for i in range(number_of_workspaces):
        ws = workspaces[i]
        if isinstance(ws, MatrixWorkspace):
            if plot_type == SpectraSelection.Contour:
                matrix_ws.applyBinEdgesFromAnotherWorkspace(ws, plot_index, i)
            else:
                matrix_ws.applyPointsFromAnotherWorkspace(ws, plot_index, i)

            matrix_ws.setY(i, ws.readY(plot_index))
            matrix_ws.setE(i, ws.readE(plot_index))

            if log_name == "Custom":
                log_values.append(
                    get_single_workspace_log_value(
                        i, log_values=custom_log_values))
            else:
                log_values.append(
                    get_single_workspace_log_value(i,
                                                   matrix_ws=ws,
                                                   log_name=log_name))

    log_values_axis = NumericAxis.create(len(log_values))
    for i in range(len(log_values)):
        log_values_axis.setValue(i, log_values[i])

    matrix_ws.replaceAxis(1, log_values_axis)

    return matrix_ws
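A minimal sketch of the WorkspaceFactory call used above to create the output matrix workspace from a parent, assuming a Mantid environment; the parent workspace and the row count are illustrative.

from mantid.simpleapi import CreateSampleWorkspace
from mantid.api import WorkspaceFactory

parent = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=2)
n_rows = 3  # e.g. one row per workspace in the group
matrix_ws = WorkspaceFactory.Instance().create(parent=parent,
                                               NVectors=n_rows,
                                               XLength=parent.blocksize(),
                                               YLength=parent.blocksize())
matrix_ws.setYUnitLabel(parent.YUnitLabel())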
Example #5
 def test_replace_axis(self):
     x_axis = NumericAxis.create(1)
     x_axis.setValue(0, 0)
     ws1 = WorkspaceCreationHelper.create2DWorkspaceWithFullInstrument(
         2, 1, False)
     ws1.replaceAxis(0, x_axis)
     ws2 = WorkspaceCreationHelper.create2DWorkspaceWithFullInstrument(
         2, 1, False)
     ws2.replaceAxis(0, x_axis)
     try:
         del ws1, ws2
     except:
         self.fail(
             "Segmentation violation when deleting the same axis twice")
Example #6
    def _fill_s_2d_workspace(self,
                             s_points=None,
                             workspace=None,
                             protons_number=None,
                             nucleons_number=None):
        from mantid.api import NumericAxis
        from abins.constants import MILLI_EV_TO_WAVENUMBER

        if protons_number is not None:
            s_points = s_points * self.get_cross_section(
                scattering=self._scale_by_cross_section,
                protons_number=protons_number,
                nucleons_number=nucleons_number)

        n_q_values, n_freq_bins = s_points.shape
        n_q_bins = self._q_bins.size
        assert n_q_values + 1 == n_q_bins

        if self._energy_units == 'meV':
            energy_bins = self._bins / MILLI_EV_TO_WAVENUMBER
        else:
            energy_bins = self._bins

        wrk = WorkspaceFactory.create("Workspace2D",
                                      NVectors=n_freq_bins,
                                      XLength=n_q_bins,
                                      YLength=n_q_values)

        freq_axis = NumericAxis.create(n_freq_bins)
        freq_offset = (energy_bins[1] - energy_bins[0]) / 2
        for i, freq in enumerate(energy_bins[1:]):
            wrk.setX(i, self._q_bins)
            wrk.setY(i, s_points[:, i].T)
            freq_axis.setValue(i, freq + freq_offset)
        wrk.replaceAxis(1, freq_axis)

        AnalysisDataService.addOrReplace(workspace, wrk)

        self.set_workspace_units(workspace,
                                 layout="2D",
                                 energy_units=self._energy_units)
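As a standalone illustration of the fill pattern above, a hedged sketch that builds a small Workspace2D over a hypothetical S(Q, omega) grid and attaches a frequency axis; the grid, bins and values are invented and nothing here is taken from Abins itself.

import numpy as np
from mantid.api import NumericAxis, WorkspaceFactory

# Hypothetical grid: 4 Q values (5 Q bin edges) and 4 frequency bins.
q_bins = np.linspace(0.0, 2.0, 5)
energy_bins = np.linspace(0.0, 4.0, 5)
s_points = np.random.rand(q_bins.size - 1, energy_bins.size - 1)

wrk = WorkspaceFactory.create("Workspace2D",
                              NVectors=energy_bins.size - 1,
                              XLength=q_bins.size,
                              YLength=q_bins.size - 1)
freq_axis = NumericAxis.create(energy_bins.size - 1)
half_width = (energy_bins[1] - energy_bins[0]) / 2
for i, freq in enumerate(energy_bins[1:]):
    wrk.setX(i, q_bins)                       # Q bin edges for every row
    wrk.setY(i, s_points[:, i])               # one frequency bin per row
    freq_axis.setValue(i, float(freq + half_width))  # mirrors the offset used above
wrk.replaceAxis(1, freq_axis)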
Example #7
def replace_workspace_axis(wsName, new_values):
    ax1 = NumericAxis.create(len(new_values))
    for i in range(len(new_values)):
        ax1.setValue(i, new_values[i])
    ax1.setUnit('MomentumTransfer')
    mtd[wsName].replaceAxis(1, ax1)  # axis=1 is the vertical axis
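A hedged usage sketch, assuming the function above is in scope and a Mantid environment; the workspace and the Q values are illustrative.

from mantid.simpleapi import CreateWorkspace

CreateWorkspace(OutputWorkspace='demo_iq',
                DataX=[1., 2., 1., 2.], DataY=[5., 6., 7., 8.], NSpec=2)
replace_workspace_axis('demo_iq', [0.5, 1.5])  # one Q value per spectrum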
Example #8
    def PyExec(self):

        workflow_prog = Progress(self, start=0.0, end=1.0, nreports=20)
        self._setup()

        workflow_prog.report('Validating input')
        input_workspace = mtd[self._input_ws]
        num_spectra, num_w = self._CheckHistZero(self._input_ws)
        logger.information('Sample %s has %d Q values & %d w values' %
                           (self._input_ws, num_spectra, num_w))
        self._CheckElimits([self._energy_min, self._energy_max],
                           self._input_ws)

        workflow_prog.report('Cropping Workspace')
        input_ws = '__temp_sqw_moments_cropped'
        crop_alg = self.createChildAlgorithm("CropWorkspace",
                                             enableLogging=False)
        crop_alg.setProperty("InputWorkspace", input_workspace)
        crop_alg.setProperty("XMin", self._energy_min)
        crop_alg.setProperty("XMax", self._energy_max)
        crop_alg.setProperty("OutputWorkspace", input_ws)
        crop_alg.execute()
        mtd.addOrReplace(input_ws,
                         crop_alg.getProperty("OutputWorkspace").value)

        logger.information('Energy range is %f to %f' %
                           (self._energy_min, self._energy_max))

        if self._factor > 0.0:
            workflow_prog.report('Scaling Workspace by factor %f' %
                                 self._factor)
            scale_alg = self.createChildAlgorithm("Scale", enableLogging=False)
            scale_alg.setProperty("InputWorkspace", input_ws)
            scale_alg.setProperty("Factor", self._factor)
            scale_alg.setProperty("Operation", 'Multiply')
            scale_alg.setProperty("OutputWorkspace", input_ws)
            scale_alg.execute()
            logger.information('y(q,w) scaled by %f' % self._factor)

        # calculate delta x
        workflow_prog.report('Converting to point data')
        convert_point_alg = self.createChildAlgorithm("ConvertToPointData",
                                                      enableLogging=False)
        convert_point_alg.setProperty("InputWorkspace", input_ws)
        convert_point_alg.setProperty("OutputWorkspace", input_ws)
        convert_point_alg.execute()
        mtd.addOrReplace(
            input_ws,
            convert_point_alg.getProperty("OutputWorkspace").value)
        x_data = np.asarray(mtd[input_ws].readX(0))
        workflow_prog.report('Creating temporary data workspace')
        x_workspace = "__temp_sqw_moments_x"
        create_alg = self.createChildAlgorithm("CreateWorkspace",
                                               enableLogging=False)
        create_alg.setProperty("DataX", x_data)
        create_alg.setProperty("DataY", x_data)
        create_alg.setProperty("UnitX", "DeltaE")
        create_alg.setProperty("OutputWorkspace", x_workspace)
        create_alg.execute()
        mtd.addOrReplace(x_workspace,
                         create_alg.getProperty("OutputWorkspace").value)

        # calculate moments
        multiply_alg = self.createChildAlgorithm("Multiply",
                                                 enableLogging=False)

        workflow_prog.report('Multiplying Workspaces by moments')
        moments_0 = self._output_ws + '_M0'
        moments_1 = self._output_ws + '_M1'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", input_ws)
        multiply_alg.setProperty("OutputWorkspace", moments_1)
        multiply_alg.execute()
        mtd.addOrReplace(moments_1,
                         multiply_alg.getProperty("OutputWorkspace").value)

        moments_2 = self._output_ws + '_M2'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_1)
        multiply_alg.setProperty("OutputWorkspace", moments_2)
        multiply_alg.execute()
        mtd.addOrReplace(moments_2,
                         multiply_alg.getProperty("OutputWorkspace").value)

        moments_3 = self._output_ws + '_M3'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_2)
        multiply_alg.setProperty("OutputWorkspace", moments_3)
        multiply_alg.execute()
        mtd.addOrReplace(moments_3,
                         multiply_alg.getProperty("OutputWorkspace").value)

        moments_4 = self._output_ws + '_M4'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_3)
        multiply_alg.setProperty("OutputWorkspace", moments_4)
        multiply_alg.execute()
        mtd.addOrReplace(moments_4,
                         multiply_alg.getProperty("OutputWorkspace").value)

        workflow_prog.report('Converting to Histogram')
        convert_hist_alg = self.createChildAlgorithm("ConvertToHistogram",
                                                     enableLogging=False)
        convert_hist_alg.setProperty("InputWorkspace", input_ws)
        convert_hist_alg.setProperty("OutputWorkspace", input_ws)
        convert_hist_alg.execute()

        workflow_prog.report('Integrating result')
        integration_alg = self.createChildAlgorithm("Integration",
                                                    enableLogging=False)
        integration_alg.setProperty(
            "InputWorkspace",
            convert_hist_alg.getProperty("OutputWorkspace").value)
        integration_alg.setProperty("OutputWorkspace", moments_0)
        integration_alg.execute()
        mtd.addOrReplace(moments_0,
                         integration_alg.getProperty("OutputWorkspace").value)

        moments = [moments_1, moments_2, moments_3, moments_4]
        divide_alg = self.createChildAlgorithm("Divide", enableLogging=False)
        for moment_ws in moments:
            workflow_prog.report('Processing workspace %s' % moment_ws)
            convert_hist_alg.setProperty("InputWorkspace", moment_ws)
            convert_hist_alg.setProperty("OutputWorkspace", moment_ws)
            convert_hist_alg.execute()

            integration_alg.setProperty(
                "InputWorkspace",
                convert_hist_alg.getProperty("OutputWorkspace").value)
            integration_alg.setProperty("OutputWorkspace", moment_ws)
            integration_alg.execute()

            divide_alg.setProperty(
                "LHSWorkspace",
                integration_alg.getProperty("OutputWorkspace").value)
            divide_alg.setProperty("RHSWorkspace", moments_0)
            divide_alg.setProperty("OutputWorkspace", moment_ws)
            divide_alg.execute()
            mtd.addOrReplace(moment_ws,
                             divide_alg.getProperty("OutputWorkspace").value)

        workflow_prog.report('Deleting Workspaces')
        delete_alg = self.createChildAlgorithm("DeleteWorkspace",
                                               enableLogging=False)
        delete_alg.setProperty("Workspace", input_ws)
        delete_alg.execute()
        delete_alg.setProperty("Workspace", x_workspace)
        delete_alg.execute()

        # create output workspace
        extensions = ['_M0', '_M1', '_M2', '_M3', '_M4']
        transpose_alg = self.createChildAlgorithm("Transpose",
                                                  enableLogging=False)
        convert_hist_alg = self.createChildAlgorithm("ConvertToHistogram",
                                                     enableLogging=False)
        convert_units_alg = self.createChildAlgorithm("ConvertUnits",
                                                      enableLogging=False)
        for ext in extensions:
            ws_name = self._output_ws + ext
            workflow_prog.report('Processing Workspace %s' % ext)
            transpose_alg.setProperty("InputWorkspace", ws_name)
            transpose_alg.setProperty("OutputWorkspace", ws_name)
            transpose_alg.execute()
            convert_hist_alg.setProperty(
                "InputWorkspace",
                transpose_alg.getProperty("OutputWorkspace").value)
            convert_hist_alg.setProperty("OutputWorkspace", ws_name)
            convert_hist_alg.execute()
            convert_units_alg.setProperty(
                "InputWorkspace",
                convert_hist_alg.getProperty("OutputWorkspace").value)
            convert_units_alg.setProperty("Target", 'MomentumTransfer')
            convert_units_alg.setProperty("EMode", 'Indirect')
            convert_units_alg.setProperty("OutputWorkspace", ws_name)
            convert_units_alg.execute()
            mtd.addOrReplace(
                ws_name,
                convert_units_alg.getProperty("OutputWorkspace").value)

            workflow_prog.report('Adding Sample logs to %s' % ws_name)
            copy_alg = self.createChildAlgorithm("CopyLogs",
                                                 enableLogging=False)
            copy_alg.setProperty("InputWorkspace", input_workspace)
            copy_alg.setProperty("OutputWorkspace", ws_name)
            copy_alg.execute()
            add_sample_log_alg = self.createChildAlgorithm("AddSampleLog",
                                                           enableLogging=False)
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "energy_min")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._energy_min))
            add_sample_log_alg.execute()
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "energy_max")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._energy_max))
            add_sample_log_alg.execute()
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "scale_factor")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._factor))
            add_sample_log_alg.execute()

        # Group output workspace
        workflow_prog.report('Appending moments')
        append_alg = self.createChildAlgorithm("AppendSpectra",
                                               enableLogging=False)
        append_alg.setProperty("InputWorkspace1", self._output_ws + '_M0')
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M1')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1",
                               append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M2')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1",
                               append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M3')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1",
                               append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M4')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        mtd.addOrReplace(self._output_ws,
                         append_alg.getProperty("OutputWorkspace").value)
        delete_alg.setProperty("Workspace", self._output_ws + '_M0')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M1')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M2')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M3')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M4')
        delete_alg.execute()

        # Create a new vertical axis indexing the five moments (M0..M4)
        y_axis = NumericAxis.create(5)
        for idx in range(5):
            y_axis.setValue(idx, idx)
        mtd[self._output_ws].replaceAxis(1, y_axis)

        self.setProperty("OutputWorkspace", self._output_ws)
Example #9
 def _integrate_iq(self):
     """
     Produces I(Q) or I(Phi,Q) using Q1DWeighted
     """
     if self._resolution == 'MildnerCarpenter':
         self._setup_mildner_carpenter()
     run = mtd[self._input_ws].getRun()
     q_min = run.getLogData('qmin').value
     q_max = run.getLogData('qmax').value
     self.log().information('Using qmin={0:.2f}, qmax={1:.2f}'.format(q_min, q_max))
     pixel_height = run.getLogData('pixel_height').value
     pixel_width = run.getLogData('pixel_width').value
     pixel_size = pixel_height if pixel_height >= pixel_width else pixel_width
     binning_factor = self.getProperty('BinningFactor').value
     wavelength = 0. # for TOF mode there is no wavelength
     if run.hasProperty('wavelength'):
         wavelength = run.getLogData('wavelength').value
     l2 = run.getLogData('l2').value
     beamY = 0.
     if run.hasProperty('BeamCenterY'):
         beamY = run.getLogData('BeamCenterY').value
     q_binning = self._get_iq_binning(q_min, q_max, pixel_size, wavelength, l2, binning_factor, -beamY)
     n_wedges = self.getProperty('NumberOfWedges').value
     pixel_division = self.getProperty('NPixelDivision').value
     gravity = wavelength == 0.
     if self._output_type == 'I(Q)':
         wedge_ws = self.getPropertyValue('WedgeWorkspace')
         wedge_angle = self.getProperty('WedgeAngle').value
         wedge_offset = self.getProperty('WedgeOffset').value
         asymm_wedges = self.getProperty('AsymmetricWedges').value
         Q1DWeighted(InputWorkspace=self._input_ws, OutputWorkspace=self._output_ws,
                     NumberOfWedges=n_wedges, OutputBinning=q_binning, AccountForGravity=gravity,
                     WedgeWorkspace=wedge_ws, WedgeAngle=wedge_angle, WedgeOffset=wedge_offset,
                     AsymmetricWedges=asymm_wedges, NPixelDivision=pixel_division)
         if self._resolution == 'MildnerCarpenter':
             x = mtd[self._output_ws].readX(0)
             mid_x = (x[1:] + x[:-1]) / 2
             res = self._deltaQ(mid_x)
             mtd[self._output_ws].setDx(0, res)
             if n_wedges != 0:
                 for wedge in range(n_wedges):
                     mtd[wedge_ws].getItem(wedge).setDx(0, res)
         if n_wedges != 0:
             self.setProperty('WedgeWorkspace', mtd[wedge_ws])
     elif self._output_type == 'I(Phi,Q)':
         wedge_ws = '__wedges' + self._input_ws
         iq_ws = '__iq' + self._input_ws
         wedge_angle = 360./n_wedges
         azimuth_axis = NumericAxis.create(n_wedges)
         azimuth_axis.setUnit("Degrees")
         for i in range(n_wedges):
             azimuth_axis.setValue(i, i * wedge_angle)
         Q1DWeighted(InputWorkspace=self._input_ws, OutputWorkspace=iq_ws, NumberOfWedges=n_wedges,
                     NPixelDivision=pixel_division, OutputBinning=q_binning, WedgeWorkspace=wedge_ws,
                     WedgeAngle=wedge_angle, AsymmetricWedges=True, AccountForGravity=gravity)
         DeleteWorkspace(iq_ws)
         ConjoinSpectra(InputWorkspaces=wedge_ws, OutputWorkspace=self._output_ws)
         mtd[self._output_ws].replaceAxis(1, azimuth_axis)
         DeleteWorkspace(wedge_ws)
         if self._resolution == 'MildnerCarpenter':
             x = mtd[self._output_ws].readX(0)
             mid_x = (x[1:] + x[:-1]) / 2
             res = self._deltaQ(mid_x)
             for i in range(mtd[self._output_ws].getNumberHistograms()):
                 mtd[self._output_ws].setDx(i, res)
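A standalone hedged sketch of the azimuthal axis built in the I(Phi,Q) branch above, assuming a Mantid environment; the number of wedges is illustrative and attaching the axis is left as a comment.

from mantid.api import NumericAxis

n_wedges = 4
wedge_angle = 360. / n_wedges
azimuth_axis = NumericAxis.create(n_wedges)
azimuth_axis.setUnit("Degrees")
for i in range(n_wedges):
    azimuth_axis.setValue(i, i * wedge_angle)
# After ConjoinSpectra produces one spectrum per wedge:
# mtd[output_ws].replaceAxis(1, azimuth_axis)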
    def _fit_bragg_peaks(self, ws, yig_peaks):
        """ Fits peaks defined in the yig_peaks argument
        returns a workspace with fitted peak positions
        on the Y axis and the expected positions on the X axis"""
        fitting_method = self.getPropertyValue('FittingMethod')
        max_n_peaks = len(max(yig_peaks, key=len))
        conjoined_peak_fit_name = 'conjoined_peak_fit_{}'.format(self.getPropertyValue('FitOutputWorkspace'))
        # if the name exists in ADS, delete it
        if conjoined_peak_fit_name in mtd:
            DeleteWorkspace(Workspace=conjoined_peak_fit_name)
        ws_names = []
        background = 'name=FlatBackground, A0=1e-4'
        function = "name=Gaussian, PeakCentre={0}, Height={1}, Sigma={2}"
        constraints = "f{0}.Height > 0, f{0}.Sigma < {1}, {2} < f{0}.PeakCentre < {3}"
        for pixel_no in range(mtd[ws].getNumberHistograms()):
            # create the needed columns in the output workspace
            results_x = np.zeros(max_n_peaks)
            results_y = np.zeros(max_n_peaks)
            results_e = np.zeros(max_n_peaks)
            single_spectrum_peaks = yig_peaks[pixel_no]
            ws_name = 'pixel_{}'.format(pixel_no)
            fit_function = [background]
            if len(single_spectrum_peaks) >= 1:
                if fitting_method == 'Individual':
                    ws_name += '_peak_{}'
                peak_no = 0
                function_no = 0
                fit_constraints = []
                for peak_intensity, peak_centre_guess, peak_centre_expected in single_spectrum_peaks:
                    function_no += 1
                    fit_function.append(function.format(float(peak_centre_guess), peak_intensity, 0.5*self._peakWidth))
                    fit_constraints.append(constraints.format(function_no, self._peakWidth,
                                                              peak_centre_guess - self._minDistance,
                                                              peak_centre_guess + self._minDistance))
                    if fitting_method == 'Individual':
                        name = ws_name.format(peak_no)
                        ws_names.append(name)
                        results_x, results_y, results_e = self._call_fit(ws, name, fit_function, fit_constraints,
                                                                         peak_no, pixel_no, single_spectrum_peaks,
                                                                         results_x, results_y, results_e,
                                                                         startX=peak_centre_expected - self._minDistance,
                                                                         endX=peak_centre_expected + self._minDistance)
                        fit_function = [background]
                        fit_constraints = []
                        function_no = 0
                    peak_no += 1

                if fitting_method == 'Global':
                    ws_names.append(ws_name)
                    results_x, results_y, results_e = self._call_fit(ws, ws_name, fit_function, fit_constraints,
                                                                     0, pixel_no, single_spectrum_peaks,
                                                                     results_x, results_y, results_e)
            if fitting_method != 'None':
                CreateWorkspace(OutputWorkspace='ws',
                                DataX=results_x,
                                DataY=results_y,
                                DataE=results_e,
                                UnitX='degrees',
                                NSpec=1)
                try:
                    ConjoinWorkspaces(InputWorkspace1=conjoined_peak_fit_name, InputWorkspace2='ws',
                                      CheckOverlapping=False,
                                      YAxisLabel='TwoTheta_fit',
                                      YAxisUnit='degrees')
                except ValueError:
                    RenameWorkspace(InputWorkspace='ws', OutputWorkspace=conjoined_peak_fit_name)
            else:
                ws_names.append(ws_name)
                single_spectrum_name = '{}_single_spectrum'.format(ws_name)
                ExtractSpectra(InputWorkspace=ws, OutputWorkspace=single_spectrum_name,
                               WorkspaceIndexList=[pixel_no])
                EvaluateFunction(Function=';'.join(fit_function),
                                 InputWorkspace=single_spectrum_name,
                                 OutputWorkspace=ws_name)
                DeleteWorkspace(Workspace=single_spectrum_name)

        if fitting_method in ['Individual', 'Global']:
            y_axis = NumericAxis.create(self._D7NumberPixels)
            for pixel_no in range(self._D7NumberPixels):
                y_axis.setValue(pixel_no, pixel_no+1)
            mtd[conjoined_peak_fit_name].replaceAxis(1, y_axis)
            # clean up after fitting:
            DeleteWorkspaces(['out_Parameters', 'out_NormalisedCovarianceMatrix'])

        single_peak_fit_results_name = 'peak_fits_{}'.format(self.getPropertyValue('FitOutputWorkspace'))
        GroupWorkspaces(InputWorkspaces=ws_names, OutputWorkspace=single_peak_fit_results_name)

        return conjoined_peak_fit_name, single_peak_fit_results_name
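To make the string templates above concrete, a small hedged sketch that assembles a multi-peak fit function and its constraints for two hypothetical peaks; it is plain Python and performs no fitting.

background = 'name=FlatBackground, A0=1e-4'
function = "name=Gaussian, PeakCentre={0}, Height={1}, Sigma={2}"
constraints = "f{0}.Height > 0, f{0}.Sigma < {1}, {2} < f{0}.PeakCentre < {3}"
peak_width = 2.0      # stands in for self._peakWidth
min_distance = 1.5    # stands in for self._minDistance
peaks = [(100.0, 10.0), (80.0, 25.0)]  # hypothetical (intensity, centre guess) pairs
fit_function = [background]
fit_constraints = []
for function_no, (intensity, centre) in enumerate(peaks, start=1):
    fit_function.append(function.format(centre, intensity, 0.5 * peak_width))
    fit_constraints.append(constraints.format(function_no, peak_width,
                                              centre - min_distance,
                                              centre + min_distance))
print(';'.join(fit_function))    # the composite function string, as used with EvaluateFunction above
print(','.join(fit_constraints))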
Example #11
    def _integrate_iq(self, ws_in, ws_out, panel=None):
        """
        Produces I(Q) or I(Phi,Q) using Q1DWeighted
        """
        if self._resolution == 'MildnerCarpenter':
            self._setup_mildner_carpenter()
        elif self._resolution == 'DirectBeam':
            self._setup_directbeam_resolution()
        run = mtd[ws_in].getRun()
        q_min_name = 'qmin'
        q_max_name = 'qmax'
        if panel:
            q_min_name += ('_' + panel)
            q_max_name += ('_' + panel)
        q_min = run.getLogData(q_min_name).value
        q_max = run.getLogData(q_max_name).value
        self.log().information(
            'From sample logs qmin={0:.5f}, qmax={1:.5f}'.format(q_min, q_max))
        instrument = mtd[self._input_ws].getInstrument()
        pixel_width = instrument.getNumberParameter('x-pixel-size')[0] / 1000
        pixel_height = instrument.getNumberParameter('y-pixel-size')[0] / 1000

        pixel_size = pixel_height if pixel_height >= pixel_width else pixel_width
        binning_factor = self.getProperty('BinningFactor').value
        wavelength = 0.  # for TOF mode there is no wavelength
        if run.hasProperty('wavelength'):
            wavelength = run.getLogData('wavelength').value
        l2 = run.getLogData('l2').value
        q_binning = self._get_iq_binning(q_min, q_max, pixel_size, wavelength,
                                         l2, binning_factor)
        n_wedges = self.getProperty('NumberOfWedges').value
        pixel_division = self.getProperty('NPixelDivision').value
        gravity = wavelength == 0.
        shape_table = self.getProperty('ShapeTable').value
        if self._output_type == 'I(Q)':
            if panel:
                # do not process wedges for panels
                n_wedges = 0
                shape_table = ''
            wedge_ws = self.getPropertyValue('WedgeWorkspace')
            wedge_angle = self.getProperty('WedgeAngle').value
            wedge_offset = self.getProperty('WedgeOffset').value
            asymm_wedges = self.getProperty('AsymmetricWedges').value
            Q1DWeighted(InputWorkspace=ws_in,
                        OutputWorkspace=ws_out,
                        NumberOfWedges=n_wedges,
                        OutputBinning=q_binning,
                        AccountForGravity=gravity,
                        WedgeWorkspace=wedge_ws,
                        WedgeAngle=wedge_angle,
                        WedgeOffset=wedge_offset,
                        AsymmetricWedges=asymm_wedges,
                        NPixelDivision=pixel_division,
                        ShapeTable=shape_table)
            if shape_table:
                # if there is a shape table, the final number of wedges cannot be known beforehand
                # (because of possible symmetry choices)
                n_wedges = mtd[wedge_ws].size()
            if self._resolution != 'None':
                res = self._set_resolution(ws_out)
                for wedge in range(n_wedges):
                    self._set_resolution(mtd[wedge_ws][wedge].name(), res)
            if n_wedges != 0:
                self.setProperty('WedgeWorkspace', mtd[wedge_ws])
        elif self._output_type == 'I(Phi,Q)':
            wedge_ws = '__wedges' + ws_in
            iq_ws = '__iq' + ws_in
            wedge_angle = 360. / n_wedges
            azimuth_axis = NumericAxis.create(n_wedges)
            azimuth_axis.setUnit("Phi")
            for i in range(n_wedges):
                azimuth_axis.setValue(i, i * wedge_angle)
            Q1DWeighted(InputWorkspace=ws_in,
                        OutputWorkspace=iq_ws,
                        NumberOfWedges=n_wedges,
                        NPixelDivision=pixel_division,
                        OutputBinning=q_binning,
                        WedgeWorkspace=wedge_ws,
                        WedgeAngle=wedge_angle,
                        AsymmetricWedges=True,
                        AccountForGravity=gravity)
            DeleteWorkspace(iq_ws)
            ConjoinSpectra(InputWorkspaces=wedge_ws, OutputWorkspace=ws_out)
            mtd[ws_out].replaceAxis(1, azimuth_axis)
            DeleteWorkspace(wedge_ws)
            self._set_resolution(ws_out)
Example #12
 def test_constructor_methods_return_the_correct_type(self):
     self.assertTrue(isinstance(NumericAxis.create(2), NumericAxis))
     self.assertTrue(isinstance(SpectraAxis.create(2), SpectraAxis))
     self.assertTrue(isinstance(TextAxis.create(2), TextAxis))
Example #13
 def test_constructor_methods_return_the_correct_type(self):
     self.assertTrue(isinstance(NumericAxis.create(2), NumericAxis))
     self.assertTrue(
         isinstance(SpectraAxis.create(self._test_ws), SpectraAxis))
     self.assertTrue(isinstance(TextAxis.create(2), TextAxis))
Example #14
    def PyExec(self):

        workflow_prog = Progress(self, start=0.0, end=1.0, nreports=20)
        self._setup()

        workflow_prog.report('Validating input')
        input_workspace = mtd[self._input_ws]
        num_spectra, num_w = self._CheckHistZero(self._input_ws)
        logger.information('Sample %s has %d Q values & %d w values' % (self._input_ws, num_spectra, num_w))
        self._CheckElimits([self._energy_min, self._energy_max], self._input_ws)

        workflow_prog.report('Cropping Workspace')
        input_ws = '__temp_sqw_moments_cropped'
        crop_alg = self.createChildAlgorithm("CropWorkspace", enableLogging=False)
        crop_alg.setProperty("InputWorkspace", input_workspace)
        crop_alg.setProperty("XMin", self._energy_min)
        crop_alg.setProperty("XMax", self._energy_max)
        crop_alg.setProperty("OutputWorkspace", input_ws)
        crop_alg.execute()
        mtd.addOrReplace(input_ws, crop_alg.getProperty("OutputWorkspace").value)

        logger.information('Energy range is %f to %f' % (self._energy_min, self._energy_max))

        if self._factor > 0.0:
            workflow_prog.report('Scaling Workspace by factor %f' % self._factor)
            scale_alg = self.createChildAlgorithm("Scale", enableLogging=False)
            scale_alg.setProperty("InputWorkspace", input_ws)
            scale_alg.setProperty("Factor", self._factor)
            scale_alg.setProperty("Operation", 'Multiply')
            scale_alg.setProperty("OutputWorkspace", input_ws)
            scale_alg.execute()
            logger.information('y(q,w) scaled by %f' % self._factor)

        # calculate delta x
        workflow_prog.report('Converting to point data')
        convert_point_alg = self.createChildAlgorithm("ConvertToPointData", enableLogging=False)
        convert_point_alg.setProperty("InputWorkspace", input_ws)
        convert_point_alg.setProperty("OutputWorkspace", input_ws)
        convert_point_alg.execute()
        mtd.addOrReplace(input_ws, convert_point_alg.getProperty("OutputWorkspace").value)
        x_data = np.asarray(mtd[input_ws].readX(0))
        workflow_prog.report('Creating temporary data workspace')
        x_workspace = "__temp_sqw_moments_x"
        create_alg = self.createChildAlgorithm("CreateWorkspace", enableLogging=False)
        create_alg.setProperty("DataX", x_data)
        create_alg.setProperty("DataY", x_data)
        create_alg.setProperty("UnitX", "DeltaE")
        create_alg.setProperty("OutputWorkspace", x_workspace)
        create_alg.execute()
        mtd.addOrReplace(x_workspace, create_alg.getProperty("OutputWorkspace").value)

        # calculate moments
        multiply_alg = self.createChildAlgorithm("Multiply", enableLogging=False)

        workflow_prog.report('Multiplying Workspaces by moments')
        moments_0 = self._output_ws + '_M0'
        moments_1 = self._output_ws + '_M1'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", input_ws)
        multiply_alg.setProperty("OutputWorkspace", moments_1)
        multiply_alg.execute()
        mtd.addOrReplace(moments_1, multiply_alg.getProperty("OutputWorkspace").value)

        moments_2 = self._output_ws + '_M2'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_1)
        multiply_alg.setProperty("OutputWorkspace", moments_2)
        multiply_alg.execute()
        mtd.addOrReplace(moments_2, multiply_alg.getProperty("OutputWorkspace").value)

        moments_3 = self._output_ws + '_M3'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_2)
        multiply_alg.setProperty("OutputWorkspace", moments_3)
        multiply_alg.execute()
        mtd.addOrReplace(moments_3, multiply_alg.getProperty("OutputWorkspace").value)

        moments_4 = self._output_ws + '_M4'
        multiply_alg.setProperty("LHSWorkspace", x_workspace)
        multiply_alg.setProperty("RHSWorkspace", moments_3)
        multiply_alg.setProperty("OutputWorkspace", moments_4)
        multiply_alg.execute()
        mtd.addOrReplace(moments_4, multiply_alg.getProperty("OutputWorkspace").value)

        workflow_prog.report('Converting to Histogram')
        convert_hist_alg = self.createChildAlgorithm("ConvertToHistogram", enableLogging=False)
        convert_hist_alg.setProperty("InputWorkspace", input_ws)
        convert_hist_alg.setProperty("OutputWorkspace", input_ws)
        convert_hist_alg.execute()

        workflow_prog.report('Integrating result')
        integration_alg = self.createChildAlgorithm("Integration", enableLogging=False)
        integration_alg.setProperty("InputWorkspace", convert_hist_alg.getProperty("OutputWorkspace").value)
        integration_alg.setProperty("OutputWorkspace", moments_0)
        integration_alg.execute()
        mtd.addOrReplace(moments_0, integration_alg.getProperty("OutputWorkspace").value)

        moments = [moments_1, moments_2, moments_3, moments_4]
        divide_alg = self.createChildAlgorithm("Divide", enableLogging=False)
        for moment_ws in moments:
            workflow_prog.report('Processing workspace %s' % moment_ws)
            convert_hist_alg.setProperty("InputWorkspace", moment_ws)
            convert_hist_alg.setProperty("OutputWorkspace", moment_ws)
            convert_hist_alg.execute()

            integration_alg.setProperty("InputWorkspace", convert_hist_alg.getProperty("OutputWorkspace").value)
            integration_alg.setProperty("OutputWorkspace", moment_ws)
            integration_alg.execute()

            divide_alg.setProperty("LHSWorkspace", integration_alg.getProperty("OutputWorkspace").value)
            divide_alg.setProperty("RHSWorkspace", moments_0)
            divide_alg.setProperty("OutputWorkspace", moment_ws)
            divide_alg.execute()
            mtd.addOrReplace(moment_ws, divide_alg.getProperty("OutputWorkspace").value)

        workflow_prog.report('Deleting Workspaces')
        delete_alg = self.createChildAlgorithm("DeleteWorkspace", enableLogging=False)
        delete_alg.setProperty("Workspace", input_ws)
        delete_alg.execute()
        delete_alg.setProperty("Workspace", x_workspace)
        delete_alg.execute()

        # create output workspace
        extensions = ['_M0', '_M1', '_M2', '_M3', '_M4']
        transpose_alg = self.createChildAlgorithm("Transpose", enableLogging=False)
        convert_hist_alg = self.createChildAlgorithm("ConvertToHistogram", enableLogging=False)
        convert_units_alg = self.createChildAlgorithm("ConvertUnits", enableLogging=False)
        for ext in extensions:
            ws_name = self._output_ws + ext
            workflow_prog.report('Processing Workspace %s' % ext)
            transpose_alg.setProperty("InputWorkspace", ws_name)
            transpose_alg.setProperty("OutputWorkspace", ws_name)
            transpose_alg.execute()
            convert_hist_alg.setProperty("InputWorkspace", transpose_alg.getProperty("OutputWorkspace").value)
            convert_hist_alg.setProperty("OutputWorkspace", ws_name)
            convert_hist_alg.execute()
            convert_units_alg.setProperty("InputWorkspace", convert_hist_alg.getProperty("OutputWorkspace").value)
            convert_units_alg.setProperty("Target", 'MomentumTransfer')
            convert_units_alg.setProperty("EMode", 'Indirect')
            convert_units_alg.setProperty("OutputWorkspace", ws_name)
            convert_units_alg.execute()
            mtd.addOrReplace(ws_name, convert_units_alg.getProperty("OutputWorkspace").value)

            workflow_prog.report('Adding Sample logs to %s' % ws_name)
            copy_alg = self.createChildAlgorithm("CopyLogs", enableLogging=False)
            copy_alg.setProperty("InputWorkspace", input_workspace)
            copy_alg.setProperty("OutputWorkspace", ws_name)
            copy_alg.execute()
            add_sample_log_alg = self.createChildAlgorithm("AddSampleLog", enableLogging=False)
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "energy_min")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._energy_min))
            add_sample_log_alg.execute()
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "energy_max")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._energy_max))
            add_sample_log_alg.execute()
            add_sample_log_alg.setProperty("Workspace", ws_name)
            add_sample_log_alg.setProperty("LogName", "scale_factor")
            add_sample_log_alg.setProperty("LogType", "Number")
            add_sample_log_alg.setProperty("LogText", str(self._factor))
            add_sample_log_alg.execute()

        # Group output workspace
        workflow_prog.report('Appending moments')
        append_alg = self.createChildAlgorithm("AppendSpectra", enableLogging=False)
        append_alg.setProperty("InputWorkspace1", self._output_ws + '_M0')
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M1')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1", append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M2')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1", append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M3')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        append_alg.setProperty("InputWorkspace1", append_alg.getProperty("OutputWorkspace").value)
        append_alg.setProperty("InputWorkspace2", self._output_ws + '_M4')
        append_alg.setProperty("ValidateInputs", False)
        append_alg.setProperty("OutputWorkspace", self._output_ws)
        append_alg.execute()
        mtd.addOrReplace(self._output_ws, append_alg.getProperty("OutputWorkspace").value)
        delete_alg.setProperty("Workspace", self._output_ws + '_M0')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M1')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M2')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M3')
        delete_alg.execute()
        delete_alg.setProperty("Workspace", self._output_ws + '_M4')
        delete_alg.execute()

        # Create a new vertical axis indexing the five moments (M0..M4)
        y_axis = NumericAxis.create(5)
        for idx in range(5):
            y_axis.setValue(idx, idx)
        mtd[self._output_ws].replaceAxis(1, y_axis)

        self.setProperty("OutputWorkspace", self._output_ws)
Example #16
 def _q_rebin(self, ws):
     """
     Rebins the single crystal omega scan measurement output onto 2D Qx-Qy grid.
     :param ws: Output of the cross-section separation and/or normalisation.
     :return: WorkspaceGroup containing 2D distributions on a Qx-Qy grid.
     """
     DEG_2_RAD = np.pi / 180.0
     fld = self._sampleAndEnvironmentProperties['fld'].value if 'fld' in self._sampleAndEnvironmentProperties else 1
     nQ = self._sampleAndEnvironmentProperties['nQ'].value if 'nQ' in self._sampleAndEnvironmentProperties else 80
     omega_shift = self._sampleAndEnvironmentProperties['OmegaShift'].value \
         if 'OmegaShift' in self._sampleAndEnvironmentProperties else 0
     wavelength = mtd[ws][0].getRun().getLogData('monochromator.wavelength').value
     ki = 2 * np.pi / wavelength
     dE = 0.0  # monochromatic data
     const_val = 2.07194  # hbar^2/2m
     kf = np.sqrt(ki * ki - dE / const_val)
     twoTheta = -mtd[ws][0].getAxis(1).extractValues() * DEG_2_RAD  # detector positions in radians
     omega = mtd[ws][0].getAxis(0).extractValues() * DEG_2_RAD  # omega scan angle in radians
     ntheta = len(twoTheta)
     nomega = len(omega)
     omega = np.matrix(omega) + omega_shift * DEG_2_RAD
     Qmag = np.sqrt(ki * ki + kf * kf - 2 * ki * kf * np.cos(twoTheta))
     # beta is the angle between ki and Q
     beta = (twoTheta / np.abs(twoTheta)) * np.arccos((ki * ki - kf * kf + Qmag * Qmag) / (2 * ki * Qmag))
     alpha = -np.pi/2 + omega.T * np.ones(shape=(1, ntheta)) + np.ones(shape=(nomega, 1)) * beta
     Qx = np.multiply((np.ones(shape=(nomega, 1)) * Qmag), np.cos(alpha)).T
     Qy = np.multiply((np.ones(shape=(nomega, 1)) * Qmag), np.sin(alpha)).T
     Qmax = 1.1 * np.max(Qmag)
     dQ = Qmax / nQ
     output_names = []
     for entry in mtd[ws]:
         w_out = np.zeros(shape=((fld + 1) * nQ, (fld + 1) * nQ))
         e_out = np.zeros(shape=((fld + 1) * nQ, (fld + 1) * nQ))
         n_out = np.zeros(shape=((fld + 1) * nQ, (fld + 1) * nQ))
         w_in = entry.extractY()
         e_in = entry.extractE()
         for theta in range(ntheta):
             for omega in range(nomega):
                 if fld == 1:
                     ix = int(((Qx[theta, omega] + dQ / 2.) / dQ) + nQ)
                     iy = int(((Qy[theta, omega] + dQ / 2.) / dQ) + nQ)
                     if Qx[theta, omega] > 0.99 * Qmax or Qy[theta, omega] > 0.99 * Qmax:
                         continue
                 else:
                     ix = int(abs((Qx[theta, omega]) + dQ / 2.) / dQ)
                     iy = int(abs((Qy[theta, omega]) + dQ / 2.) / dQ)
                 w_out[ix, iy] += w_in[theta, omega]
                 e_out[ix, iy] += e_in[theta, omega]**2
                 n_out[ix, iy] += 1.
         w_out /= n_out
         e_out = np.sqrt(e_out / n_out)
         w_out_name = entry.name() + '_qxqy'
         output_names.append(w_out_name)
         data_x = [(val-(fld*nQ)) * dQ for val in range((fld+1)*nQ)]
         y_axis = NumericAxis.create(int((fld+1)*nQ))
         for q_index in range(int((fld+1)*nQ)):
             y_axis.setValue(q_index, (q_index-(fld*nQ))*dQ)
         CreateWorkspace(DataX=data_x, DataY=w_out, DataE=e_out, NSpec=int((fld+1)*nQ),
                         OutputWorkspace=w_out_name)
         mtd[w_out_name].replaceAxis(1, y_axis)
         mtd[w_out_name].getAxis(0).setUnit('Label').setLabel('Qx', r'\AA^{-1}')
         mtd[w_out_name].getAxis(1).setUnit('Label').setLabel('Qy', r'\AA^{-1}')
         ReplaceSpecialValues(InputWorkspace=w_out_name, OutputWorkspace=w_out_name, NaNValue=0,
                              NaNError=0, InfinityValue=0, InfinityError=0)
     DeleteWorkspace(Workspace=ws)
     GroupWorkspaces(InputWorkspaces=output_names, OutputWorkspace=ws)
     return ws
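A short self-contained check of the momentum-transfer relation used above: |Q|^2 = ki^2 + kf^2 - 2 ki kf cos(2theta), which for monochromatic (elastic) data with ki = kf reduces to |Q| = 2 ki sin(theta). The wavelength and angle are illustrative.

import numpy as np

wavelength = 4.0                    # Angstrom, hypothetical
ki = 2 * np.pi / wavelength
kf = ki                             # dE = 0, as in the routine above
two_theta = np.radians(90.0)
q_cosine_rule = np.sqrt(ki**2 + kf**2 - 2 * ki * kf * np.cos(two_theta))
q_elastic = 2 * ki * np.sin(two_theta / 2)
assert np.isclose(q_cosine_rule, q_elastic)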
    def _get_scan_data(self, ws_name, progress):
        """ Loads YIG scan data, removes monitors, and prepares
        a workspace for Bragg peak fitting"""
        # workspace indices for monitors
        monitor_indices = "{0}, {1}".format(self._D7NumberPixels, self._D7NumberPixels+1)
        scan_data_name = "scan_data_{}".format(ws_name)
        self._created_ws_names.append(scan_data_name)
        progress.report(0, 'Loading YIG scan data')
        LoadAndMerge(Filename=self.getPropertyValue("Filenames"), OutputWorkspace=scan_data_name, LoaderName='LoadILLPolarizedDiffraction',
                     startProgress=0.0, endProgress=0.6)
        progress.report(6, 'Conjoining the scan data')
        # load the group into a single table workspace
        nfiles = mtd[scan_data_name].getNumberOfEntries()
        # new vertical axis
        x_axis = NumericAxis.create(nfiles)
        # Fill the intensity and position tables with all the data from scans
        conjoined_scan_name = "conjoined_input_{}".format(ws_name)
        # if the name exists in ADS, delete it
        if conjoined_scan_name in mtd:
            DeleteWorkspace(Workspace=conjoined_scan_name)
        self._created_ws_names.append(conjoined_scan_name)
        masking_criteria = self._prepare_masking_criteria()
        name_list = []
        for entry_no, entry in enumerate(mtd[scan_data_name]):
            # normalize to monitor1 as monitor2 is sometimes empty:
            monitor1_counts = entry.readY(self._D7NumberPixels)[0]
            if monitor1_counts != 0:
                monitor_name = '__monitor_' + entry.name()
                CreateSingleValuedWorkspace(DataValue=monitor1_counts, ErrorValue=np.sqrt(monitor1_counts),
                                            OutputWorkspace=monitor_name)
                Divide(LHSWorkspace=entry,
                       RHSWorkspace=monitor_name,
                       OutputWorkspace=entry)
                DeleteWorkspace(Workspace=monitor_name)
            # remove Monitors:
            RemoveSpectra(InputWorkspace=entry, WorkspaceIndices=monitor_indices, OutputWorkspace=entry)
            # prepare proper label for the axes
            x_axis_label = entry.run().getProperty('2theta.requested').value
            x_axis.setValue(entry_no, x_axis_label)
            # convert the x-axis to signedTwoTheta
            ConvertAxisByFormula(InputWorkspace=entry,
                                 Axis='X',
                                 Formula='-180.0 * signedtwotheta / pi',
                                 OutputWorkspace=entry)
            # mask bins using predefined ranges
            for criterion in masking_criteria:
                MaskBinsIf(InputWorkspace=entry,
                           Criterion=criterion,
                           OutputWorkspace=entry)
            # append the new row to a new MatrixWorkspace
            ConvertToPointData(InputWorkspace=entry, OutputWorkspace=entry)
            name_list.append(entry.name())

        ConjoinXRuns(InputWorkspaces=name_list, OutputWorkspace=conjoined_scan_name)
        # replace the X axis and correct its label (TwoTheta, degrees)
        x_axis.setUnit("Label").setLabel('TwoTheta', 'degrees')
        mtd[conjoined_scan_name].replaceAxis(0, x_axis)
        y_axis = NumericAxis.create(self._D7NumberPixels)
        for pixel_no in range(self._D7NumberPixels):
            y_axis.setValue(pixel_no, pixel_no+1)
        mtd[conjoined_scan_name].replaceAxis(1, y_axis)
        return conjoined_scan_name
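Finally, a minimal hedged sketch combining the two axis tricks used above: a numeric pixel axis attached as the vertical axis, and a 'Label' unit. It assumes a Mantid environment; the workspace, pixel count and data are illustrative.

from mantid.simpleapi import CreateWorkspace, mtd
from mantid.api import NumericAxis

n_pixels = 3
CreateWorkspace(OutputWorkspace='demo_pixels',
                DataX=[0., 1.] * n_pixels,
                DataY=[1., 2., 3., 4., 5., 6.],
                NSpec=n_pixels)
pixel_axis = NumericAxis.create(n_pixels)
for pixel_no in range(n_pixels):
    pixel_axis.setValue(pixel_no, pixel_no + 1)    # 1-based pixel numbers, as above
pixel_axis.setUnit("Label").setLabel('Pixel', '')  # label-only unit with no physical unit
mtd['demo_pixels'].replaceAxis(1, pixel_axis)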