Example #1
 def test_container_rebinning_enabled(self):
     xs = numpy.array([0.0, 1.0, 0.0, 1.1])
     ys = numpy.array([2.2, 3.3])
     sample_1 = CreateWorkspace(DataX=xs,
                                DataY=ys,
                                NSpec=2,
                                UnitX='Wavelength')
     xs = numpy.array([-1.0, 0.0, 1.0, 2.0, -1.0, 0.0, 1.0, 2.0])
     ys = numpy.array([0.101, 0.102, 0.103, 0.104, 0.105, 0.106])
     container_1 = CreateWorkspace(DataX=xs,
                                   DataY=ys,
                                   NSpec=2,
                                   UnitX='Wavelength')
     corrected = ApplyPaalmanPingsCorrection(SampleWorkspace=sample_1,
                                             CanWorkspace=container_1,
                                             RebinCanToSample=True)
     self.assertTrue(numpy.all(sample_1.extractY() > corrected.extractY()))
     DeleteWorkspace(sample_1)
     DeleteWorkspace(container_1)
     DeleteWorkspace(corrected)
Example #2
 def _nanminmaxSetup(self):
     xs = numpy.tile(numpy.array([-1, 0, 2, 4, 5]), 3)
     ys = numpy.linspace(-5, 3, 4 * 3)
     vertAxis = numpy.array([-3, -1, 2, 4])
     ws = CreateWorkspace(DataX=xs,
                          DataY=ys,
                          NSpec=3,
                          VerticalAxisUnit='Degrees',
                          VerticalAxisValues=vertAxis,
                          StoreInADS=False)
     return ws
Example #3
def _generate_sample_ws(ws_name):
    data_x = np.arange(0, 10, 0.01)
    data_y = _rayleigh(data_x, 1)

    # Create the workspace and give it some units
    CreateWorkspace(OutputWorkspace=ws_name, DataX=data_x, DataY=data_y,
                    UnitX='MomentumTransfer', VerticalAxisUnit='QSquared', VerticalAxisValues='0.2')
    # Centre the peak over 0
    ScaleX(InputWorkspace=ws_name, Factor=-1, Operation="Add", OutputWorkspace=ws_name)

    return mtd[ws_name]
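The _rayleigh helper used above is not shown in this snippet. A minimal sketch, assuming it evaluates the standard Rayleigh probability density that shapes the peak, could be:

import numpy as np

def _rayleigh(x, sigma):
    # Standard Rayleigh probability density: rises to a maximum near x = sigma
    # and then decays, giving the sample workspace a single asymmetric peak.
    return x / sigma ** 2 * np.exp(-x ** 2 / (2.0 * sigma ** 2))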
Example #4
    def test_add_workspace_to_ADS_adds_workspace_to_ads_in_correct_group_structure(self):
        workspace = CreateWorkspace([0, 0], [0, 0])
        workspace_name = 'test_workspace_name'
        workspace_directory = 'root/level one/level two/'

        self.model.add_workspace_to_ADS(workspace, workspace_name, workspace_directory)

        self.assertTrue(AnalysisDataService.doesExist(workspace_name))
        self.assertTrue(AnalysisDataService.doesExist('root'))
        self.assertTrue(AnalysisDataService.doesExist('level one'))
        self.assertTrue(AnalysisDataService.doesExist('level two'))
Example #5
 def populate_ADS(self):
     self.context.calculate_all_groups()
     self.context.show_all_groups()
     self.context.calculate_all_pairs()
     self.context.show_all_pairs()
     CreateWorkspace(
         [0], [0],
         OutputWorkspace='EMU19489; PhaseQuad; PhaseTable EMU19489')
     self.context.phase_context.add_phase_quad(
         MuonWorkspaceWrapper('EMU19489; PhaseQuad; PhaseTable EMU19489'),
         '19489')
Example #6
 def _create_workspace(self, x_values: list, x_errors: list,
                       x_parameter: str, y_values: list, y_errors: list,
                       y_label: str, output_name: str) -> None:
     """Creates a matrix workspace using the provided data. Uses UnitX if the parameter exists in the UnitFactory."""
     if self._is_in_unit_factory(x_parameter):
         CreateWorkspace(DataX=x_values,
                         Dx=x_errors,
                         DataY=y_values,
                         DataE=y_errors,
                         UnitX=x_parameter,
                         YUnitLabel=y_label,
                         OutputWorkspace=output_name)
     else:
         CreateWorkspace(DataX=x_values,
                         Dx=x_errors,
                         DataY=y_values,
                         DataE=y_errors,
                         YUnitLabel=y_label,
                         OutputWorkspace=output_name)
         self._set_x_label(output_name, x_parameter)
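The _is_in_unit_factory check and the _set_x_label fallback used above are not part of this snippet. A minimal sketch of the unit check, assuming it simply asks Mantid's UnitFactory for the registered unit names, might be:

from mantid.kernel import UnitFactory

def _is_in_unit_factory(self, x_parameter: str) -> bool:
    # True if x_parameter is a registered Mantid unit ID (e.g. 'Wavelength'),
    # in which case it can be passed directly as UnitX.
    return x_parameter in UnitFactory.Instance().getKeys()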
Example #7
 def setUpClass(cls):
     cls.ws2d_histo = CreateWorkspace(
         DataX=[10, 20, 30, 10, 20, 30, 10, 20, 30],
         DataY=[2, 3, 4, 5, 3, 5],
         DataE=[1, 2, 3, 4, 1, 1],
         NSpec=3,
         Distribution=True,
         UnitX='Wavelength',
         VerticalAxisUnit='DeltaE',
         VerticalAxisValues=[4, 6, 8],
         OutputWorkspace='ws2d_histo')
Example #8
 def _create_and_plot_matrix_workspace(self,
                                       name="workspace",
                                       distribution=False):
     ws = CreateWorkspace(OutputWorkspace=name,
                          DataX=zeros(10),
                          DataY=zeros(10),
                          NSpec=2,
                          Distribution=distribution)
     fig = plot([ws], spectrum_nums=[1])
     canvas = fig.canvas
     return fig, canvas
Example #9
    def test_get_spectrum_ws_multi_spectra(self):
        from mantid.simpleapi import CreateWorkspace
        cfms = CrystalFieldMultiSite(['Ce'], ['C2v'],
                                     B20=0.035,
                                     B40=-0.012,
                                     B43=-0.027,
                                     B60=-0.00012,
                                     B63=0.0025,
                                     B66=0.0068,
                                     Temperatures=[4.0, 50.0],
                                     FWHM=[0.1, 0.2])

        x = np.linspace(0.0, 2.0, 30)
        y = np.zeros_like(x)
        e = np.ones_like(x)
        ws = CreateWorkspace(x, y, e)
        x, y = cfms.getSpectrum(0, ws)
        y = y / c_mbsr
        self.assertAlmostEqual(y[0], 12.474945990071641, 6)
        self.assertAlmostEqual(y[1], 4.3004130214544389, 6)
        self.assertAlmostEqual(y[2], 1.4523079303712476, 6)
        self.assertAlmostEqual(y[3], 0.6922657279528992, 6)
        self.assertAlmostEqual(y[4], 0.40107924259746491, 6)
        self.assertAlmostEqual(y[15], 0.050129858433581413, 6)
        self.assertAlmostEqual(y[16], 0.054427788297191478, 6)
        x, y = cfms.getSpectrum(1, ws)
        y = y / c_mbsr
        self.assertAlmostEqual(y[0], 6.3046623789675627, 6)
        self.assertAlmostEqual(y[1], 4.2753024205094912, 6)
        self.assertAlmostEqual(y[2], 2.1778204115683644, 6)
        self.assertAlmostEqual(y[3], 1.2011173460849718, 6)
        self.assertAlmostEqual(y[4], 0.74036730921135963, 6)
        x, y = cfms.getSpectrum(ws)
        y = y / c_mbsr
        self.assertAlmostEqual(y[0], 12.474945990071641, 6)
        self.assertAlmostEqual(y[1], 4.3004130214544389, 6)
        ws = CreateWorkspace(x, y, e, 2)
        x, y = cfms.getSpectrum(ws, 1)
        y = y / c_mbsr
        self.assertAlmostEqual(y[0], 0.050129858433581413, 6)
        self.assertAlmostEqual(y[1], 0.054427788297191478, 6)
Example #10
 def setUpClass(cls):
     cls.ws2d_histo = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30, 10, 20, 30],
                                      DataY=[2, 3, 4, 5, 3, 5],
                                      DataE=[1, 2, 3, 4, 1, 1],
                                      NSpec=3,
                                      Distribution=True,
                                      UnitX='Wavelength',
                                      VerticalAxisUnit='DeltaE',
                                      VerticalAxisValues=[4, 6, 8],
                                      OutputWorkspace='ws2d_histo')
     # initialises the QApplication
     super(cls, FigureErrorsManagerTest).setUpClass()
Example #11
    def PyExec(self):
        self._setup()
        if self._binning_for_calc.size == 0:
            x = np.array(self._input_ws.readX(self._incident_index))
            self._binning_for_calc = [min(x), x[1] - x[0],
                                      max(x) + x[1] - x[0]]
        else:
            x = np.arange(self._binning_for_calc[0], self._binning_for_calc[2],
                          self._binning_for_calc[1])
        if self._binning_for_fit.size == 0:
            x_fit = np.array(self._input_ws.readX(self._incident_index))
            y_fit = np.array(self._input_ws.readY(self._incident_index))
        else:
            rebinned = Rebin(self._input_ws,
                             Params=self._binning_for_fit,
                             PreserveEvents=True,
                             StoreInADS=False)
            x_fit = np.array(rebinned.readX(self._incident_index))
            y_fit = np.array(rebinned.readY(self._incident_index))

        x_bin_centers = 0.5 * (x[:-1] + x[1:])
        if len(x_fit) != len(y_fit):
            x_fit = 0.5 * (x_fit[:-1] + x_fit[1:])
        if self._fit_spectrum_with == 'CubicSpline':
            # Fit using cubic spline
            fit, fit_prime = self.fit_cubic_spline(x_fit,
                                                   y_fit,
                                                   x_bin_centers,
                                                   s=1e7)
        elif self._fit_spectrum_with == 'CubicSplineViaMantid':
            # Fit using cubic spline via Mantid
            fit, fit_prime = self.fit_cubic_spline_via_mantid_spline_smoothing(
                self._input_ws,
                params_input=self._binning_for_fit,
                params_output=self._binning_for_calc,
                Error=0.0001,
                MaxNumberOfBreaks=0)
        elif self._fit_spectrum_with == 'GaussConvCubicSpline':
            # Fit using Gauss conv cubic spline
            fit, fit_prime = self.fit_cubic_spline_with_gauss_conv(
                x_fit, y_fit, x_bin_centers, sigma=0.5)
        # Create output workspace
        unit = self._input_ws.getAxis(0).getUnit().unitID()
        output_workspace = CreateWorkspace(DataX=x,
                                           DataY=np.append(fit, fit_prime),
                                           UnitX=unit,
                                           NSpec=2,
                                           Distribution=False,
                                           ParentWorkspace=self._input_ws,
                                           StoreInADS=False)
        self.setProperty("OutputWorkspace", output_workspace)
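The fit_cubic_spline, fit_cubic_spline_via_mantid_spline_smoothing and fit_cubic_spline_with_gauss_conv helpers selected above are not shown here. A minimal sketch of the plain 'CubicSpline' branch, assuming it uses SciPy's smoothing spline and evaluates both the fit and its first derivative on the calculation grid, could be:

from scipy import interpolate

def fit_cubic_spline(self, x_fit, y_fit, x, s=0):
    # Smoothing cubic spline through the fitted spectrum; s controls the amount
    # of smoothing (s=0 interpolates the points exactly).
    tck = interpolate.splrep(x_fit, y_fit, s=s)
    fit = interpolate.splev(x, tck, der=0)        # fitted incident spectrum
    fit_prime = interpolate.splev(x, tck, der=1)  # its first derivative
    return fit, fit_prime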
Example #12
 def test_container_rebinning_disabled(self):
     xs = numpy.array([0.0, 1.0, 0.0, 1.1])
     ys = numpy.array([2.2, 3.3])
     sample_1 = CreateWorkspace(DataX=xs, DataY=ys, NSpec=2,
                                UnitX='Wavelength')
     xs = numpy.array([-1.0, 0.0, 1.0, 2.0, -1.0, 0.0, 1.0, 2.0])
     ys = numpy.array([0.101, 0.102, 0.103, 0.104, 0.105, 0.106])
     container_1 = CreateWorkspace(DataX=xs, DataY=ys, NSpec=2,
                                   UnitX='Wavelength')
     corrected_ws_name = 'corrected_workspace'
     kwargs = {
         'SampleWorkspace': sample_1,
         'CanWorkspace': container_1,
         'OutputWorkspace': corrected_ws_name,
         'RebinCanToSample': False
     }
     # The Minus algorithm will fail due to different bins in sample and
     # container.
     self.assertRaises(RuntimeError, ApplyPaalmanPingsCorrection, **kwargs)
     DeleteWorkspace(sample_1)
     DeleteWorkspace(container_1)
Example #13
 def create_larger_group(self):
     ws_list = []
     for i in range(5):
         ws_name = 'ws_' + str(i + 1)
         data_x = np.arange(i, (i + 1) * 10 + 0.1, 0.1)
         data_y = np.arange(i, (i + 1) * 10, 0.1)
         data_e = np.arange(i, (i + 1) * 10, 0.1)
         new_ws = CreateWorkspace(OutputWorkspace=ws_name, DataX=data_x, DataY=data_y, DataE=data_e)
         if i == 0:
             new_ws *= 100
         ws_list.append(new_ws)
     GroupWorkspaces(InputWorkspaces=ws_list, OutputWorkspace='ws_group_large')
Example #14
 def test_that_plotting_ws_without_giving_spec_num_sets_correct_spec_num_after_spectra_removed(
         self):
     CreateWorkspace(DataX=[10, 20, 30],
                     DataY=[10, 20, 30],
                     DataE=[1, 1, 1],
                     NSpec=3,
                     OutputWorkspace="ws-with-3-spec")
     RemoveSpectra("ws-with-3-spec", [0, 1], OutputWorkspace='out_ws')
     out_ws = ADS.retrieve('out_ws')
     self.ax.plot(out_ws)
     ws_artist = self.ax.tracked_workspaces['out_ws'][0]
     self.assertEqual(3, ws_artist.spec_num)
Example #15
    def test_scale_on_ragged_workspaces_maintained_when_toggling_normalisation(self):
        ws = CreateWorkspace(DataX=[1, 2, 3, 4, 2, 4, 6, 8], DataY=[2] * 8, NSpec=2, OutputWorkspace="ragged_ws")
        fig = pcolormesh_from_names([ws])
        mock_canvas = MagicMock(figure=fig)
        fig_manager_mock = MagicMock(canvas=mock_canvas)
        fig_interactor = FigureInteraction(fig_manager_mock)
        fig_interactor._toggle_normalization(fig.axes[0])

        clim = fig.axes[0].images[0].get_clim()
        fig_interactor._toggle_normalization(fig.axes[0])
        self.assertEqual(clim, fig.axes[0].images[0].get_clim())
        self.assertNotEqual((-0.1, 0.1), fig.axes[0].images[0].get_clim())
Example #16
 def test_plotprofiles_noXUnitsExecutes(self):
     xs = numpy.linspace(-3., 10., 12)
     ys = numpy.tile(1., len(xs) - 1)
     ws = CreateWorkspace(DataX=xs, DataY=ys, NSpec=1, StoreInADS=False)
     kwargs = {'workspaces': ws}
     figure, axes = testhelpers.assertRaisesNothing(
         self, directtools.plotprofiles, **kwargs)
     self.assertEqual(axes.get_xlabel(), '')
     self.assertEqual(axes.get_ylabel(), '$S(Q,E)$')
     numpy.testing.assert_equal(axes.get_lines()[0].get_data()[0],
                                (xs[1:] + xs[:-1]) / 2)
     numpy.testing.assert_equal(axes.get_lines()[0].get_data()[1], ys)
Example #17
 def _make_single_histogram_ws(self):
     # X-axis width is a multiple of the final bin width so rebinning
     # creates full bins only.
     binWidths = numpy.array([0.13, 0.23, 0.05, 0.27, 0.42])
     xBegin = -0.11
     xs = self._make_boundaries(xBegin, binWidths)
     ys = numpy.zeros(len(xs) - 1)
     ws = CreateWorkspace(DataX=xs, DataY=ys)
     i = len(binWidths) // 2
     middleBinWidth = binWidths[i]
     middleBinX = xs[i] + 0.5 * middleBinWidth
     return ws, middleBinX, middleBinWidth
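The _make_boundaries helper is not included in the snippet. A minimal sketch, assuming it builds the bin edges by accumulating the given widths from the starting value, could be:

import numpy

def _make_boundaries(self, xBegin, binWidths):
    # First edge is xBegin; each subsequent edge adds one bin width.
    return numpy.cumsum(numpy.append(numpy.array([xBegin]), binWidths))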
Example #18
 def setUpClass(cls):
     cls.ws = CreateWorkspace(DataX=np.array([10, 20, 30],
                                             dtype=np.float64),
                              DataY=np.array([2, 3], dtype=np.float64),
                              DataE=np.array([0.02, 0.02],
                                             dtype=np.float64),
                              Distribution=False,
                              UnitX='Wavelength',
                              YUnitLabel='Counts',
                              OutputWorkspace='ws')
     cls.ws1 = CreateWorkspace(DataX=np.array([11, 21, 31],
                                              dtype=np.float64),
                               DataY=np.array([3, 4], dtype=np.float64),
                               DataE=np.array([0.03, 0.03],
                                              dtype=np.float64),
                               Distribution=False,
                               UnitX='Wavelength',
                               YUnitLabel='Counts',
                               OutputWorkspace='ws1')
     # initialises the QApplication
     super(cls, FigureInteractionTest).setUpClass()
Example #19
    def _createOneSpectrum(self, wkspname):
        x = np.arange(300, 16667, 15.)
        y = np.random.random(len(x) - 1)  # histogram
        e = np.sqrt(y)

        CreateWorkspace(OutputWorkspace=wkspname,
                        DataX=x,
                        DataY=y,
                        DataE=e,
                        NSpec=1,
                        UnitX='TOF',
                        YUnitLabel="stuff")
Example #20
 def test_curve_has_errors_on_workspace_with_no_errors(self):
     try:
         ws = CreateWorkspace(DataX=[0],
                              DataY=[0],
                              NSpec=1,
                              OutputWorkspace='test_ws')
         fig = figure()
         ax = fig.add_subplot(111, projection='mantid')
         curve = ax.plot(ws, specNum=1)[0]
         self.assertFalse(curve_has_errors(curve))
     finally:
         ws.delete()
Example #21
 def test_that_plotting_ws_without_giving_spec_num_sets_spec_num_if_ws_has_1_histogram(
         self):
     ws_name = "ws-with-one-spec"
     ws = CreateWorkspace(DataX=[10, 20],
                          DataY=[10, 5000],
                          DataE=[1, 1],
                          OutputWorkspace=ws_name)
     self.ax.plot(ws)
     ws_artist = self.ax.tracked_workspaces[ws_name][0]
     self.assertEqual(1, ws_artist.spec_num)
     self.assertTrue('specNum' in self.ax.creation_args[0])
     self.assertFalse('wkspIndex' in self.ax.creation_args[0])
Example #22
 def _do_image_replace_common_bins(self, color_func, artists):
     im_data = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30, 10, 20, 30],
                               DataY=[3, 4, 5, 3, 4, 5],
                               DataE=[1, 2, 3, 4, 1, 1],
                               NSpec=3)
     getattr(self.ax, color_func)(im_data)
     im_data = CreateWorkspace(DataX=[20, 30, 40, 20, 30, 40, 20, 30, 40],
                               DataY=[3, 4, 5, 3, 4, 5],
                               DataE=[.1, .2, .3, .4, .1, .1],
                               NSpec=3,
                               VerticalAxisValues=[2, 3, 4],
                               VerticalAxisUnit='DeltaE')
     self.ax.replace_workspace_artists(im_data)
     self.assertEqual(1, len(artists))
     left, right, bottom, top = get_colorplot_extents(artists[0])
     self.assertAlmostEqual(20., left)
     self.assertAlmostEqual(40., right)
     self.assertAlmostEqual(1.5, bottom)
     self.assertAlmostEqual(4.5, top)
     # try deleting
     self.ax.remove_workspace_artists(im_data)
Example #23
def create_group_populated_by_two_rebinned_workspaces():
    group = MuonGroup(group_name="group1")
    counts_workspace_22222 = CreateWorkspace([0], [0])
    asymmetry_workspace_22222 = CreateWorkspace([0], [0])
    asymmetry_workspace_unnorm_22222 = CreateWorkspace([0], [0])

    group.update_counts_workspace(MuonRun([22222]), counts_workspace_22222,
                                  True)
    group.update_asymmetry_workspace(MuonRun([22222]),
                                     asymmetry_workspace_22222,
                                     asymmetry_workspace_unnorm_22222, True)

    group.show_rebin([22222], 'counts_name_22222_rebin',
                     'asymmetry_name_22222_rebin',
                     'asymmetry_name_22222_unnorm')
    counts_workspace_33333 = CreateWorkspace([0], [0])
    asymmetry_workspace_33333 = CreateWorkspace([0], [0])
    asymmetry_workspace_unnorm_33333 = CreateWorkspace([0], [0])

    group.update_counts_workspace(MuonRun([33333]), counts_workspace_33333,
                                  True)
    group.update_asymmetry_workspace(MuonRun([33333]),
                                     asymmetry_workspace_33333,
                                     asymmetry_workspace_unnorm_33333, True)

    group.show_rebin([33333], 'counts_name_33333_rebin',
                     'asymmetry_name_33333_rebin',
                     'asymmetry_name_33333_unnorm')

    return group
Example #24
    def setUp(self):
        # Creating two Gaussian peaks on a flat background
        self.x_values = np.linspace(0, 100, 1001)
        self.centre = [25, 75]
        self.height = [35, 20]
        self.width = [10, 5]
        self.y_values = self.gaussian(self.x_values, self.centre[0],
                                      self.height[0], self.width[0])

        self.y_values += self.gaussian(self.x_values, self.centre[1],
                                       self.height[1], self.width[1])

        self.background = 10 * np.ones(len(self.x_values))
        self.y_values += self.background

        # Generating a table with a guess of the position of the centre of the peaks
        peak_table = CreateEmptyTableWorkspace()
        peak_table.addColumn(type='float', name='Approximated Centre')
        peak_table.addRow([self.centre[0] + 2])
        peak_table.addRow([self.centre[1] - 3])

        self.peakids = [
            np.argwhere(self.x_values == self.centre[0])[0, 0],
            np.argwhere(self.x_values == self.centre[1])[0, 0]
        ]

        # Generating a workspace with the data and a flat background
        self.raw_ws = CreateWorkspace(DataX=self.x_values,
                                      DataY=self.y_values,
                                      OutputWorkspace='raw_ws')
        self.data_ws = CreateWorkspace(
            DataX=np.concatenate((self.x_values, self.x_values)),
            DataY=np.concatenate((self.y_values, self.background)),
            DataE=np.sqrt(np.concatenate((self.y_values, self.background))),
            NSpec=2,
            OutputWorkspace='data_ws')

        self.peak_guess_table = peak_table

        self.alg_instance = _FindPeaksAutomatic.FindPeaksAutomatic()
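The gaussian helper called in this setUp is not part of the snippet. A minimal sketch, assuming the peak is parameterised by its centre, height and width (standard deviation), could be:

import numpy as np

def gaussian(self, xvals, centre, height, sigma):
    # Gaussian peak of the given height, centred at centre, with standard deviation sigma.
    exponent = (xvals - centre) / (np.sqrt(2) * sigma)
    return height * np.exp(-exponent * exponent)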
Example #25
    def TransfitRebin(self, inputWS, outputWSName, foilType, divE):
        ws2D = mtd[inputWS]
        # Expand the limits for rebinning to prevent potential issues at the boundaries
        startE = self.ResParamsDict[foilType + '_startE']
        endE = self.ResParamsDict[foilType + '_endE']
        startEp = 0.99 * startE
        endEp = 1.01 * endE
        CropWorkspace(InputWorkspace=ws2D,
                      OutputWorkspace=ws2D,
                      XMin=1000 * startEp,
                      XMax=1000 * endEp)
        numPts = int(((endEp - startEp) / divE) + 1)
        xData_out = []
        xData_in = ws2D.readX(0)
        yData_in = ws2D.readY(0)
        # Deals with the fact that in Mantid the X data mark the start of a bin, not the true x position:
        # calculate each bin width and take the middle of the bin as the x value
        current_bin_widths = []
        xActual = []
        for x in range(0, len(xData_in) - 1):
            current_bin_widths.append(xData_in[x + 1] - xData_in[x])
            xActual.append(xData_in[x] + 0.5 * (xData_in[x + 1] - xData_in[x]))
        # Make xData with uniform binning defined by divE
        for j in range(numPts):
            xData_out.append(1000 * (startEp + j * divE))
        yData_out = [0] * (len(xData_out))
        # Normalise output ydata accordingly based on surrounding values
        yNorm = [0] * (len(xData_out))
        for j in range(0, len(yData_in)):
            currentBin = int((xActual[j] - startEp * 1000) / (divE * 1000))
            scale1 = 1 - ((xActual[j] - xData_out[currentBin]) / (divE * 1000))
            yData_out[currentBin] += yData_in[j] * scale1
            yNorm[currentBin] += scale1
            if currentBin < (len(xData_out) - 1):
                yData_out[currentBin + 1] += yData_in[j] * (1 - scale1)
                yNorm[currentBin + 1] += 1 - scale1
        # Apply the normalisation, with a catch for any potential divide by zero errors
        for i in range(len(yData_out)):
            if yNorm[i] != 0:
                yData_out[i] = yData_out[i] / yNorm[i]
            else:
                print('Empty bin')

        outputWS = CreateWorkspace(DataX=xData_out,
                                   DataY=yData_out,
                                   NSpec=1,
                                   UnitX='meV')
        CropWorkspace(InputWorkspace=outputWS,
                      OutputWorkspace=outputWS,
                      XMin=1000 * startE,
                      XMax=1000 * endE)
        RenameWorkspace(InputWorkspace=outputWS, OutputWorkspace=outputWSName)
Example #26
    def PyExec(self):
        input_file = self.getProperty("InputFile").value
        output_ws = self.getPropertyValue("OutputWorkspace")
        logs = ''

        with h5py.File(input_file, 'r') as hf:
            data = numpy.array(hf.get('entry1/data1/DATA'), dtype='float')
            if data.ndim > 2:
                raise RuntimeError(
                    'Data with more than 2 dimensions are not supported.')
            errors = numpy.array(hf.get('entry1/data1/errors'), dtype='float')
            x = numpy.array(hf.get('entry1/data1/X'), dtype='float')
            if "entry1/data1/PARAMETERS" in hf:
                logs = str(
                    hf.get('entry1/data1/PARAMETERS')[0].decode('UTF-8'))
            y = numpy.array([0])
            nspec = 1
            if data.ndim == 2:
                y = numpy.array(hf.get('entry1/data1/Y'), dtype='float')
                nspec = data.shape[0]
                if x.ndim == 1:
                    x = numpy.tile(x, nspec)

        CreateWorkspace(DataX=x,
                        DataY=data,
                        DataE=errors,
                        NSpec=nspec,
                        VerticalAxisUnit='Label',
                        VerticalAxisValues=y,
                        OutputWorkspace=output_ws)

        if logs:
            log_names = []
            log_values = []
            for log in logs.split('\n'):
                split = log.strip().split('=')
                if len(split) == 2:
                    name = split[0]
                    value = split[1]
                    if name and value:
                        log_names.append(name)
                        log_values.append(value)
            if log_names:
                try:
                    AddSampleLogMultiple(Workspace=output_ws,
                                         LogNames=log_names,
                                         LogValues=log_values)
                except RuntimeError as e:
                    self.log().warning(
                        'Unable to set the sample logs, reason: ' + str(e))

        self.setProperty('OutputWorkspace', output_ws)
Example #27
 def test_container_input_workspace_not_unintentionally_rebinned(self):
     xs = numpy.array([0.0, 1.0, 0.0, 1.1])
     ys = numpy.array([2.2, 3.3])
     sample_1 = CreateWorkspace(DataX=xs,
                                DataY=ys,
                                NSpec=2,
                                UnitX='Wavelength')
     ys = numpy.array([0.11, 0.22])
     container_1 = CreateWorkspace(DataX=xs,
                                   DataY=ys,
                                   NSpec=2,
                                   UnitX='Wavelength')
     corrected = ApplyPaalmanPingsCorrection(SampleWorkspace=sample_1,
                                             CanWorkspace=container_1)
     numHisto = container_1.getNumberHistograms()
     for i in range(numHisto):
         container_xs = container_1.readX(i)
         for j in range(len(container_xs)):
             self.assertEqual(container_xs[j], xs[i * numHisto + j])
     DeleteWorkspace(sample_1)
     DeleteWorkspace(container_1)
     DeleteWorkspace(corrected)
Example #28
    def test_validate_inputs_fails_if_no_instrument(self):
        test_ws = CreateWorkspace(StoreInADS=False,
                                  DataX=[1, 2, 3],
                                  DataY=[10, 20, 30])

        alg = ReflectometryISISSumBanks()
        alg.initialize()
        alg.setProperty('InputWorkspace', test_ws)

        issues = alg.validateInputs()
        self.assertEqual(len(issues), 1)
        self.assertEqual(issues['InputWorkspace'],
                         'The input workspace must have an instrument')
Example #29
    def wrapper(self):
        dataX = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
        dataY = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
        dataE = dataY
        dX = dataY

        ws = CreateWorkspace(DataX=dataX,
                             DataY=dataY,
                             DataE=dataE,
                             NSpec=4,
                             UnitX="Wavelength",
                             Dx=dX)
        return func(self, ws)
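The wrapper above is the inner function of a test decorator, and func is the decorated test method. A minimal sketch of the enclosing decorator, assuming it only builds this small fixture workspace and passes it to the test (the decorator name is hypothetical), could be:

import functools
from mantid.simpleapi import CreateWorkspace

def with_fixture_workspace(func):  # hypothetical name
    # Wrap a test method so it receives a 4-spectrum wavelength workspace with Dx errors.
    @functools.wraps(func)
    def wrapper(self):
        dataY = list(range(1, 13))
        ws = CreateWorkspace(DataX=list(range(1, 17)),
                             DataY=dataY,
                             DataE=dataY,
                             NSpec=4,
                             UnitX="Wavelength",
                             Dx=dataY)
        return func(self, ws)
    return wrapper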
Example #30
 def test_artists_normalization_state_labeled_correctly_for_non_dist_workspace_and_global_setting_off(
         self):
     non_dist_ws = CreateWorkspace(DataX=[10, 20, 25, 30],
                                   DataY=[2, 3, 4, 5],
                                   DataE=[1, 2, 1, 2],
                                   NSpec=1,
                                   Distribution=False,
                                   OutputWorkspace='non_dist_workspace')
     config['graph1d.autodistribution'] = 'Off'
     self.ax.plot(non_dist_ws, specNum=1)
     self.assertFalse(
         self.ax.tracked_workspaces[non_dist_ws.name()][0].is_normalized)
     del self.ax.tracked_workspaces[non_dist_ws.name()]