Example #1
0
    def update_log_workspace_group(self):
        # both ws and name are needed in case a ws is renamed and ws.name() is no longer correct

        if not self._data_workspaces:
            self.delete_logs()
            return

        if not self._log_workspaces:
            self.create_log_workspace_group()
        else:
            for log in self._log_names:
                if not ADS.doesExist(log):
                    self.make_log_table(log)
                    self._log_workspaces.add(log)
            if not ADS.doesExist("run_info"):
                self.make_runinfo_table()
                self._log_workspaces.add("run_info")
        # update log tables
        self.remove_all_log_rows()
        for irow, (ws_name, ws) in enumerate(
                self._data_workspaces.get_loaded_ws_dict().items()):
            try:
                self.add_log_to_table(ws_name, ws, irow)
            except Exception as e:
                logger.warning(
                    f"Unable to output log workspaces for workspace {ws_name}: "
                    + str(e))
Example #3
0
 def cleanup(self):
     if AnalysisDataService.doesExist(self._input_wksp):
         DeleteWorkspace(self._input_wksp)
     if AnalysisDataService.doesExist(self._output_wksp):
         DeleteWorkspace(self._output_wksp)
     if AnalysisDataService.doesExist(self._correction_wksp):
         DeleteWorkspace(self._correction_wksp)
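The three guarded deletes above share one shape; a loop form keeps them in sync if more workspaces are added. A minimal sketch, assuming the three attributes are workspace-name strings set up elsewhere in the test class:

    def cleanup(self):
        # Sketch only: same behaviour as the per-workspace version above
        for wksp in (self._input_wksp, self._output_wksp, self._correction_wksp):
            if AnalysisDataService.doesExist(wksp):
                DeleteWorkspace(wksp)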
Example #4
0
 def test_calibrate(self):
     data = self.corelli
     calibrate(data['workspace'],
               data['bank_name'],
               data['wire_positions'],
               data['peaks_form'],
               fitPar=data['fit_parameters'],
               outputPeak=True,
               parameters_table_group='parameters_table_group')
     # Check the table workspaces containing the polynomial coefficients
     assert AnalysisDataService.doesExist('parameters_table_group')
     workspace = mtd['parameters_table_group']
     for tube_number, name in enumerate(workspace.getNames()):
         assert name == f'parameters_table_group_{tube_number}'
         assert AnalysisDataService.doesExist(name)
     # Check the values of the coefficients for the first and last tube
     for tube_index, expected in [(0, {
             'A0': -0.446804,
             'A1': 0.003513,
             'A2': 0.0
     }), (15, {
             'A0': -0.452107,
             'A1': 0.003528,
            'A2': 0.0
     })]:
         workspace = mtd[f'parameters_table_group_{tube_index}']
         for row in workspace:
             if row['Name'] in expected:
                 self.assertAlmostEqual(expected[row['Name']],
                                        row['Value'],
                                        delta=1.e-6)
     DeleteWorkspaces(['CalibTable', 'parameters_table_group', 'PeakTable'])
Example #5
0
def apply_calibration(workspace: WorkspaceTypes, calibration_table: InputTable,
                      output_workspace: Optional[str] = None, show_instrument: bool = False) -> Workspace2D:
    r"""
    Calibrate the detector positions with an input table, and open the instrument view if so requested.

    :param workspace: input Workspace2D containing total neutron counts per pixel
    :param calibration_table: a TableWorkspace containing one column for detector ID and one column
        for its calibrated XYZ coordinates, in meters
    :param output_workspace: name of the output workspace containing calibrated detectors. If `None`, then
        the output workspace name will be the input workspace plus the suffix `_calibrated`
    :param show_instrument: open the instrument view for `output_workspace`

    :raises AssertionError: either `workspace` or `calibration_table` are not found
    """
    assert AnalysisDataService.doesExist(str(workspace)), f'No workspace {str(workspace)} found'
    assert AnalysisDataService.doesExist(str(calibration_table)), f'No table {str(calibration_table)} found'
    if output_workspace is None:
        output_workspace = str(workspace) + '_calibrated'

    CloneWorkspace(InputWorkspace=workspace, OutputWorkspace=output_workspace)
    ApplyCalibration(Workspace=output_workspace, CalibrationTable=calibration_table)

    if show_instrument is True and None not in (InstrumentViewPresenter, QAppThreadCall):
        instrument_presenter = QAppThreadCall(InstrumentViewPresenter)(mtd[output_workspace])
        QAppThreadCall(instrument_presenter.show_view)()

    return mtd[output_workspace]
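A minimal usage sketch for apply_calibration; the workspace and table names are hypothetical and must already be in the ADS for the assertions to pass:

    # 'counts_ws' and 'corelli_calibration_table' are assumed names
    calibrated = apply_calibration('counts_ws', 'corelli_calibration_table')
    # With output_workspace=None the result is named '<input>_calibrated'
    assert calibrated.name() == 'counts_ws_calibrated'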
Example #6
0
    def chop_data(self, split_ws_name=None, info_ws_name=None, do_tof_correction=False):
        """
        chop data and save to GSAS file
        :param split_ws_name:
        :param info_ws_name:
        :param do_tof_correction: flag to apply TOF correction
        :return:
        """
        # get data file names, splitters workspace and output directory from reduction setup object
        raw_file_name = self._reductionSetup.locate_event_nexus()
        if split_ws_name is None:
            split_ws_name, info_ws_name = self._reductionSetup.get_splitters(throw_not_set=True)
        elif info_ws_name is None:
            raise RuntimeError(
                'Splitters workspace name must be given with information workspace name.')
        _, output_directory = self._reductionSetup.get_chopped_directory(
            True, nexus_only=True)

        if do_tof_correction:
            raise RuntimeError('Not implemented for TOF correction yet.')

        # get number of target workspace
        number_target_ws, is_epoch_time = chop_utility.get_number_chopped_ws(split_ws_name)

        # load data from file to workspace
        event_ws_name = os.path.split(raw_file_name)[1].split('.')[0]
        mantid_helper.load_nexus(data_file_name=raw_file_name,
                                 output_ws_name=event_ws_name, meta_data_only=False)

        if number_target_ws < MAX_CHOPPED_WORKSPACE_IN_MEM:
            # chop event workspace with regular method
            # TODO/DEBUG - Split workspace won't be deleted at this stage
            status, ret_obj = mantid_helper.split_event_data(raw_ws_name=event_ws_name,
                                                             split_ws_name=split_ws_name,
                                                             info_table_name=info_ws_name,
                                                             target_ws_name=None,
                                                             tof_correction=do_tof_correction,
                                                             output_directory=output_directory,
                                                             delete_split_ws=False)
        else:
            # chop event workspace into too many target workspaces to hold in memory
            # simultaneously
            status, ret_obj = self.chop_data_large_number_targets(event_ws_name,
                                                                  tof_correction=do_tof_correction,
                                                                  output_dir=output_directory,
                                                                  is_epoch_time=is_epoch_time,
                                                                  num_target_ws=number_target_ws,
                                                                  delete_split_ws=True)

        # TODO - NIGHT (Nice) - save the split workspace for future reference
        # delete raw workspace
        # TODO/ISSUE/NOWNOW - Requiring a user option for this!
        print('[INFO] Deleting raw event workspace {0} (exists: {1}).'
              ''.format(event_ws_name, AnalysisDataService.doesExist(event_ws_name)))
        if AnalysisDataService.doesExist(event_ws_name):
            mantid_helper.delete_workspace(event_ws_name)

        return status, ret_obj
Example #7
0
    def split_to_single_bank(self, gss_ws_name):
        """
        Split a multiple-bank GSAS workspace to a set of single-spectrum MatrixWorkspace
        Parameters
        ----------
        gss_ws_name

        Returns
        -------
        Name of the grouped workspace, list of single-bank workspace names and list of bank angles
        """
        # check
        assert isinstance(gss_ws_name, str)
        assert AnalysisDataService.doesExist(gss_ws_name)

        # get workspace
        gss_ws = AnalysisDataService.retrieve(gss_ws_name)

        ws_list = list()
        angle_list = list()

        if gss_ws.getNumberHistograms() == 1:
            # input is already a single-spectrum workspace
            ws_list.append(gss_ws_name)
        else:
            num_spec = gss_ws.getNumberHistograms()

            for i_ws in range(num_spec):
                # split this one to a single workspace
                out_ws_name = '%s_bank%d' % (gss_ws_name, i_ws + 1)
                # also can use ExtractSpectra()
                simpleapi.CropWorkspace(InputWorkspace=gss_ws_name,
                                        OutputWorkspace=out_ws_name,
                                        StartWorkspaceIndex=i_ws,
                                        EndWorkspaceIndex=i_ws)
                assert AnalysisDataService.doesExist(out_ws_name)
                ws_list.append(out_ws_name)

            # END-FOR
        # END-IF

        # calculate bank angles
        for ws_name in ws_list:
            bank_angle = calculate_bank_angle(ws_name)
            angle_list.append(bank_angle)

        # group all the workspace
        ws_group_name = gss_ws_name + '_group'
        simpleapi.GroupWorkspaces(InputWorkspaces=ws_list,
                                  OutputWorkspace=ws_group_name)

        self._braggDataDict[ws_group_name] = (gss_ws_name, ws_list)

        return ws_group_name, ws_list, angle_list
Example #8
0
    def test_add_workspace_to_ADS_adds_workspace_to_ads_in_correct_group_structure(
            self):
        workspace = CreateWorkspace([0, 0], [0, 0])
        workspace_name = 'test_workspace_name'
        workspace_directory = 'root/'

        self.model.add_workspace_to_ADS(workspace, workspace_name,
                                        workspace_directory)

        self.assertTrue(AnalysisDataService.doesExist(workspace_name))
        self.assertTrue(AnalysisDataService.doesExist('root'))
Example #10
0
def _removeWorkspace(workspace_name):
    """Remove the workspace with the given name, including any child workspaces if it
    is a group. If a corresponding monitors workspace exists, remove that too."""
    if AnalysisDataService.doesExist(workspace_name):
        workspace = AnalysisDataService.retrieve(workspace_name)
        if isinstance(workspace, WorkspaceGroup):
            # Remove child workspaces first
            while workspace.getNumberOfEntries():
                _removeWorkspace(workspace[0].name())
        AnalysisDataService.remove(workspace_name)
    # If a corresponding monitors workspace also exists, remove that too
    if AnalysisDataService.doesExist(_monitorWorkspace(workspace_name)):
        _removeWorkspace(_monitorWorkspace(workspace_name))
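The _monitorWorkspace helper is assumed but not shown in this corpus. Judging from the error message in Example #30 ("'_monitors does not exist'"), a plausible sketch is:

    def _monitorWorkspace(workspace_name):
        # Assumed convention: the monitors workspace shares the parent
        # workspace's name with a '_monitors' suffix
        return workspace_name + '_monitors'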
Example #11
0
 def update_log_workspace_group(self):
     # both ws and name are needed in case a ws is renamed and ws.name() is no longer correct
     if not self._log_workspaces:
         self.create_log_workspace_group()
     else:
         for log in self._log_names:
             if not ADS.doesExist(log):
                 self.make_log_table(log)
                 self._log_workspaces.add(log)
         if not ADS.doesExist("run_info"):
             self.make_runinfo_table()
             self._log_workspaces.add("run_info")
     # update log tables
     for irow, (ws_name, ws) in enumerate(self._loaded_workspaces.items()):
         self.add_log_to_table(ws_name, ws, irow)  # rename write_log_row
Example #12
0
    def retrieve_hkl_from_spice_table(self):
        """ Get averaged HKL from SPICE table
        HKL will be averaged from the SPICE table, assuming the values in SPICE are correct
        :return:
        """
        # get SPICE table
        spice_table_name = get_spice_table_name(self._myExpNumber, self._myScanNumber)
        assert AnalysisDataService.doesExist(spice_table_name), 'Spice table for Exp %d Scan %d cannot be found.' \
                                                                '' % (self._myExpNumber, self._myScanNumber)

        spice_table_ws = AnalysisDataService.retrieve(spice_table_name)

        # get HKL column indexes
        h_col_index = spice_table_ws.getColumnNames().index('h')
        k_col_index = spice_table_ws.getColumnNames().index('k')
        l_col_index = spice_table_ws.getColumnNames().index('l')

        # scan each Pt.
        hkl = numpy.array([0., 0., 0.])

        num_rows = spice_table_ws.rowCount()
        for row_index in range(num_rows):
            mi_h = spice_table_ws.cell(row_index, h_col_index)
            mi_k = spice_table_ws.cell(row_index, k_col_index)
            mi_l = spice_table_ws.cell(row_index, l_col_index)
            hkl += numpy.array([mi_h, mi_k, mi_l])
        # END-FOR

        self._spiceHKL = hkl/num_rows

        return
Example #13
0
def add_directory_structure(dirs):
    """
    create the nested WorkspaceGroup structure in the ADS specified by the
    stored directory attribute.
    dirs = ["dir1", "dir2"] eg. ['Muon Data', 'MUSR72105', 'MUSR72105 Raw Data']
    """
    if not dirs:
        return
    if len(dirs) > len(set(dirs)):
        raise ValueError("Group names must be unique")

    for directory in dirs:
        if not AnalysisDataService.doesExist(directory):
            workspace_group = WorkspaceGroup()
            AnalysisDataService.addOrReplace(directory, workspace_group)
        elif not isinstance(AnalysisDataService.retrieve(directory),
                            WorkspaceGroup):
            AnalysisDataService.remove(directory)
            workspace_group = WorkspaceGroup()
            AnalysisDataService.addOrReplace(directory, workspace_group)
        else:
            # exists and is a workspace group
            pass

    # Create the nested group structure in the ADS
    previous_dir = ""
    for i, directory in enumerate(dirs):
        if i == 0:
            previous_dir = directory
            continue
        if directory not in AnalysisDataService.retrieve(previous_dir):
            AnalysisDataService.retrieve(previous_dir).add(directory)
        previous_dir = directory
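A usage sketch matching the docstring's own example; the group names are illustrative:

    # Creates (or repairs) the nested structure
    # 'Muon Data' -> 'MUSR72105' -> 'MUSR72105 Raw Data' in the ADS
    add_directory_structure(['Muon Data', 'MUSR72105', 'MUSR72105 Raw Data'])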
Example #14
0
    def runTest(self):
        UseCompatibilityMode()
        LARMOR()
        Set1D()
        Detector("DetectorBench")
        MaskFile(
            'USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt')
        Gravity(True)
        AddRuns(('13065', '13065'), 'LARMOR', 'nxs', lowMem=True)

        AssignSample('13065-add.nxs')
        WavRangeReduction(2, 4, DefaultTrans)

        # Clean up
        for element in AnalysisDataService.getObjectNames():
            if AnalysisDataService.doesExist(
                    element) and element != "13065p1rear_1D_2.0_4.0":
                AnalysisDataService.remove(element)

        paths = [
            os.path.join(config['defaultsave.directory'],
                         'LARMOR00013065-add.nxs'),
            os.path.join(config['defaultsave.directory'], 'SANS2D00013065.log')
        ]  # noqa
        for path in paths:
            if os.path.exists(path):
                os.remove(path)
Example #15
0
 def load_files(self, filenames_string, xunit):
     self._last_added = []
     filenames = [name.strip() for name in filenames_string.split(",")]
     for filename in filenames:
         ws_name = self._generate_workspace_name(filename, xunit)
         if ws_name not in self._loaded_workspaces:
             try:
                 if not ADS.doesExist(ws_name):
                     ws = Load(filename, OutputWorkspace=ws_name)
                     if xunit != "TOF":
                         ConvertUnits(InputWorkspace=ws, OutputWorkspace=ws_name, Target=xunit)
                 else:
                     ws = ADS.retrieve(ws_name)
                 if ws.getNumberHistograms() == 1:
                     self._loaded_workspaces[ws_name] = ws
                     if ws_name not in self._background_workspaces:
                         self._background_workspaces[ws_name] = None
                     self._last_added.append(ws_name)
                     self.add_log_to_table(ws_name, ws)
                 else:
                     logger.warning(
                         f"Invalid number of spectra in workspace {ws_name}. Skipping loading of file.")
             except RuntimeError as e:
                 logger.error(
                     f"Failed to load file: {filename}. Error: {e}. \n Continuing loading of other files.")
         else:
             logger.warning(f"File {ws_name} has already been loaded")
Example #16
0
    def get_weighted_peak_centres(self):
        """ Get the peak centers found in peak workspace.
        Guarantees: the peak centers and their weights (detector counts) are exported
        :return: 2-tuple: list of 3-tuple (Qx, Qy, Qz)
                          list of double (Det_Counts)
        """
        # get PeaksWorkspace
        if not AnalysisDataService.doesExist(self._myPeakWorkspaceName):
            raise RuntimeError('PeaksWorkspace %s does not exist.' % self._myPeakWorkspaceName)

        peak_ws = AnalysisDataService.retrieve(self._myPeakWorkspaceName)

        # get peak center, peak intensity, etc.
        peak_center_list = list()
        peak_intensity_list = list()
        num_peaks = peak_ws.getNumberPeaks()
        for i_peak in range(num_peaks):
            peak_i = peak_ws.getPeak(i_peak)
            center_i = peak_i.getQSampleFrame()
            intensity_i = peak_i.getIntensity()
            peak_center_list.append((center_i.X(), center_i.Y(), center_i.Z()))
            peak_intensity_list.append(intensity_i)
        # END-FOR

        return peak_center_list, peak_intensity_list
Example #17
0
    def _update_high_q(self, _ws=None):
        """
            Update High-Q data set
        """
        self._high_q_data = self.update_data(self._content.high_q_combo,
                                             None,
                                             None,
                                             self._content.high_scale_edit)
        self._high_q_modified = False

        file_in = str(self._content.high_q_combo.lineEdit().text())
        if len(file_in.strip()) == 0:
            self._high_q_data = None
        elif os.path.isfile(file_in) or AnalysisDataService.doesExist(file_in):
            self._high_q_data = DataSet(file_in)
            try:
                self._high_q_data.load(True)
            except (AttributeError, ImportError, NameError, TypeError, ValueError, Warning):
                self._high_q_data = None
                util.set_valid(self._content.high_q_combo.lineEdit(), False)
                QtGui.QMessageBox.warning(self, "Error loading file",
                                          "Could not load %s.\nMake sure you pick the XML output from the reduction." % file_in)
                return
            self._content.high_scale_edit.setText("1.0")
            util.set_valid(self._content.high_q_combo.lineEdit(), True)
        else:
            self._high_q_data = None
            util.set_valid(self._content.high_q_combo.lineEdit(), False)
Example #18
0
    def _update_high_q(self, _ws=None):
        """
            Update High-Q data set
        """
        self._high_q_data = self.update_data(self._content.high_q_combo, None,
                                             None,
                                             self._content.high_scale_edit)
        self._high_q_modified = False

        file_in = str(self._content.high_q_combo.lineEdit().text())
        if len(file_in.strip()) == 0:
            self._high_q_data = None
        elif os.path.isfile(file_in) or AnalysisDataService.doesExist(file_in):
            self._high_q_data = DataSet(file_in)
            try:
                self._high_q_data.load(True)
            except (AttributeError, ImportError, NameError, TypeError,
                    ValueError, Warning):
                self._high_q_data = None
                util.set_valid(self._content.high_q_combo.lineEdit(), False)
                QtGui.QMessageBox.warning(
                    self, "Error loading file",
                    "Could not load %s.\nMake sure you pick the XML output from the reduction."
                    % file_in)
                return
            self._content.high_scale_edit.setText("1.0")
            util.set_valid(self._content.high_q_combo.lineEdit(), True)
        else:
            self._high_q_data = None
            util.set_valid(self._content.high_q_combo.lineEdit(), False)
Example #19
0
 def testCalculateEfficiencyCorrectionInvalidStoreADSCheck(self):
     self.cleanup()
     corr_wksp = CalculateEfficiencyCorrection(
                              WavelengthRange=self._wavelengths,
                              Alpha=self._alpha,
                              StoreInADS=False)
     self.assertFalse(AnalysisDataService.doesExist(corr_wksp.name()))
Example #20
0
 def testCalculateEfficiencyCorrectionInvalidStoreADSCheck(self):
     self.cleanup()
     corr_wksp = CalculateEfficiencyCorrection(
         WavelengthRange=self._wavelengths,
         Alpha=self._alpha,
         StoreInADS=False)
     self.assertFalse(AnalysisDataService.doesExist(corr_wksp.name()))
Example #21
0
    def edit_matrix_workspace(sq_name,
                              scale_factor,
                              shift,
                              edited_sq_name=None):
        """
        Edit the matrix workspace of S(Q) by scaling and shift
        :param sq_name: name of the SofQ workspace
        :param scale_factor:
        :param shift:
        :param edited_sq_name: workspace for the edited S(Q)
        :return:
        """
        # get the workspace
        if not AnalysisDataService.doesExist(sq_name):
            raise RuntimeError(
                'S(Q) workspace {0} cannot be found in ADS.'.format(sq_name))

        if edited_sq_name is not None:
            simpleapi.CloneWorkspace(InputWorkspace=sq_name,
                                     OutputWorkspace=edited_sq_name)
            sq_ws = AnalysisDataService.retrieve(edited_sq_name)
        else:
            sq_ws = AnalysisDataService.retrieve(sq_name)

        # get the vector of Y
        sq_ws = sq_ws * scale_factor
        sq_ws = sq_ws + shift
        if edited_sq_name is not None and sq_ws.name() != edited_sq_name:
            simpleapi.DeleteWorkspace(Workspace=edited_sq_name)
            simpleapi.RenameWorkspace(InputWorkspace=sq_ws,
                                      OutputWorkspace=edited_sq_name)

        assert sq_ws is not None, 'S(Q) workspace cannot be None.'
        print('[DB...BAT] S(Q) workspace that was edited is {0}'.format(sq_ws))
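A usage sketch for edit_matrix_workspace; the names are hypothetical and 'SQ_raw' must already be in the ADS:

    # Scale S(Q) by 2.0 and shift by 0.5, writing the result to
    # 'SQ_edited' so the original workspace is left intact
    edit_matrix_workspace('SQ_raw', scale_factor=2.0, shift=0.5,
                          edited_sq_name='SQ_edited')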
Example #22
0
    def __init__(self, exp_number, scan_number_list, matrix_ws_name, scan_spectrum_map, spectrum_scan_map):
        """
        initialization
        :param exp_number:
        :param scan_number_list:
        :param matrix_ws_name:
        :param scan_spectrum_map:
        :param spectrum_scan_map:
        """
        # check input
        check_integer('Experiment number', exp_number)
        check_list('Scan numbers', scan_number_list)
        check_string('Workspace2D name', matrix_ws_name)
        check_dictionary('Scan number spectrum number mapping', scan_spectrum_map)
        check_dictionary('Spectrum number scan number mapping', spectrum_scan_map)

        if AnalysisDataService.doesExist(matrix_ws_name) is False:
            raise RuntimeError('Workspace {} does not exist.'.format(matrix_ws_name))

        # store
        self._exp_number = exp_number
        self._scan_number_list = scan_number_list[:]
        self._matrix_ws_name = matrix_ws_name
        self._scan_spectrum_map = scan_spectrum_map
        self._spectrum_scan_map = spectrum_scan_map

        # TODO - 20180814 - Add pt number, ROI name and integration direction for future check!

        # others
        self._model_ws_name = None

        return
Example #23
0
    def _group_workspaces(self, workspaces, output_ws_name):
        """
        Groups all the given workspaces into a group with the given name. If the group
        already exists it will add them to that group.
        """
        if not self.getProperty(Prop.GROUP_TOF).value:
            return

        workspaces = self._collapse_workspace_groups(workspaces)

        if not workspaces:
            return

        if AnalysisDataService.doesExist(output_ws_name):
            ws_group = AnalysisDataService.retrieve(output_ws_name)
            if not isinstance(ws_group, WorkspaceGroup):
                raise RuntimeError(
                    'Cannot group TOF workspaces, a workspace called TOF already exists'
                )
            else:
                for ws in workspaces:
                    if ws not in ws_group:
                        ws_group.add(ws)
        else:
            alg = self.createChildAlgorithm("GroupWorkspaces")
            alg.setProperty("InputWorkspaces", workspaces)
            alg.setProperty("OutputWorkspace", output_ws_name)
            alg.execute()
            ws_group = alg.getProperty("OutputWorkspace").value
        AnalysisDataService.addOrReplace(output_ws_name, ws_group)
        return ws_group
Example #24
0
    def test_correct_tube_to_ideal_tube(self):
        # Verify the quadratic fit works
        data = self.y_quad_data

        # fit the Y-coordinates to the pixel positions with a default quadratic function
        fitted_coordinates = correct_tube_to_ideal_tube(
            data['tube_points'],
            data['ideal_tube_coordinates'],
            data['detector_count'],
            parameters_table='parameters')

        # Verify the fitted coordinates are the ideal_tube_coordinates
        assert_allclose(
            [fitted_coordinates[int(n)] for n in data['tube_points']],
            data['ideal_tube_coordinates'],
            atol=0.0001)
        # Compare fitting coefficients
        assert AnalysisDataService.doesExist('parameters')
        # here retrieve the fitting coefficients from the 'parameters' table and compare to the expected values
        expected = data['coefficients']
        for row in mtd['parameters']:
            if row['Name'] in expected:
                self.assertAlmostEqual(row['Value'],
                                       expected[row['Name']],
                                       places=6)
        # a bit of clean-up
        DeleteWorkspaces([
            'PolyFittingWorkspace', 'QF_NormalisedCovarianceMatrix',
            'QF_Parameters', 'QF_Workspace', 'parameters'
        ])
Example #25
0
    def test_batch_reduction_on_multiperiod_file(self):
        # Arrange
        # Build the data information
        data_builder = get_data_builder(SANSFacility.ISIS)
        data_builder.set_sample_scatter("SANS2D0005512")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info)
        user_file_director.set_user_file("MASKSANS2Doptions.091A")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = ["5512p1rear_1D_2.0_14.0Phi-45.0_45.0", "5512p2rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p3rear_1D_2.0_14.0Phi-45.0_45.0", "5512p4rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p5rear_1D_2.0_14.0Phi-45.0_45.0", "5512p6rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p7rear_1D_2.0_14.0Phi-45.0_45.0", "5512p8rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p9rear_1D_2.0_14.0Phi-45.0_45.0", "5512p10rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p11rear_1D_2.0_14.0Phi-45.0_45.0", "5512p12rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p13rear_1D_2.0_14.0Phi-45.0_45.0"]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #26
0
    def edit_matrix_workspace(sq_name, scale_factor, shift, edited_sq_name=None):
        """
        Edit the matrix workspace of S(Q) by scaling and shift
        :param sq_name: name of the SofQ workspace
        :param scale_factor:
        :param shift:
        :param edited_sq_name: workspace for the edited S(Q)
        :return:
        """
        # get the workspace
        if not AnalysisDataService.doesExist(sq_name):
            raise RuntimeError('S(Q) workspace {0} cannot be found in ADS.'.format(sq_name))

        if edited_sq_name is not None:
            simpleapi.CloneWorkspace(InputWorkspace=sq_name, OutputWorkspace=edited_sq_name)
            sq_ws = AnalysisDataService.retrieve(edited_sq_name)
        else:
            sq_ws = AnalysisDataService.retrieve(sq_name)

        # get the vector of Y
        sq_ws = sq_ws * scale_factor
        sq_ws = sq_ws + shift
        if edited_sq_name is not None and sq_ws.name() != edited_sq_name:
            simpleapi.DeleteWorkspace(Workspace=edited_sq_name)
            simpleapi.RenameWorkspace(InputWorkspace=sq_ws, OutputWorkspace=edited_sq_name)

        assert sq_ws is not None, 'S(Q) workspace cannot be None.'
        print('[DB...BAT] S(Q) workspace that was edited is {0}'.format(sq_ws))
Example #27
0
    def load_bragg_file(self, file_name):
        """
        Load Bragg diffraction file (including 3-column data file, GSAS file) for Rietveld
        """
        # load with different file type
        base_file_name = os.path.basename(file_name).lower()
        gss_ws_name = os.path.basename(file_name).split('.')[0]
        if base_file_name.endswith('.gss') or base_file_name.endswith('.gsa') or base_file_name.endswith('.gda'):
            simpleapi.LoadGSS(Filename=file_name,
                              OutputWorkspace=gss_ws_name)
        elif base_file_name.endswith('.nxs'):
            simpleapi.LoadNexusProcessed(Filename=file_name, OutputWorkspace=gss_ws_name)
            simpleapi.ConvertUnits(InputWorkspace=gss_ws_name, OutputWorkspace=gss_ws_name, EMode='Elastic', Target='TOF')
        elif base_file_name.endswith('.dat'):
            simpleapi.LoadAscii(Filename=file_name,
                                OutputWorkspace=gss_ws_name,
                                Unit='TOF')
        else:
            raise RuntimeError('File %s is not of a supported type.' % file_name)
        self._braggDataList.append(gss_ws_name)

        # check
        assert AnalysisDataService.doesExist(gss_ws_name)
        angle_list = AddieDriver.calculate_bank_angle(gss_ws_name)

        return gss_ws_name, angle_list
Example #28
0
    def change_plot_guess(self, plot_guess, parameter_dict):
        try:
            fit_function = parameter_dict['Function']
            data_ws_name = parameter_dict['InputWorkspace']
        except KeyError:
            return
        if self.context.workspace_suffix == MUON_ANALYSIS_SUFFIX:
            guess_ws_name = MUON_ANALYSIS_GUESS_WS
        elif self.context.workspace_suffix == FREQUENCY_DOMAIN_ANALYSIS_SUFFIX:
            guess_ws_name = FREQUENCY_DOMAIN_ANALYSIS_GUESS_WS
        else:
            guess_ws_name = '__unknown_interface_fitting_guess'

        # Handle case of function removed
        if fit_function is None and plot_guess:
            self.context.fitting_context.notify_plot_guess_changed(
                plot_guess, None)
        elif fit_function is None or data_ws_name == '':
            return
        else:
            # evaluate the current function on the workspace
            if plot_guess:
                try:
                    EvaluateFunction(InputWorkspace=data_ws_name,
                                     Function=fit_function,
                                     StartX=parameter_dict['StartX'],
                                     EndX=parameter_dict['EndX'],
                                     OutputWorkspace=guess_ws_name)
                except RuntimeError:
                    mantid.logger.error('Could not evaluate the function.')
                    return

            if AnalysisDataService.doesExist(guess_ws_name):
                self.context.fitting_context.notify_plot_guess_changed(
                    plot_guess, guess_ws_name)
Example #29
0
 def load_files(self, filenames_string):
     self._last_added = []
     filenames = [name.strip() for name in filenames_string.split(",")]
     for filename in filenames:
         ws_name = self._generate_workspace_name(filename)
         if ws_name not in self._loaded_workspaces:
             try:
                 if not ADS.doesExist(ws_name):
                     ws = Load(filename, OutputWorkspace=ws_name)
                 else:
                     ws = ADS.retrieve(ws_name)
                 if ws.getNumberHistograms() == 1:
                     self._loaded_workspaces[ws_name] = ws
                     if ws_name not in self._bg_sub_workspaces:
                         self._bg_sub_workspaces[ws_name] = None
                     if ws_name not in self._bg_params:
                         self._bg_params[ws_name] = []
                     self._last_added.append(ws_name)
                 else:
                     logger.warning(
                         f"Invalid number of spectra in workspace {ws_name}. Skipping loading of file.")
             except RuntimeError as e:
                 logger.error(
                     f"Failed to load file: {filename}. Error: {e}. \n Continuing loading of other files.")
         else:
             logger.warning(f"File {ws_name} has already been loaded")
     self.update_log_workspace_group()
Example #30
0
    def _isValidWorkspace(self, workspace_name, workspace_id):
        """Returns true, if the workspace of name workspace_name is a valid
        reflectometry workspace of type workspace_id. Otherwise, deletes the
        workspace if the user requested to reload invalid workspaces, or raises
        an error otherwise
        """
        if not _hasWorkspaceID(workspace_name, workspace_id):
            message = 'Workspace ' + workspace_name + ' exists but is not a ' + workspace_id
            if self._reload:
                self.log().information(message)
                _removeWorkspace(workspace_name)
                return False
            else:
                raise RuntimeError(message)

        # For event workspaces, the monitors workspace must also exist, otherwise it's not valid
        if workspace_id == "EventWorkspace":
            if not AnalysisDataService.doesExist(
                    _monitorWorkspace(workspace_name)):
                message = 'Monitors workspace ' + workspace_name + '_monitors does not exist'
                if self._reload:
                    self.log().information(message)
                    _removeWorkspace(workspace_name)
                    return False
                else:
                    raise RuntimeError(message)
        return True
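The _hasWorkspaceID helper is assumed but not shown. One plausible sketch, using the id() method Mantid workspaces expose to report their type:

    def _hasWorkspaceID(workspace_name, workspace_id):
        # Sketch only: compare the workspace's Mantid type id
        # (e.g. 'EventWorkspace', 'Workspace2D') with the expected one
        workspace = AnalysisDataService.retrieve(workspace_name)
        return workspace.id() == workspace_id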
Example #31
0
    def _plot_vanadium_curves():
        van_curve_twin_ws = "__engggui_vanadium_curves_twin_ws"

        if Ads.doesExist(van_curve_twin_ws):
            DeleteWorkspace(van_curve_twin_ws)
        CloneWorkspace(InputWorkspace="engggui_vanadium_curves", OutputWorkspace=van_curve_twin_ws)
        van_curves_ws = Ads.retrieve(van_curve_twin_ws)

        fig = plt.figure()
        gs = gridspec.GridSpec(1, 2)
        curve_plot_bank_1 = fig.add_subplot(gs[0], projection="mantid")
        curve_plot_bank_2 = fig.add_subplot(gs[1], projection="mantid")

        curve_plot_bank_1.plot(van_curves_ws, wkspIndex=0)
        curve_plot_bank_1.plot(van_curves_ws, wkspIndex=1)
        curve_plot_bank_1.plot(van_curves_ws, wkspIndex=2)
        curve_plot_bank_1.set_title("Engg GUI Vanadium Curves Bank 1")
        curve_plot_bank_1.legend(["Data", "Calc", "Diff"])

        curve_plot_bank_2.plot(van_curves_ws, wkspIndex=3)
        curve_plot_bank_2.plot(van_curves_ws, wkspIndex=4)
        curve_plot_bank_2.plot(van_curves_ws, wkspIndex=5)
        curve_plot_bank_2.set_title("Engg GUI Vanadium Curves Bank 2")
        curve_plot_bank_2.legend(["Data", "Calc", "Diff"])

        fig.show()
Example #32
0
    def append_data_file(self, data_file, workspace=None):
        """
            Append a file to be processed.
            @param data_file: name of the file to be processed
            @param workspace: optional name of the workspace for this data,
                default will be the name of the file
            TODO: this needs to be an ordered list
        """
        if data_file is None:
            if AnalysisDataService.doesExist(workspace):
                self._data_files[workspace] = None
                return
            else:
                raise RuntimeError(
                    "Trying to append a data set without a file name or an existing workspace."
                )
        if isinstance(data_file, list):
            if workspace is None:
                # Use the first file to determine the workspace name
                workspace = extract_workspace_name(data_file[0])
        else:
            if workspace is None:
                workspace = extract_workspace_name(data_file)

        self._data_files[workspace] = data_file
Example #33
0
    def _generate_tof_fit_workspace(difa, difc, tzero, bank):
        bank_ws = Ads.retrieve(CalibrationModel._generate_table_workspace_name(bank))

        x_val = []
        y_val = []
        y2_val = []

        difa_to_plot = difa
        difc_to_plot = difc
        tzero_to_plot = tzero

        for irow in range(0, bank_ws.rowCount()):
            x_val.append(bank_ws.cell(irow, 0))
            y_val.append(bank_ws.cell(irow, 5))
            y2_val.append(pow(x_val[irow], 2) * difa_to_plot + x_val[irow] * difc_to_plot + tzero_to_plot)

        ws1 = CreateWorkspace(DataX=x_val,
                              DataY=y_val,
                              UnitX="Expected Peaks Centre (dSpacing A)",
                              YUnitLabel="Fitted Peaks Centre(TOF, us)")
        ws2 = CreateWorkspace(DataX=x_val, DataY=y2_val)

        output_ws = "engggui_tof_peaks_bank_" + str(bank)
        if Ads.doesExist(output_ws):
            DeleteWorkspace(output_ws)

        AppendSpectra(ws1, ws2, OutputWorkspace=output_ws)
        DeleteWorkspace(ws1)
        DeleteWorkspace(ws2)
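The per-row loop evaluates the GSAS conversion TOF = DIFA*d^2 + DIFC*d + TZERO at each fitted peak centre. A vectorized equivalent (sketch, assuming numpy):

    import numpy as np

    d = np.array(x_val)
    # Same quadratic the loop above builds point by point
    y2_val = (difa_to_plot * d ** 2 + difc_to_plot * d + tzero_to_plot).tolist()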
Example #34
0
    def _update_high_q(self, ws=None):
        """
            Update High-Q data set
        """
        self._high_q_data = self.update_data(self._content.high_q_combo,
                                             None,
                                             None,
                                             self._content.high_scale_edit)
        self._high_q_modified = False

        file = str(self._content.high_q_combo.lineEdit().text())
        if len(file.strip()) == 0:
            self._high_q_data = None
        elif os.path.isfile(file) or AnalysisDataService.doesExist(file):
            self._high_q_data = DataSet(file)
            try:
                self._high_q_data.load(True)
            except Exception:
                self._high_q_data = None
                util.set_valid(self._content.high_q_combo.lineEdit(), False)
                QtGui.QMessageBox.warning(self, "Error loading file", "Could not load %s.\nMake sure you pick the XML output from the reduction." % file)
                return
            self._content.high_scale_edit.setText("1.0")
            npts = self._high_q_data.get_number_of_points()
            util.set_valid(self._content.high_q_combo.lineEdit(), True)
        else:
            self._high_q_data = None
            util.set_valid(self._content.high_q_combo.lineEdit(), False)
Example #35
0
def load_bragg_by_filename(file_name):
    """
    Load Bragg diffraction file (including 3-column data file, GSAS file) for Rietveld
    """
    # load with different file type
    base_file_name = os.path.basename(file_name).lower()
    gss_ws_name = os.path.basename(file_name).split('.')[0]
    if base_file_name.endswith('.gss') or base_file_name.endswith(
            '.gsa') or base_file_name.endswith('.gda'):
        simpleapi.LoadGSS(Filename=file_name, OutputWorkspace=gss_ws_name)
    elif base_file_name.endswith('.nxs'):
        simpleapi.LoadNexusProcessed(Filename=file_name,
                                     OutputWorkspace=gss_ws_name)
        simpleapi.ConvertUnits(InputWorkspace=gss_ws_name,
                               OutputWorkspace=gss_ws_name,
                               EMode='Elastic',
                               Target='TOF')
    elif base_file_name.endswith('.dat'):
        simpleapi.LoadAscii(Filename=file_name,
                            OutputWorkspace=gss_ws_name,
                            Unit='TOF')
    else:
        raise RuntimeError('File %s is not of a supported type.' % file_name)

    # check
    assert AnalysisDataService.doesExist(gss_ws_name)
    angle_list = addie.utilities.workspaces.calculate_bank_angle(gss_ws_name)

    return gss_ws_name, angle_list
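A usage sketch for load_bragg_by_filename; the path is hypothetical, and the file extension selects the loader:

    # Returns the GSAS workspace name and the list of bank angles
    gss_ws_name, angle_list = load_bragg_by_filename('/data/NOM_1234.gsa')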
Example #36
0
    def _update_high_q(self, ws=None):
        """
            Update High-Q data set
        """
        self._high_q_data = self.update_data(self._content.high_q_combo, None,
                                             None,
                                             self._content.high_scale_edit)
        self._high_q_modified = False

        file = str(self._content.high_q_combo.lineEdit().text())
        if len(file.strip()) == 0:
            self._high_q_data = None
        elif os.path.isfile(file) or AnalysisDataService.doesExist(file):
            self._high_q_data = DataSet(file)
            try:
                self._high_q_data.load(True)
            except Exception:
                self._high_q_data = None
                util.set_valid(self._content.high_q_combo.lineEdit(), False)
                QtGui.QMessageBox.warning(
                    self, "Error loading file",
                    "Could not load %s.\nMake sure you pick the XML output from the reduction."
                    % file)
                return
            self._content.high_scale_edit.setText("1.0")
            npts = self._high_q_data.get_number_of_points()
            util.set_valid(self._content.high_q_combo.lineEdit(), True)
        else:
            self._high_q_data = None
            util.set_valid(self._content.high_q_combo.lineEdit(), False)
Example #37
0
    def _group_workspaces(self, workspaces, output_ws_name):
        """
        Groups all the given workspaces into a group with the given name. If the group
        already exists it will add them to that group.
        """
        if len(workspaces) < 1:
            return

        workspaces = self._collapse_workspace_groups(workspaces)

        if not workspaces:
            return

        if AnalysisDataService.doesExist(output_ws_name):
            ws_group = AnalysisDataService.retrieve(output_ws_name)
            if not isinstance(ws_group, WorkspaceGroup):
                raise RuntimeError(
                    'Cannot group TOF workspaces, a workspace called TOF already exists'
                )
            else:
                for ws in workspaces:
                    if ws not in ws_group:
                        ws_group.add(ws)
        else:
            alg = self.createChildAlgorithm("GroupWorkspaces")
            alg.setProperty("InputWorkspaces", list(workspaces))
            alg.setProperty("OutputWorkspace", output_ws_name)
            alg.execute()
            ws_group = alg.getProperty("OutputWorkspace").value
            # We can't add the group as an output property or it will duplicate
            # the history for the contained workspaces, so add it directly to
            # the ADS
            AnalysisDataService.addOrReplace(output_ws_name, ws_group)
Example #38
0
    def retrieve_hkl_from_spice_table(self):
        """ Get averaged HKL from SPICE table
        HKL will be averaged from SPICE table by assuming the value in SPICE might be right
        :return:
        """
        # get SPICE table
        spice_table_name = get_spice_table_name(self._myExpNumber, self._myScanNumber)
        assert AnalysisDataService.doesExist(spice_table_name), 'Spice table for exp %d scan %d cannot be found.' \
                                                                '' % (self._myExpNumber, self._myScanNumber)

        spice_table_ws = AnalysisDataService.retrieve(spice_table_name)

        # get HKL column indexes
        h_col_index = spice_table_ws.getColumnNames().index('h')
        k_col_index = spice_table_ws.getColumnNames().index('k')
        l_col_index = spice_table_ws.getColumnNames().index('l')

        # scan each Pt.
        hkl = numpy.array([0., 0., 0.])

        num_rows = spice_table_ws.rowCount()
        for row_index in range(num_rows):
            mi_h = spice_table_ws.cell(row_index, h_col_index)
            mi_k = spice_table_ws.cell(row_index, k_col_index)
            mi_l = spice_table_ws.cell(row_index, l_col_index)
            hkl += numpy.array([mi_h, mi_k, mi_l])
        # END-FOR

        self._spiceHKL = hkl/num_rows

        return
Example #39
0
    def get_weighted_peak_centres(self):
        """ Get the peak centers found in peak workspace.
        Guarantees: the peak centers and their weights (detector counts) are exported
        :return: 2-tuple: list of 3-tuple (Qx, Qy, Qz)
                          list of double (Det_Counts)
        """
        # get PeaksWorkspace
        if not AnalysisDataService.doesExist(self._myPeakWorkspaceName):
            raise RuntimeError('PeaksWorkspace %s does not exist.' % self._myPeakWorkspaceName)

        peak_ws = AnalysisDataService.retrieve(self._myPeakWorkspaceName)

        # get peak center, peak intensity, etc.
        peak_center_list = list()
        peak_intensity_list = list()
        num_peaks = peak_ws.getNumberPeaks()
        for i_peak in range(num_peaks):
            peak_i = peak_ws.getPeak(i_peak)
            center_i = peak_i.getQSampleFrame()
            intensity_i = peak_i.getIntensity()
            peak_center_list.append((center_i.X(), center_i.Y(), center_i.Z()))
            peak_intensity_list.append(intensity_i)
        # END-FOR

        return peak_center_list, peak_intensity_list
Example #40
0
    def test_batch_reduction_on_period_time_sliced_wavelength_range_data(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")
        data_builder.set_sample_scatter_period(1)

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "MASKSANS2Doptions.091A"
        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)

        state = user_file_director.get_all_states()
        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB

        state.slice.start_time = [1.0, 3.0]
        state.slice.end_time = [3.0, 5.0]

        start = [1.0, 1.0]
        end = [3.0, 2.0]
        state.wavelength.wavelength_low = start
        state.wavelength.wavelength_high = end

        state.adjustment.normalize_to_monitor.wavelength_low = start
        state.adjustment.normalize_to_monitor.wavelength_high = end

        state.adjustment.calculate_transmission.wavelength_low = start
        state.adjustment.calculate_transmission.wavelength_high = end

        state.adjustment.wavelength_and_pixel_adjustment.wavelength_low = start
        state.adjustment.wavelength_and_pixel_adjustment.wavelength_high = end

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t3.00_T5.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t3.00_T5.00"
        ]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #41
0
    def export_to_rmcprofile(ws_name,
                             output_file_name,
                             comment='',
                             ws_index=0):
        """ Export a workspace 2D to a 2 column data for RMCProfile
        """
        # check inputs
        assert isinstance(
            ws_name,
            str), 'Workspace name {0} must be a string but not a {1}.'.format(
                ws_name, type(ws_name))
        assert isinstance(
            output_file_name, str
        ), 'Output file name {0} must be a string but not a {1}.'.format(
            output_file_name, type(output_file_name))
        assert isinstance(
            comment,
            str), 'Comment {0} must be a string but not a {1}.'.format(
                comment, type(comment))
        assert isinstance(
            ws_index,
            int), 'Workspace index {0} must be an integer but not a {1}.'.format(
                ws_index, type(ws_index))

        # convert to point data from histogram
        simpleapi.ConvertToPointData(InputWorkspace=ws_name,
                                     OutputWorkspace=ws_name)

        # get workspace for vecX and vecY
        if AnalysisDataService.doesExist(ws_name):
            workspace = AnalysisDataService.retrieve(ws_name)
        else:
            raise RuntimeError(
                'Workspace {0} does not exist in ADS.'.format(ws_name))
        if not 0 <= ws_index < workspace.getNumberHistograms():
            raise RuntimeError(
                'Workspace index {0} is out of range.'.format(ws_index))

        vec_x = workspace.readX(ws_index)
        vec_y = workspace.readY(ws_index)

        # write to buffer
        wbuf = ''
        wbuf += '{0}\n'.format(len(vec_x))
        wbuf += '{0}\n'.format(comment)
        for index in range(len(vec_x)):
            wbuf += ' {0} {1}\n'.format(vec_x[index], vec_y[index])

        # write to file
        try:
            with open(output_file_name, 'w') as ofile:
                ofile.write(wbuf)
        except IOError as io_err:
            raise RuntimeError(
                'Unable to export data to file {0} in RMCProfile format due to {1}.'
                .format(output_file_name, io_err))

        return
Example #42
0
 def testCalculateEfficiencyCorrectionStoreADSCheck(self):
     self.cleanup()
     alg_test = run_algorithm("CalculateEfficiencyCorrection",
                              WavelengthRange=self._wavelengths,
                              Alpha=self._alpha,
                              OutputWorkspace=self._output_wksp)
     self.assertTrue(alg_test.isExecuted())
     self.assertTrue(AnalysisDataService.doesExist(self._output_wksp))
Example #43
0
 def _display(masked_workspace):
     if masked_workspace and AnalysisDataService.doesExist(masked_workspace.name()):
         if PYQT4:
             instrument_win = mantidplot.getInstrumentView(masked_workspace.name())
             instrument_win.show()
         else:
             instrument_win = InstrumentViewPresenter(masked_workspace)
             instrument_win.view.show()
Example #44
0
    def test_that_load_dead_time_from_filename_places_table_in_ADS(self):
        filename = 'MUSR00022725.nsx'

        name = utils.load_dead_time_from_filename(filename)
        dead_time_table = AnalysisDataService.retrieve(name)

        self.assertEqual(name, 'MUSR00022725_deadTimes')
        self.assertTrue(AnalysisDataService.doesExist(name))
        self.assertTrue(isinstance(dead_time_table, ITableWorkspace))
Example #45
0
    def _create_sensitivity(self):
        if IS_IN_MANTIDPLOT and self.options_callback is not None:
            # Get patch information
            patch_ws = ""
            if AnalysisDataService.doesExist(self.patch_ws):
                patch_ws = self.patch_ws

            try:
                reduction_table_ws = self.options_callback()
                patch_output = AnalysisDataService.doesExist(patch_ws)
                filename = self._content.sensitivity_file_edit.text()
                script  = "ComputeSensitivity(Filename='%s',\n" % filename
                script += "                   ReductionProperties='%s',\n" % reduction_table_ws
                script += "                   OutputWorkspace='sensitivity',\n"
                script += "                   PatchWorkspace='%s')\n" % patch_ws
                mantidplot.runPythonScript(script, True)
            except Exception:
                print("Could not compute sensitivity")
                print(sys.exc_info()[1])
Example #46
0
 def _workspaceExistsAndIsValid(self, workspace_name, isTrans):
     """Return True if the given workspace exists in the ADS and is valid"""
     if not AnalysisDataService.doesExist(workspace_name):
         self.log().information('Workspace ' + workspace_name + ' does not exist')
         return False
     self.log().information('Workspace ' + workspace_name + ' exists')
     if not isTrans and self._slicingEnabled():
         return self._isValidWorkspace(workspace_name, "EventWorkspace")
     else:
         return self._isValidWorkspace(workspace_name, "Workspace2D")
Example #47
0
 def _renameWorkspaceBasedOnRunNumber(self, workspace_name, isTrans):
     """Rename the given workspace based on its run number and a standard prefix"""
     new_name = self._prefixedName(_getRunNumberAsString(workspace_name), isTrans)
     if new_name != workspace_name:
         RenameWorkspace(InputWorkspace=workspace_name, OutputWorkspace=new_name)
         # Also rename the monitor workspace, if there is one
         if AnalysisDataService.doesExist(_monitorWorkspace(workspace_name)):
             RenameWorkspace(InputWorkspace=_monitorWorkspace(workspace_name),
                             OutputWorkspace=_monitorWorkspace(new_name))
     return new_name
Example #48
0
    def set_data_ws_name(self, md_ws_name):
        """ Set the name of MDEventWorkspace with merged Pts.
        :param md_ws_name:
        :return:
        """
        assert isinstance(md_ws_name, str)
        assert AnalysisDataService.doesExist(md_ws_name)

        self._myDataMDWorkspaceName = md_ws_name

        return
Example #49
0
 def load_meta_data(cls, file_path, outputWorkspace):
     try:
         if IN_MANTIDPLOT:
             script = "LoadEventNexus(Filename='%s', OutputWorkspace='%s', MetaDataOnly=True)" % (file_path, outputWorkspace)
             execute_script(script)
             if not AnalysisDataService.doesExist(outputWorkspace):
                 return False
         else:
             LoadEventNexus(Filename=file_path, OutputWorkspace=outputWorkspace, MetaDataOnly=True)
         return True
     except Exception:
         return False
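A hedged sketch of the metadata-only load that Example #49 wraps; the file path is a placeholder, and the 'run_number' log is commonly written by LoadEventNexus but is not guaranteed for every file:

from mantid.api import AnalysisDataService
from mantid.simpleapi import LoadEventNexus

LoadEventNexus(Filename='/path/to/run.nxs',    # placeholder path
               OutputWorkspace='meta_only',
               MetaDataOnly=True)               # logs only, no events
if AnalysisDataService.doesExist('meta_only'):
    run = AnalysisDataService.retrieve('meta_only').run()
    print(run.getProperty('run_number').value)  # log presence is file-dependent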
Example #50
0
 def load_meta_data(cls, file_path, outputWorkspace):
     try:
         if IN_MANTIDPLOT:
             script = "LoadSpice2D(Filename='%s', OutputWorkspace='%s')" % (file_path, outputWorkspace)
             mantidplot.runPythonScript(script, True)
             if not AnalysisDataService.doesExist(outputWorkspace):
                 return False
         else:
             api.LoadSpice2D(Filename=file_path, OutputWorkspace=outputWorkspace)
         return True
     except:
         return False
Example #51
0
    def __init__(self, exp_number, scan_number, peak_ws_name):
        """ Initialization
        Purpose: set up unchanged parameters including experiment number, scan number and peak workspace's name
        """
        # check
        assert isinstance(exp_number, int) and isinstance(scan_number, int)
        assert isinstance(peak_ws_name, str), 'Peak workspace name %s must be a string, ' \
                                              'not %s.' % (str(peak_ws_name),
                                                           str(type(peak_ws_name)))
        assert AnalysisDataService.doesExist(peak_ws_name), 'Peak workspace %s does not ' \
                                                            'exist.' % peak_ws_name

        # set
        self._myExpNumber = exp_number
        self._myScanNumber = scan_number
        self._myPeakWorkspaceName = peak_ws_name

        #
        self._myDataMDWorkspaceName = None

        # Define class variable
        # HKL list
        self._calculatedHKL = None    # user specified HKL
        self._spiceHKL = None                        # spice HKL
        self._prevHKL = numpy.array([0., 0., 0.])    # previous HKL

        # magnetic peak set up
        self._kShiftVector = [0, 0, 0]
        self._absorptionCorrection = 1.

        # peak center and PeaksWorkspace
        self._avgPeakCenter = None
        self._myPeakWSKey = (None, None, None)
        self._myPeakIndex = None

        self._myLastPeakUB = None

        self._myIntensity = None
        self._gaussIntensity = 0.
        self._gaussStdDev = 0.
        self._lorenzFactor = None

        # peak integration result
        self._integrationDict = None
        self._ptIntensityDict = None

        # some motor/goniometer information for further correction
        self._movingMotorTuple = None

        # Finger print
        self._fingerPrint = '{0:.7f}.{1}'.format(time.time(), random.randint(0, 10000000))

        return
Example #52
0
 def _deleteWorkspaces(self, workspaces):
     """
         Deletes a list of workspaces if they exist but ignores any errors
         @param workspaces: list of workspaces to try to delete
     """
     for wk in workspaces:
         try:
             if AnalysisDataService.doesExist(wk):
                 AnalysisDataService.remove(wk)
         except:
             #if the workspace can't be deleted this function does nothing
             pass
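The same tolerant-delete pattern from Example #52 can be driven inline without the helper; a minimal sketch with hypothetical workspace names:

from mantid.api import AnalysisDataService

for name in ('scratch_ws', 'tmp_fit', 'monitor_cache'):  # hypothetical names
    try:
        if AnalysisDataService.doesExist(name):
            AnalysisDataService.remove(name)
    except RuntimeError:
        # mirror _deleteWorkspaces: a failed delete is silently ignored
        pass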
Example #53
0
    def test_batch_reduction_on_time_sliced_file(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY BEGIN -- Remove when appropriate
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        user_file_director.set_compatibility_builder_use_compatibility_mode(True)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY END
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        user_file_director.set_slice_event_builder_start_time([1.0,3.0])
        user_file_director.set_slice_event_builder_end_time([3.0,5.0])

        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        expected_workspaces = ["34484rear_1D_1.75_16.5_t1.00_T3.00", "34484rear_1D_1.75_16.5_t3.00_T5.00"]
        reference_file_names = ["SANS2D_event_slice_referance_t1.00_T3.00.nxs", "SANS2D_event_slice_referance_t3.00_T5.00.nxs"]

        for element, reference_file in zip(expected_workspaces, reference_file_names):
            self.assertTrue(AnalysisDataService.doesExist(element))
            # Evaluate it up to a defined point
            self._compare_workspace(element, reference_file)

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #54
0
    def test_batch_reduction_on_period_time_sliced_wavelength_range_data(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")
        data_builder.set_sample_scatter_period(1)

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("MASKSANS2Doptions.091A")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)

        user_file_director.set_slice_event_builder_start_time([1.0, 3.0])
        user_file_director.set_slice_event_builder_end_time([3.0, 5.0])

        state = user_file_director.construct()

        start = [1.0, 1.0]
        end = [3.0, 2.0]
        state.wavelength.wavelength_low = start
        state.wavelength.wavelength_high = end

        state.adjustment.normalize_to_monitor.wavelength_low = start
        state.adjustment.normalize_to_monitor.wavelength_high = end

        state.adjustment.calculate_transmission.wavelength_low = start
        state.adjustment.calculate_transmission.wavelength_high = end

        state.adjustment.wavelength_and_pixel_adjustment.wavelength_low = start
        state.adjustment.wavelength_and_pixel_adjustment.wavelength_high = end

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = ["5512p1rear_1D_1.0_2.0Phi-45.0_45.0_t1.00_T3.00", "5512p1rear_1D_1.0_2.0Phi-45.0_45.0_t3.00_T5.00",
                               "5512p1rear_1D_1.0_3.0Phi-45.0_45.0_t1.00_T3.00", "5512p1rear_1D_1.0_3.0Phi-45.0_45.0_t3.00_T5.00"
                               ]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #55
0
def get_already_loaded_calibration_workspace(full_file_path):
    """
    Gets a calibration workspace from the ADS if it exists.

    :param full_file_path: the full file path to the calibration workspace
    :return: a handle to the calibration workspace or None
    """
    calibration_workspace_name = get_expected_calibration_workspace_name(full_file_path)
    if AnalysisDataService.doesExist(calibration_workspace_name):
        output_ws = AnalysisDataService.retrieve(calibration_workspace_name)
    else:
        output_ws = None
    return output_ws
Example #56
0
    def clone_workspace(src_name, target_name):
        """clone workspace
        :param src_name:
        :param target_name:
        :return:
        """
        # check
        assert isinstance(src_name, str), 'blabla'
        assert isinstance(target_name, str), 'blabla'

        # check existence
        if AnalysisDataService.doesExist(src_name):
            simpleapi.CloneWorkspace(InputWorkspace=src_name, OutputWorkspace=target_name)
        else:
            raise RuntimeError('Workspace with name {0} does not exist in ADS. CloneWorkspace fails!'.format(src_name))
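A usage sketch of the guarded-clone idiom in Example #56, written against the simpleapi directly; 'raw_data' is a hypothetical source created only for this illustration:

from mantid.api import AnalysisDataService
from mantid.simpleapi import CloneWorkspace, CreateSampleWorkspace

CreateSampleWorkspace(OutputWorkspace='raw_data')   # hypothetical source
if AnalysisDataService.doesExist('raw_data'):
    CloneWorkspace(InputWorkspace='raw_data', OutputWorkspace='raw_data_backup')
else:
    raise RuntimeError('Workspace with name raw_data does not exist in ADS. CloneWorkspace fails!')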
Example #57
0
    def _isValidWorkspace(self, workspace_name, workspace_id):
        """Returns true, if the workspace of name workspace_name is a valid
        reflectometry workspace of type workspace_id and deletes the workspace
        otherwise"""
        if not _hasWorkspaceID(workspace_name, workspace_id):
            self.log().information('Workspace ' + workspace_name + ' exists but is not a ' + workspace_id)
            _removeWorkspace(workspace_name)
            return False

        # For event workspaces, the monitors workspace must also exist, otherwise it's not valid
        if workspace_id == "EventWorkspace":
            if not AnalysisDataService.doesExist(_monitorWorkspace(workspace_name)):
                self.log().information('Monitors workspace ' + workspace_name + '_monitors does not exist')
                _removeWorkspace(workspace_name)
                return False
        return True
Example #58
0
    def process_inputs(self):
        """
        process input properties
        :return: input event workspace, binning parameter workspace, gsas file name, output workspace name,
                ipts number
        """
        # get input properties
        input_ws_name = self.getPropertyValue("InputWorkspace")
        bin_par_ws_name = self.getPropertyValue('BinningTable')
        if len(bin_par_ws_name) > 0:
            bin_par_ws_exist = AnalysisDataService.doesExist(bin_par_ws_name)
        else:
            bin_par_ws_exist = False

        # event workspace is required for re-binning
        input_workspace = AnalysisDataService.retrieve(input_ws_name)
        if input_workspace.id() != 'EventWorkspace' and bin_par_ws_exist:
            self.log().warning('Input workspace {0} must be an EventWorkspace if rebin is required by {1}'
                               ''.format(input_workspace, bin_par_ws_name))
        elif input_workspace.getAxis(0).getUnit().unitID() != "TOF":
            raise NotImplementedError("InputWorkspace must be in units of TOF.")

        # processing binning parameters
        if bin_par_ws_exist:
            binning_parameter_list = self.process_binning_param_table(input_workspace, bin_par_ws_name)
        else:
            binning_parameter_list = None

        # gsas file name (output)
        gss_file_name = self.getPropertyValue("GSSFilename")

        # output workspace name
        output_ws_name = self.getPropertyValue("OutputWorkspace")

        # IPTS-number
        ipts_number = self.getProperty("IPTS").value
        if ipts_number == mantid.kernel.Property.EMPTY_INT:
            try:
                run_number = input_workspace.run().getProperty('run').value
                ipts_number = api.GetIPTS(Instrument='VULCAN', RunNumber=run_number)
            except RuntimeError:
                ipts_number = 0

        # GSAS parm file name
        parm_file_name = self.getPropertyValue("GSSParmFileName")

        return input_workspace, binning_parameter_list, gss_file_name, output_ws_name, ipts_number, parm_file_name
Example #59
0
    def _process_binning_parameters(self, bin_par_str):
        """
        Process binning parameters given as a string.  Two formats are accepted:
        1. regular binning parameters: x0, dx0, x1, dx1, ..., xn, or
        2. "workspace name: workspace index", referring to an existing workspace
        :param bin_par_str:
        :return:
        """
        if bin_par_str.count(':') == 0:
            # parse regular binning parameters
            terms = bin_par_str.split(',')  # in string format
            try:
                bin_param = [float(term) for term in terms]
            except ValueError:
                raise RuntimeError('Binning parameters {0} have non-float terms.'.format(bin_par_str))

        elif bin_par_str.count(':') == 1:
            # in workspace name : workspace index mode
            terms = bin_par_str.split(':')
            ref_ws_name = terms[0].strip()
            if not AnalysisDataService.doesExist(ref_ws_name):
                raise RuntimeError('Workspace {0} does not exist (FYI {1})'
                                   ''.format(ref_ws_name, bin_par_str))
            try:
                ws_index = int(terms[1].strip())
            except ValueError:
                raise RuntimeError('Workspace index {0} must be an integer, but is of type {1}.'
                                   ''.format(terms[1], type(terms[1])))

            ref_tof_ws = AnalysisDataService.retrieve(ref_ws_name)
            if ws_index < 0 or ws_index >= ref_tof_ws.getNumberHistograms():
                raise RuntimeError('Workspace index {0} must be in range [0, {1})'
                                   ''.format(ws_index, ref_tof_ws.getNumberHistograms()))

            ref_tof_vec = ref_tof_ws.readX(ws_index)
            delta_tof_vec = ref_tof_vec[1:] - ref_tof_vec[:-1]

            bin_param = numpy.empty((ref_tof_vec.size + delta_tof_vec.size), dtype=ref_tof_vec.dtype)
            bin_param[0::2] = ref_tof_vec
            bin_param[1::2] = delta_tof_vec

            self.log().warning('Binning parameters: size = {0}\n{1}'.format(len(bin_param), bin_param))

        else:
            raise RuntimeError('Binning format {0} is not supported.'.format(bin_par_str))

        return bin_param
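The edge/width interleaving at the end of Example #59 is worth seeing in isolation: in the x0, dx0, x1, dx1, ..., xn layout described in the docstring, the even slots take the bin edges and the odd slots take the widths between them. A self-contained numpy sketch with made-up TOF edges:

import numpy

ref_tof_vec = numpy.array([1000., 1010., 1030., 1060.])  # hypothetical bin edges
delta_tof_vec = ref_tof_vec[1:] - ref_tof_vec[:-1]       # widths: [10. 20. 30.]

bin_param = numpy.empty(ref_tof_vec.size + delta_tof_vec.size, dtype=ref_tof_vec.dtype)
bin_param[0::2] = ref_tof_vec    # edges fill the even slots
bin_param[1::2] = delta_tof_vec  # widths fill the odd slots
print(bin_param)  # [1000. 10. 1010. 20. 1030. 30. 1060.]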
Example #60
0
    def test_that_batch_reduction_evaluates_LAB(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY BEGIN -- Remove when appropriate
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        user_file_director.set_compatibility_builder_use_compatibility_mode(True)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY END
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)
        workspace_name = "34484rear_1D_1.75_16.5"
        output_workspace = AnalysisDataService.retrieve(workspace_name)

        # Evaluate it up to a defined point
        reference_file_name = "SANS2D_ws_D20_reference_LAB_1D.nxs"
        self._compare_workspace(output_workspace, reference_file_name)

        if AnalysisDataService.doesExist(workspace_name):
            AnalysisDataService.remove(workspace_name)