Example No. 1
    def test_model_MD(self):
        ws1 = LoadEventNexus("CNCS_7860", MetaDataOnly=True)
        ws2 = LoadEventNexus("VIS_19351", MetaDataOnly=True)
        md = CreateMDWorkspace(Dimensions=1,
                               Extents='-1,1',
                               Names='A',
                               Units='U')
        md.addExperimentInfo(ws1)
        md.addExperimentInfo(ws2)
        model = SampleLogsModel(md)

        self.assertEqual(model.get_exp(), 0)
        self.assertEqual(model.get_name(), 'md')
        self.assertEqual(model.getNumExperimentInfo(), 2)

        values = model.get_log_display_values("duration")
        self.assertEqual(values[0], "duration")
        self.assertEqual(values[1], "number")
        self.assertEqual(values[2], 148.0)
        self.assertEqual(values[3], "second")

        # Change exp
        model.set_exp(1)
        self.assertEqual(model.get_exp(), 1)
        values = model.get_log_display_values("duration")
        self.assertEqual(values[0], "duration")
        self.assertEqual(values[1], "number")
        self.assertEqual(values[2], 4.616606712341309)
        self.assertEqual(values[3], "second")
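The test above relies on MetaDataOnly=True, which loads the sample logs and instrument but no events. A minimal sketch of that pattern, reusing the CNCS run from the test:

from mantid.simpleapi import LoadEventNexus

ws = LoadEventNexus('CNCS_7860', MetaDataOnly=True)  # logs only, no event data
duration = ws.getRun().getProperty('duration')
print(duration.value, duration.units)  # 148.0 second, per the test above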
Example No. 2
 def runTest(self):
     ev_ws = LoadEventNexus('LET00006278.nxs')
     # isis_vms_compat/SPB[2]
     self.assertEqual(ev_ws.sample().getGeometryFlag(), 1,
                      "Geometry flag mismatch. vms_compat block not read correctly")
     # ISIS corrects the TOF using the loadTimeOfFlight method.
     self.assertDelta(ev_ws.getSpectrum(10).getTofs()[1], 1041.81, 0.01,
                      "The ISIS event correction is incorrect (check LoadEventNexus::loadTimeOfFlight)")
Example No. 3
def load_and_rebin(runs: List[int],
                   output_workspace: str,
                   rebin_params: List[float],
                   banks: Optional[List[int]] = None) -> Workspace2D:
    r"""
    @brief Load a list of run numbers and rebin

    This function assumes the runs are large and events cannot be all loaded into memory. Hence, a run is loaded
    at a time, rebinned to TOF counts, events are dropped, and counts are added to the cumulative histogram
    resulting from loading the previous runs.

    @param runs : list of run numbers
    @param rebin_params : a triad of first, step, and last. A negative step indicates logarithmic binning
    @param output_workspace : the name of the output `MatrixWorkspace`
    @param banks : list of bank numbers, if one wants to load only certain banks.
    @return handle to the output workspace
    """
    instrument = 'CORELLI'
    kwargs = {} if banks is None else {
        'BankName': ','.join([f'bank{b}' for b in banks])
    }

    # Load the first run
    logger.information(
        f'Loading run {runs[0]}. {len(runs)} runs remaining to be loaded')
    LoadEventNexus(Filename=f'{instrument}_{runs[0]}',
                   OutputWorkspace=output_workspace,
                   LoadLogs=False,
                   **kwargs)
    if rebin_params is not None:
        Rebin(InputWorkspace=output_workspace,
              OutputWorkspace=output_workspace,
              Params=rebin_params,
              PreserveEvents=False)
    # Iteratively load the remaining runs, adding to the final workspace each time
    try:
        single_run = '__single_run_' + output_workspace
        for i, run in enumerate(runs[1:]):
            logger.information(
                f'Loading run {run}. {len(runs) - 1 - i} runs remaining to be loaded'
            )
            LoadEventNexus(Filename=f'{instrument}_{run}',
                           OutputWorkspace=single_run,
                           LoadLogs=False,
                           **kwargs)
            if rebin_params is not None:
                Rebin(InputWorkspace=single_run,
                      OutputWorkspace=single_run,
                      Params=rebin_params,
                      PreserveEvents=False)
            Plus(LHSWorkspace=output_workspace,
                 RHSWorkspace=single_run,
                 OutputWorkspace=output_workspace)
            DeleteWorkspace(single_run)  # free memory as quickly as possible
    except RuntimeError:
        DeleteWorkspace(single_run)  # a bit of clean-up
    return mtd[output_workspace]
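A usage sketch for load_and_rebin; the run numbers and binning parameters below are placeholders, not values from a real experiment:

# accumulate two hypothetical CORELLI runs into a single histogram,
# binning TOF from 300 to 16000 microseconds in 10-microsecond steps
ws = load_and_rebin(runs=[12345, 12346],
                    output_workspace='accumulated',
                    rebin_params=[300.0, 10.0, 16000.0],
                    banks=[10, 11, 12])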
Example No. 4
 def _calculate_wavelength_band(self):
     """
     Select the wavelength band examining the logs of the first sample
     """
     runs = self.getProperty('RunNumbers').value
     run = self._run_lists(runs)[0]
     file_name = "{0}_{1}_event.nxs".format(self._short_inst, str(run))
     _t_w = LoadEventNexus(Filename=file_name,
                           NXentryName='entry-diff',
                           SingleBankPixelsOnly=False)
     wavelength = np.mean(_t_w.getRun().getProperty('LambdaRequest').value)
     wavs = self._wavelength_bands
     midpoint = (wavs['111'][0] + wavs['311'][0]) / 2.0
     reflection = '111' if wavelength > midpoint else '311'
     self._wavelength_band = self._wavelength_bands[reflection]
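The band selection above reduces to a midpoint test; a toy illustration with made-up band edges:

wavelength_bands = {'111': [6.0, 6.5], '311': [3.0, 3.4]}  # hypothetical edges, in Angstroms
midpoint = (wavelength_bands['111'][0] + wavelength_bands['311'][0]) / 2.0  # 4.5
wavelength = 6.27
reflection = '111' if wavelength > midpoint else '311'  # -> '111'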
Example No. 5
    def test_RunsSuccessfully(self):
        LoadEventNexus(Filename='REF_L_179926.nxs.h5', OutputWorkspace='REF_L_179926')
        LoadEventNexus(Filename='REF_L_179927.nxs.h5', OutputWorkspace='REF_L_179927')

        LRDirectBeamSort(WorkspaceList=['REF_L_179926', 'REF_L_179927'],
                         ComputeScalingFactors=True,
                         OrderDirectBeamsByRunNumber=False,
                         SlitTolerance=0.06,
                         IncidentMedium=self.medium,
                         UseLowResCut=False,
                         TOFSteps=200)

        # Clean
        DeleteWorkspace('REF_L_179926')
        DeleteWorkspace('REF_L_179927')
Example No. 6
    def _load_runs(self, runs, w_name):
        """
        Load all run event Nexus files into a single `EventWorkspace`

        Parameters
        ----------
        runs: str
            Run numbers to be reduced. Symbol `;` separates the runs into
            substrings. Each substring represents a set of runs to be
            reduced together
        w_name: str
            Name of output workspace

        Returns
        -------
        EventWorkspace
        """
        rl = self._run_lists(runs)
        #
        # Load files together
        #
        _t_all_w = None
        for run in rl:
            file_name = "{0}_{1}_event.nxs".format(self._short_inst, str(run))
            _t_w = LoadEventNexus(Filename=file_name,
                                  NXentryName='entry-diff',
                                  SingleBankPixelsOnly=False)
            if _t_all_w is None:
                _t_all_w = CloneWorkspace(_t_w)
            else:
                _t_all_w += _t_w
        RenameWorkspace(_t_all_w, OutputWorkspace=w_name)
        return _t_all_w
Example No. 7
def load_banks(run: Union[int, str], bank_selection: str, output_workspace: str) -> Workspace2D:
    r"""
    Load events only for the selected banks, and don't load metadata.

    If the file is not an events file, but a Nexus processed file, the bank_selection is ignored.
    :param run: run-number or filename to an Event nexus file or a processed nexus file
    :param bank_selection: selection string, such as '10,12-15,17-21'
    :param output_workspace: name of the output workspace containing counts per pixel
    :return: workspace containing counts per pixel. Events in each pixel are integrated into neutron counts.
    """
    # Resolve the input run
    if isinstance(run, int):
        file_descriptor = f'CORELLI_{run}'
    else:  # a run number given as a string, or the path to a file
        try:
            file_descriptor = f'CORELLI_{str(int(run))}'
        except ValueError:  # run is path to a file
            filename = run
            assert path.exists(filename), f'File {filename} does not exist'
            file_descriptor = filename

    bank_names = ','.join(['bank' + b for b in bank_numbers(bank_selection)])
    try:
        LoadEventNexus(Filename=file_descriptor, OutputWorkspace=output_workspace,
                       BankName=bank_names, LoadMonitors=False, LoadLogs=True)
    except (RuntimeError, ValueError):
        LoadNexusProcessed(Filename=file_descriptor, OutputWorkspace=output_workspace)
    Integration(InputWorkspace=output_workspace, OutputWorkspace=output_workspace)
    return mtd[output_workspace]
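A usage sketch for load_banks; the run number is hypothetical:

# integrate events from banks 10 and 12-15 of a hypothetical run into per-pixel counts
counts = load_banks(run=12345, bank_selection='10,12-15', output_workspace='counts')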
Example No. 8
    def _load_single_run(self, run, name):
        """
        Find and load events from the diffraction tubes.

        Run number 90000 discriminates between the old and new DAS

        Parameters
        ----------
        run: str
            Run number
        name: str
            Name of the output EventsWorkspace

        Returns
        -------
        EventsWorkspace
        """
        banks = ','.join(['bank{}'.format(i) for i in self._diff_bank_numbers])
        particular = {
            VDAS.v1900_2018: dict(NXentryName='entry-diff'),
            VDAS.v2019_2100: dict(BankName=banks)
        }
        identifier = "{0}_{1}".format(self._short_inst, str(run))
        kwargs = dict(Filename=identifier,
                      SingleBankPixelsOnly=False,
                      OutputWorkspace=name)
        kwargs.update(particular[self._das_version])
        return LoadEventNexus(**kwargs)
Example No. 9
 def _save_t0(self, run_number, name='_t_ws'):
     """
     Create temporary events file with delayed emission time from
     moderator removed
     :param run_number: run number
     :param name: name for the output workspace
     :return: file name of event file with events treated with algorithm
     ModeratorTzeroLinear.
     """
     ws = LoadEventNexus(Filename=self._makeRunFile(run_number),
                         NXentryName='entry-diff',
                         OutputWorkspace=name)
     ws = ModeratorTzeroLinear(InputWorkspace=ws.name(),
                               OutputWorkspace=ws.name())
     file_name = self._spawn_tempnexus()
     SaveNexus(ws, file_name)
     return file_name
Example No. 10
 def _loadMetaWS(self, runnumber):
     # currently only event nexus files are supported
     wsname = '__meta_SNAP_{}'.format(runnumber)
     LoadEventNexus(Filename='SNAP' + str(runnumber),
                    OutputWorkspace=wsname,
                    MetaDataOnly=True,
                    LoadLogs=False)
     return wsname
Example No. 11
    def __load_logs(self, logs_to_keep):
        '''Use mantid to load the logs then set up the Splitters object'''
        self._event_wksp = LoadEventNexus(Filename=self._nexus_name,
                                          OutputWorkspace=self._event_ws_name,
                                          MetaDataOnly=True,
                                          LoadMonitors=False)

        # remove unwanted sample logs
        RemoveLogs(self._event_wksp, KeepLogs=logs_to_keep)

        # if there is only one scan index entry there is nothing to split on;
        # this is an underlying assumption of the rest of the code
        if self._event_wksp.run()['scan_index'].size() == 1 \
                or np.unique(self._event_wksp.run()['scan_index'].value).size == 1:
            self._splitter = None
        else:
            # object to be used for splitting times
            self._splitter = Splitter(self._event_wksp.run())
Example No. 12
 def _loadRun(self, run, isTrans):
     """Load a run as an event workspace if slicing is requested, or a histogram
     workspace otherwise. Transmission runs are always loaded as histogram workspaces."""
     workspace_name = self._prefixedName(run, isTrans)
     if not isTrans and self._slicingEnabled():
         LoadEventNexus(Filename=run, OutputWorkspace=workspace_name, LoadMonitors=True)
         _throwIfNotValidReflectometryEventWorkspace(workspace_name)
         self.log().information('Loaded event workspace ' + workspace_name)
     else:
         LoadNexus(Filename=run, OutputWorkspace=workspace_name)
         self.log().information('Loaded workspace ' + workspace_name)
     workspace_name = self._renameWorkspaceBasedOnRunNumber(workspace_name, isTrans)
     return workspace_name
Example No. 13
 def load_meta_data(cls, file_path, outputWorkspace):
     try:
         if IN_MANTIDPLOT:
             script = "LoadEventNexus(Filename='%s', OutputWorkspace='%s', MetaDataOnly=True)" % (
                 file_path, outputWorkspace)
             execute_script(script)
             if not AnalysisDataService.doesExist(outputWorkspace):
                 return False
         else:
             LoadEventNexus(Filename=file_path,
                            OutputWorkspace=outputWorkspace,
                            MetaDataOnly=True)
         return True
     except Exception:
         return False
Example No. 14
 def _mask_t0_crop(self, run_number, name):
     """
     Load a run into a workspace with:
      1. Masked detectors
      2. Delayed emission time from moderator removed
      3. Conversion of units to momentum
      4. Remove events outside the valid momentum range
     :param run_number: BASIS run number
     :param name: name for the output workspace
     :return: workspace object
     """
     ws = LoadEventNexus(Filename=self._makeRunFile(run_number),
                         NXentryName='entry-diff',
                         SingleBankPixelsOnly=False,
                         OutputWorkspace=name)
     MaskDetectors(ws, MaskedWorkspace=self._t_mask)
     ws = ModeratorTzeroLinear(InputWorkspace=ws.name(),
                               OutputWorkspace=ws.name())
     ws = ConvertUnits(ws, Target='Momentum', OutputWorkspace=ws.name())
     ws = CropWorkspace(ws,
                        OutputWorkspace=ws.name(),
                        XMin=self._momentum_range[0],
                        XMax=self._momentum_range[1])
     return ws
Example No. 15
def load_banks(filename: str, bank_selection: str, output_workspace: str) -> Workspace2D:
    r"""
    Load events only for the selected banks, and don't load metadata.

    If the file is not an events file, but a Nexus processed file, the bank_selection is ignored.
    :param filename: Filename to an Event nexus file or a processed nexus file
    :param bank_selection: selection string, such as '10,12-15,17-21'
    :param output_workspace: name of the output workspace containing counts per pixel
    :return: workspace containing counts per pixel. Events in each pixel are integrated into neutron counts.
    """
    assert path.exists(filename), f'File {filename} does not exist'
    bank_names = ','.join(['bank' + b for b in bank_numbers(bank_selection)])
    try:
        LoadEventNexus(Filename=filename, OutputWorkspace=output_workspace,
                       BankName=bank_names, LoadMonitors=False, LoadLogs=False)
    except (RuntimeError, ValueError):
        LoadNexusProcessed(Filename=filename, OutputWorkspace=output_workspace)
    Integration(InputWorkspace=output_workspace, OutputWorkspace=output_workspace)
    return mtd[output_workspace]
Example No. 16
def loadIntegrateData(filename, OutputWorkspace='__ws', wavelength=1.488):
    LoadEventNexus(Filename=filename,
                   OutputWorkspace=OutputWorkspace,
                   LoadMonitors=True)
    Integration(InputWorkspace=OutputWorkspace,
                OutputWorkspace=OutputWorkspace)
    MaskDetectors(OutputWorkspace, DetectorList=range(16384))
    mtd[OutputWorkspace].getAxis(0).setUnit("Wavelength")
    w = np.array([wavelength - 0.001, wavelength + 0.001])
    for idx in range(mtd[OutputWorkspace].getNumberHistograms()):
        mtd[OutputWorkspace].setX(idx, w)
    SetGoniometer(OutputWorkspace, Axis0="HB2C:Mot:s1,0,1,0,1")
    AddSampleLog(OutputWorkspace,
                 LogName="gd_prtn_chrg",
                 LogType='Number',
                 NumberType='Double',
                 LogText=str(mtd[OutputWorkspace +
                                 '_monitors'].getNumberEvents()))
    return OutputWorkspace
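A usage sketch for loadIntegrateData, reusing the HB2C file path that appears in later examples; adjust the IPTS and run number to your data:

ws_name = loadIntegrateData('/HFIR/HB2C/IPTS-7776/nexus/HB2C_26625.nxs.h5',
                            OutputWorkspace='HB2C_26625')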
Example No. 17
    def PyExec(self):
        filename = self.getProperty("Filename").value
        wavelength = self.getProperty("wavelength").value
        outWS = self.getPropertyValue("OutputWorkspace")

        LoadEventNexus(Filename=filename,
                       OutputWorkspace=outWS,
                       LoadMonitors=True)
        Integration(InputWorkspace=outWS, OutputWorkspace=outWS)

        if self.getProperty("ApplyMask").value:
            MaskBTP(outWS, Bank='8', Tube='449-480')
            MaskBTP(outWS, Pixel='1,2,511,512')

        mtd[outWS].getAxis(0).setUnit("Wavelength")
        w = [wavelength - 0.001, wavelength + 0.001]
        for idx in range(mtd[outWS].getNumberHistograms()):
            mtd[outWS].setX(idx, w)

        SetGoniometer(outWS, Axis0="HB2C:Mot:s1,0,1,0,1")
        AddSampleLog(outWS,
                     LogName="gd_prtn_chrg",
                     LogType='Number',
                     NumberType='Double',
                     LogText=str(mtd[outWS + '_monitors'].getNumberEvents()))
        DeleteWorkspace(outWS + '_monitors')

        AddSampleLog(outWS,
                     LogName="Wavelength",
                     LogType='Number',
                     NumberType='Double',
                     LogText=str(wavelength))
        AddSampleLog(outWS,
                     LogName="Ei",
                     LogType='Number',
                     NumberType='Double',
                     LogText=str(
                         UnitConversion.run('Wavelength', 'Energy', wavelength,
                                            0, 0, 0, Elastic, 0)))

        self.setProperty('OutputWorkspace', outWS)
Example No. 18
    def test_model(self):
        ws = LoadEventNexus('CNCS_7860', MetaDataOnly=True)
        model = SampleLogsModel(ws)

        self.assertEqual(model.get_exp(), 0)
        self.assertEqual(model.get_name(), 'ws')
        self.assertEqual(model.getNumExperimentInfo(), 0)

        log = model.get_log("Speed5")
        self.assertEqual(log.name, "Speed5")
        self.assertEqual(log.size(), 4)

        log_names = model.get_log_names()
        self.assertEqual(len(log_names), 48)
        self.assertIn("Speed5", log_names)

        values = model.get_log_display_values("Speed5")
        self.assertEqual(values[0], "Speed5")
        self.assertEqual(values[1], "float series")
        self.assertEqual(values[2], "(4 entries)")
        self.assertEqual(values[3], "Hz")

        self.assertTrue(model.is_log_plottable("Speed5"))
        self.assertFalse(model.is_log_plottable("duration"))

        stats = model.get_statistics("Speed5")
        self.assertEqual(stats.maximum, 300.0)

        self.assertFalse(model.isMD())

        itemModel = model.getItemModel()
        self.assertEqual(itemModel.horizontalHeaderItem(0).text(), "Name")
        self.assertEqual(itemModel.horizontalHeaderItem(1).text(), "Type")
        self.assertEqual(itemModel.horizontalHeaderItem(2).text(), "Value")
        self.assertEqual(itemModel.horizontalHeaderItem(3).text(), "Units")
        self.assertEqual(itemModel.rowCount(), 48)
        self.assertEqual(itemModel.item(0, 0).text(), "ChopperStatus1")
        self.assertEqual(itemModel.item(0, 1).text(), "float series")
        self.assertEqual(itemModel.item(0, 2).text(), "4.0")
        self.assertEqual(itemModel.item(0, 3).text(), "")
Example No. 19
    def load_data(self, event_file_name):
        """
        Load an event file
        :param event_file_name: path to the event data file to load
        :return: data key under which the loaded workspace is registered
        """
        datatypeutility.check_file_name(event_file_name, check_exist=True, note='Event data file')

        # generate output workspace and data key
        out_ws_name, data_key = self.generate_output_workspace_name(event_file_name)

        # keep it as the current workspace
        if event_file_name.endswith('.h5'):
            self._last_loaded_event_ws = LoadEventNexus(Filename=event_file_name, MetaDataOnly=False, Precount=True,
                                                        OutputWorkspace=out_ws_name)
        else:
            self._last_loaded_event_ws = Load(Filename=event_file_name, OutputWorkspace=out_ws_name)

        self._last_loaded_ref_id = data_key

        self._ws_name_dict[data_key] = out_ws_name

        return data_key
Example No. 20
import h5py
from mantid.simpleapi import LoadEventNexus

filename = '/HFIR/HB2C/IPTS-7776/nexus/HB2C_26625.nxs.h5'

ws = LoadEventNexus(Filename=filename, MetaDataOnly=True)
if ws.run().hasProperty('HB2C:CS:ITEMS:Nature'):
    nature = ws.run().getProperty('HB2C:CS:ITEMS:Nature').value[0]
    print(nature)
    print('Powder: {}'.format(nature == "Powder"))

with h5py.File(filename, 'r') as f:
    if '/entry/DASlogs/HB2C:CS:ITEMS:Nature' in f:
        nature = f['/entry/DASlogs/HB2C:CS:ITEMS:Nature/value'].value[0][0]
        print(nature)
        print('Powder: {}'.format(nature == "Powder"))
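Note that Dataset.value was removed in h5py 3.0; with newer h5py the same read is done by indexing, and string data comes back as bytes. A sketch of the equivalent access:

with h5py.File(filename, 'r') as f:
    if '/entry/DASlogs/HB2C:CS:ITEMS:Nature' in f:
        nature = f['/entry/DASlogs/HB2C:CS:ITEMS:Nature/value'][0][0]
        if isinstance(nature, bytes):  # h5py >= 3 returns strings as bytes
            nature = nature.decode()
        print('Powder: {}'.format(nature == 'Powder'))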
Example No. 21
 def __init__(self, *args):
     LoadEventNexus(Filename='REF_L_179926.nxs.h5',
                    OutputWorkspace='REF_L_179926')
     LoadEventNexus(Filename='REF_L_179927.nxs.h5',
                    OutputWorkspace='REF_L_179927')
     unittest.TestCase.__init__(self, *args)
Example No. 22
from mantid.simpleapi import LoadEventNexus, Integration
import numpy as np

# run 47299: banks 23, 56, 85
# runs 47300-47304: banks 33, 45, 57

dirc = '/SNS/users/rwp/corelli/tube_calibration/'

run = 47299

for bank in [23, 56, 85]:
    data = LoadEventNexus('/SNS/CORELLI/IPTS-18479/nexus/CORELLI_' + str(run) +
                          '.nxs.h5',
                          BankName='bank' + str(bank))
    data = Integration(data)
    data_Y = data.extractY() * -1
    for tube in range(16):
        np.savetxt(
            dirc + 'COR_' + str(run) + '_' + str(bank) + '_' + str(tube + 1) +
            '.txt',
            np.concatenate((np.array(range(256), ndmin=2).T, data_Y[range(
                256 * tube, 256 * (tube + 1))]),
                           axis=1))

for run in range(47300, 47305):
    for bank in [33, 45, 57]:
        data = LoadEventNexus('/SNS/CORELLI/IPTS-18479/nexus/CORELLI_' +
                              str(run) + '.nxs.h5',
                              BankName='bank' + str(bank))
        data = Integration(data)
        data_Y = data.extractY() * -1
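The per-tube slicing above walks 256-pixel blocks; for one CORELLI bank of 16 tubes by 256 pixels, the same split can be expressed as a single reshape, a sketch:

tubes = data_Y.reshape(16, 256, -1)  # tubes[t] == data_Y[256 * t:256 * (t + 1)]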
Example No. 23
def _load_ws(entry,
             ext,
             inst,
             ws_name,
             raw_types,
             period=_NO_INDIVIDUAL_PERIODS):
    filename, ext = _make_filename(entry, ext, inst)
    sanslog.notice('reading file:\t{}'.format(filename))

    is_data_set_event = False
    workspace_type = get_workspace_type(filename)
    if workspace_type is WorkspaceType.MultiperiodHistogram:
        if period != _NO_INDIVIDUAL_PERIODS:
            outWs = Load(Filename=filename,
                         OutputWorkspace=ws_name,
                         EntryNumber=period)
        else:
            outWs = Load(Filename=filename, OutputWorkspace=ws_name)
    elif workspace_type is WorkspaceType.Histogram:
        outWs = Load(Filename=filename, OutputWorkspace=ws_name)
    elif workspace_type is WorkspaceType.Event or workspace_type is WorkspaceType.MultiperiodEvent:
        is_data_set_event = True
        temp_ws_name = ws_name + "_event_temp"
        temp_ws_name_monitors = temp_ws_name + "_monitors"
        ws_name_monitors = ws_name + "_monitors"

        LoadEventNexus(Filename=filename,
                       OutputWorkspace=temp_ws_name,
                       LoadMonitors=True)
        outWs = mtd[temp_ws_name]
        # If we are dealing with a multiperiod workspace then we can only use a single period at a
        # time, hence we reload the whole data set from disk every time. This is very bad and should
        # be cached in the future
        if isinstance(outWs, WorkspaceGroup):
            remove_unwanted_workspaces(ws_name, temp_ws_name, period)
            remove_unwanted_workspaces(ws_name_monitors, temp_ws_name_monitors,
                                       period)
        else:
            RenameWorkspace(InputWorkspace=temp_ws_name,
                            OutputWorkspace=ws_name)
            RenameWorkspace(InputWorkspace=temp_ws_name_monitors,
                            OutputWorkspace=ws_name_monitors)

        run_details = mtd[ws_name].getRun()
        time_array = run_details.getLogData("proton_charge").times

        # There should never be a time increment in the proton charge log larger than, say, two weeks.
        # SANS2D currently runs at 10 frames per second; this may be changed to 5 Hz
        # (a step of 0.2 sec). The time between frames may be larger when the SMP veto is switched on,
        # but hopefully never longer than two weeks!
        for i in range(len(time_array) - 1):
            # Cal time dif in seconds
            time_diff = (time_array[i + 1] - time_array[i]) / np.timedelta64(
                1, 's')
            if time_diff > 172800:
                sanslog.warning(
                    "Time increments in the proton charge log of {} are suspiciously large. "
                    "For example, a time difference of {} seconds has "
                    "been observed.".format(filename, str(time_diff)))
                break
    else:
        outWs = Load(Filename=filename, OutputWorkspace=ws_name)

    full_path, __ = getFileAndName(filename)
    path, f_name = os.path.split(full_path)
    if path.find('/') == -1:
        # Looks like we're on a windows system, convert the directory separators
        path = path.replace('\\', '/')

    if _is_type(ext, raw_types):
        LoadSampleDetailsFromRaw(InputWorkspace=ws_name,
                                 Filename=path + '/' + f_name)

    # Change below when logs in Nexus files work; file types of .raw need their log files to be copied too
    # if isType(ext, raw_types):
    log_file = os.path.splitext(f_name)[0] + '.log'
    try:
        outWs = mtd[ws_name]
        run = outWs.getRun()
        num_periods = run.getLogData('nperiods').value
    except Exception:
        # Assume the run file didn't support multi-period data and so there is only one period
        num_periods = 1

    return path, f_name, log_file, num_periods, is_data_set_event
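The element-wise gap check over the proton-charge times can be written more compactly with numpy; a sketch, assuming time_array is the datetime64 array used above:

time_diffs = np.diff(time_array) / np.timedelta64(1, 's')  # gaps in seconds
if np.any(time_diffs > 172800):  # two weeks, as above
    sanslog.warning('Time increments in the proton charge log of {} are suspiciously large. '
                    'For example, a time difference of {} seconds has been '
                    'observed.'.format(filename, time_diffs.max()))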
Example No. 24
fitwinws.addColumn("int", "spectrum")
for ipk in range(len(FinalDReference)):
    fitwinws.addColumn("double", "Peak_%d_Left" % (ipk))
    fitwinws.addColumn("double", "Peak_%d_Right" % (ipk))

row = [-1]
for refpeak in FinalDReference:
    leftbound = fitwindict[refpeak][0]
    rightbound = fitwindict[refpeak][1]
    row.append(leftbound)
    row.append(rightbound)

fitwinws.addRow(row)

for run in range(first, last + 1):
    LoadEventNexus(Filename='CORELLI_' + str(run), OutputWorkspace='data')
    if tube_cal:
        ApplyCalibration('data', 'CalibTable')
    SetInstrumentParameter(Workspace="data",
                           ParameterName="t0_formula",
                           Value="(23.5 * exp(-incidentEnergy/205.8))")
    ModeratorTzero(InputWorkspace="data",
                   OutputWorkspace="data",
                   EMode="Elastic")
    MaskBTP(Workspace='data', Pixel="1-16,241-256")
    ConvertUnits(InputWorkspace='data',
                 OutputWorkspace='data',
                 Target='dSpacing')
    Rebin(InputWorkspace='data',
          OutputWorkspace='data',
          Params='0.5,-0.004,10')
Example No. 25
from mantid.simpleapi import LoadEventNexus
ws = LoadEventNexus(Filename='/HFIR/HB2C/IPTS-7776/nexus/HB2C_26625.nxs.h5',
                    MetaDataOnly=True)
if ws.run().hasProperty('HB2C:CS:CrystalAlign:UBMatrix'):
    ub = ','.join(
        ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0].replace(
            '[', '').replace(']', '').split())
    print(ub)
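The comma-joined string built above can be turned back into a 3x3 matrix; a small sketch:

import numpy as np
ub_matrix = np.array(ub.split(','), dtype=float).reshape(3, 3)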
Example No. 26
    def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace:
        """
        Load the data with given grouping
        """
        # grouping config
        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4
        number_of_runs = len(runs)

        x_dim = 480 * 8 // grouping
        y_dim = 512 // grouping

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs + 3)

        for n, run in enumerate(runs):
            progress.report('Loading: ' + run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512 * 480 * 8), dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank' + str(b + 1) +
                                        '_events/event_id'].value,
                                      minlength=512 * 480 * 8)
                bc = bc.reshape((480 * 8, 512))
                if grouping == 2:
                    bc = bc[::2, ::2] + bc[1::2, ::2] + bc[::2,
                                                           1::2] + bc[1::2,
                                                                      1::2]
                elif grouping == 4:
                    bc = bc[::4, ::4] + bc[1::4, ::4] + bc[2::4, ::4] + bc[3::4, ::4] + bc[::4, 1::4] + bc[1::4, 1::4] + bc[2::4, 1::4] + \
                         bc[3::4, 1::4] + bc[::4, 2::4] + bc[1::4, 2::4] + bc[2::4, 2::4] + bc[3::4, 2::4] + bc[::4, 3::4] + \
                         bc[1::4, 3::4] + bc[2::4, 3::4] + bc[3::4, 3::4]
                data_array[n] = bc
                s1_array.append(
                    f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'].value[0])
                duration_array.append(float(f['/entry/duration'].value[0]))
                run_number_array.append(float(f['/entry/run_number'].value[0]))
                monitor_count_array.append(
                    float(f['/entry/monitor1/total_counts'].value[0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace",
                                                 enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty(
            "Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim + 0.5, x_dim + 0.5,
                                                     number_of_runs + 0.5))
        createWS_alg.setProperty(
            "NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0],
                                 MetaDataOnly=True,
                                 EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(
            _tmp_ws,
            KeepLogs=
            'HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
            'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
            'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
            EnableLogging=False)

        time_ns_array = _tmp_ws.run().startTime().totalNanoseconds(
        ) + np.append(0,
                      np.cumsum(duration_array) * 1e9)[:-1]

        try:
            ub = np.array(re.findall(
                r'-?\d+\.*\d*',
                _tmp_ws.run().getProperty(
                    'HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3, 3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty(
                'HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty(
                'HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[1, 0, 0], [0, np.cos(sgl),
                                          np.sin(sgl)],
                              [0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                              [-np.sin(sgu), np.cos(sgu), 0], [0, 0, 1]])
            UB = sgl_a.dot(sgu_a).dot(
                ub)  # Apply the Goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                                       EnableLogging=False)

            group_number = 0
            for x in range(0, 480 * 8, grouping):
                for y in range(0, 512, grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y + i +
                                             (x + j) * 512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                                     CopyGroupingFromWorkspace=_tmp_group,
                                     EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws,
                              dEAnalysisMode='Elastic',
                              EnableLogging=False,
                              PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        add_time_series_property('s1',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, s1_array)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        add_time_series_property('duration',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, duration_array)
        outWS.getExperimentInfo(0).run().getProperty(
            'duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number',
                                                     run_number_array, True)
        add_time_series_property('monitor_count',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, monitor_count_array)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta,
                                                     True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal,
                                                     True)

        setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                                      enableLogging=False)
        setGoniometer_alg.setProperty("Workspace", outWS)
        setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
        setGoniometer_alg.setProperty("Average", False)
        setGoniometer_alg.execute()

        return outWS
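The grouping sums in load_and_group are block downsampling; a toy numpy demonstration of the 2x2 case, equivalent to a reshape-and-sum:

import numpy as np

a = np.arange(16).reshape(4, 4)
blocks = a[::2, ::2] + a[1::2, ::2] + a[::2, 1::2] + a[1::2, 1::2]  # 2x2 block sums
assert np.array_equal(blocks, a.reshape(2, 2, 2, 2).sum(axis=(1, 3)))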
Example No. 27
class NeXusConvertingApp:
    """
    Convert NeXus file to Hidra project file
    """
    def __init__(self,
                 nexus_file_name,
                 mask_file_name=None,
                 extra_logs=list()):
        """Initialization

        Parameters
        ----------
        nexus_file_name : str
            Name of NeXus file
        mask_file_name : str
            Name of masking file
        extra_logs : list, tuple
            list of strings with additional (non-default) logs to keep in the project file
        """
        # configure logging for this class
        self._log = Logger(__name__)

        # validate NeXus file exists
        checkdatatypes.check_file_name(nexus_file_name, True, False, False,
                                       'NeXus file')
        self._nexus_name = nexus_file_name

        # validate mask file exists
        if mask_file_name is None:
            self._mask_file_name = None
        else:
            checkdatatypes.check_file_name(mask_file_name, True, False, False,
                                           'Mask file')
            self._mask_file_name = mask_file_name
            if not mask_file_name.lower().endswith('.xml'):
                raise NotImplementedError(
                    'Only Mantid mask in XML format is supported now.  File '
                    '{} with type {} is not supported yet.'
                    ''.format(mask_file_name,
                              mask_file_name.split('.')[-1]))

        # workspaces
        self._event_ws_name = os.path.basename(nexus_file_name).split('.')[0]

        logs_to_keep = list(extra_logs)
        logs_to_keep.extend(DEFAULT_KEEP_LOGS)

        self.__load_logs(logs_to_keep)

        # load the mask
        self.mask_array = None  # TODO to promote direct access
        if mask_file_name:
            self.__load_mask(mask_file_name)

        # create the hidra workspace
        self._hidra_workspace = workspaces.HidraWorkspace(self._nexus_name)

        # Set a default instrument with this workspace
        # set up instrument
        # initialize instrument with hard coded values
        instrument = DENEXDetectorGeometry(NUM_PIXEL_1D, NUM_PIXEL_1D,
                                           PIXEL_SIZE, PIXEL_SIZE, ARM_LENGTH,
                                           False)

        self._hidra_workspace.set_instrument_geometry(instrument)

        # project file
        self._project_file = None

    def __del__(self):
        if self._event_ws_name in mtd:
            DeleteWorkspace(Workspace=self._event_ws_name, EnableLogging=False)

    def __load_logs(self, logs_to_keep):
        '''Use mantid to load the logs then set up the Splitters object'''
        self._event_wksp = LoadEventNexus(Filename=self._nexus_name,
                                          OutputWorkspace=self._event_ws_name,
                                          MetaDataOnly=True,
                                          LoadMonitors=False)

        # remove unwanted sample logs
        RemoveLogs(self._event_wksp, KeepLogs=logs_to_keep)

        # if there is only one scan index entry there is nothing to split on;
        # this is an underlying assumption of the rest of the code
        if self._event_wksp.run()['scan_index'].size() == 1 \
                or np.unique(self._event_wksp.run()['scan_index'].value).size == 1:
            self._splitter = None
        else:
            # object to be used for splitting times
            self._splitter = Splitter(self._event_wksp.run())

    def __load_mask(self, mask_file_name):
        # Check input
        checkdatatypes.check_file_name(mask_file_name, True, False, False,
                                       'Mask XML file')
        if self._event_wksp is None:
            raise RuntimeError(
                'Meta data only workspace {} does not exist'.format(
                    self._event_ws_name))

        # Load mask XML to workspace
        mask_ws_name = os.path.basename(mask_file_name.split('.')[0])
        mask_ws = LoadMask(Instrument='nrsf2',
                           InputFile=mask_file_name,
                           RefWorkspace=self._event_wksp,
                           OutputWorkspace=mask_ws_name)

        # Extract mask out
        # get the Y array from mask workspace: shape = (1048576, 1)
        self.mask_array = mask_ws.extractY().flatten()
        # in Mantid's mask workspace: one stands for delete, zero stands for keep
        # we flip the values so that multiplying keeps (one) or zeroes out (deletes) a pixel
        self.mask_array = 1 - self.mask_array.astype(int)

        # clean up
        DeleteWorkspace(Workspace=mask_ws_name)

    def _generate_subrun_event_indices(self, pulse_time_array,
                                       event_index_array, num_events):
        # convert times to array indices - a[i-1] < v <= a[i]
        subrun_pulseindex_array = np.searchsorted(pulse_time_array,
                                                  self._splitter.times)

        # mask of pulse indices that fall within the event_index array
        mask = subrun_pulseindex_array < event_index_array.size

        # it doesn't matter what the initial values are
        subrun_event_index = np.empty(subrun_pulseindex_array.size,
                                      dtype=subrun_pulseindex_array.dtype)
        # standard method is mapping
        subrun_event_index[mask] = event_index_array[
            subrun_pulseindex_array[mask]]
        # things off the end should be set to consume the rest of the events
        subrun_event_index[np.logical_not(mask)] = num_events + 1

        # make sure filter is sorted
        if not np.all(subrun_event_index[:-1] <= subrun_event_index[1:]):
            raise RuntimeError('Filter indices are not ordered: {}'.format(
                subrun_event_index))

        return subrun_event_index

    def split_events_sub_runs(self):
        '''Filter the data by ``scan_index`` and set counts array in the hidra_workspace'''
        # Load: this h5 will be opened all the time
        with h5py.File(self._nexus_name, 'r') as nexus_h5:
            bank1_events = nexus_h5['entry']['bank1_events']
            # Check number of neutron events.  Raise exception if there is no neutron event
            if bank1_events['total_counts'].value[0] < 0.1:
                # no counts
                raise RuntimeError(
                    'Run {} has no count.  Proper reduction requires the run to have count'
                    ''.format(self._nexus_name))

            # detector id for the events
            event_id_array = bank1_events['event_id'].value

            if self._splitter:
                # get event index array: same size as pulse times
                event_index_array = bank1_events['event_index'].value
                # get pulse times
                pulse_time_array = convert_pulses_to_datetime64(
                    bank1_events['event_time_zero'])
                subrun_eventindex_array = self._generate_subrun_event_indices(
                    pulse_time_array, event_index_array, event_id_array.size)
                # reduce memory foot print
                del pulse_time_array, event_index_array

        # split data
        subruns = list()
        if self._splitter:
            for subrun, start_event_index, stop_event_index in zip(
                    self._splitter.subruns.tolist(),
                    subrun_eventindex_array[::2].tolist(),
                    subrun_eventindex_array[1::2].tolist()):
                subruns.append(subrun)
                # get sub set of the events falling into this range
                # and count the occurrence of each event ID (aka detector ID) as counts on each detector pixel
                hist = np.bincount(
                    event_id_array[start_event_index:stop_event_index],
                    minlength=HIDRA_PIXEL_NUMBER)

                # mask (set to zero) the pixels that are not wanted
                if self.mask_array is not None:
                    assert hist.shape == self.mask_array.shape
                    hist *= self.mask_array

                # set it in the workspace
                self._hidra_workspace.set_raw_counts(int(subrun), hist)
        else:  # or histogram everything
            subruns.append(1)
            hist = np.bincount(event_id_array, minlength=HIDRA_PIXEL_NUMBER)

            # mask (set to zero) the pixels that are not wanted
            if self.mask_array is not None:
                assert hist.shape == self.mask_array.shape
                hist *= self.mask_array

            # set it in the workspace
            self._hidra_workspace.set_raw_counts(1, hist)

        return np.array(subruns)

    def split_sample_logs(self, subruns):
        r"""
        Partition each log entry according to the subruns

        Goal:
            1. set sample logs on the hidra workspace
            2. set duration on the hidra workspace

        Returns
        -------
        dict
            Each key corresponds to one log name, and each value corresponds to an array of log values. Each item
            in this array corresponds to the average value of the log within a particular subrun
        """
        run_obj = self._event_wksp.run()

        # Example: if we have three subruns and the average value of log entry 'vx' for each subrun
        # is 0.1, 0.3, and 0.5, then we have sample_log_dict['vx'] == np.array([0.1, 0.3, 0.5])
        sample_log_dict = dict()

        if self._splitter:
            log_array_size = self._splitter.subruns.shape[0]
        else:
            log_array_size = 1

        # loop through all available logs
        for log_name in run_obj.keys():
            # create and calculate the sample log
            sample_log_dict[log_name] = self.__split_property(
                run_obj, log_name, log_array_size)
        # END-FOR

        # create a fictional log for duration
        if HidraConstants.SUB_RUN_DURATION not in sample_log_dict:
            if self._splitter:
                sample_log_dict[
                    HidraConstants.SUB_RUN_DURATION] = self._splitter.durations
            else:
                duration = np.ndarray(shape=(log_array_size, ), dtype=float)
                duration[0] = run_obj.getPropertyAsSingleValue('duration')
                sample_log_dict[HidraConstants.SUB_RUN_DURATION] = duration

        # set the logs on the hidra workspace
        for log_name, log_value in sample_log_dict.items():
            if log_name in ['scan_index', HidraConstants.SUB_RUNS]:
                continue  # skip 'SUB_RUNS'
            # find the units of the log
            if log_name == HidraConstants.SUB_RUN_DURATION:
                log_units = 'second'
            else:
                log_units = run_obj.getProperty(log_name).units
            self._hidra_workspace.set_sample_log(log_name,
                                                 subruns,
                                                 log_value,
                                                 units=log_units)

        return sample_log_dict  # needed for testing

    def __split_property(self, runObj, log_name, log_array_size):
        """Calculate the mean value of the sample log "within" the sub run time range

        Parameters
        ----------
        runObj
        log_name
        log_array_size

        Returns
        -------
        numpy.ndarray
            split logs
        """
        # Init split sample logs
        log_property = runObj[log_name]
        log_dtype = log_property.dtype()
        split_log = np.ndarray(shape=(log_array_size, ), dtype=log_dtype)

        if self._splitter and isinstance(
                log_property.value,
                np.ndarray) and str(log_dtype) in ['f', 'i']:
            # Float or integer time series property: split and get time average
            for i_sb in range(log_array_size):
                split_log[i_sb] = calculate_sub_run_time_average(
                    log_property, self._splitter.propertyFilters[i_sb])
        else:
            try:
                split_log[:] = runObj.getPropertyAsSingleValue(log_name)
            except ValueError:
                if isinstance(log_property.value, str):
                    split_log[:] = log_property.value
                elif isinstance(log_property.value, list):
                    split_log[:] = log_property.value[0]
                else:
                    raise ValueError(
                        'Cannot filter log "{}" of type "{}"'.format(
                            log_name, log_dtype))

        return split_log

    def convert(self, use_mantid=False):
        """Main method to convert NeXus file to HidraProject File by

        1. split the workspace to sub runs
        2. for each split workspace, aka a sub run, get the total counts for each spectrum and save to a 1D array

        Parameters
        ----------
        use_mantid : bool
            Flag to use Mantid library to convert NeXus (True);
            Otherwise, use PyRS/Python algorithms to convert NeXus

        Returns
        -------
        pyrs.core.workspaces.HidraWorkspace
            HidraWorkspace for converted data

        """
        if use_mantid:
            raise RuntimeError('use_mantid=True is no longer supported')

        # set counts to each sub run
        sub_runs = self.split_events_sub_runs()

        # set mask
        if self.mask_array is not None:
            self._hidra_workspace.set_detector_mask(self.mask_array,
                                                    is_default=True)

        self.split_sample_logs(sub_runs)

        # set the nominal wavelength from the nexus file
        runObj = self._event_wksp.run()
        if runObj.hasProperty('MonoSetting'):
            monosetting = MonoSetting.getFromIndex(
                runObj.getPropertyAsSingleValue('MonoSetting'))
        else:
            monosetting = MonoSetting.getFromRotation(
                runObj.getPropertyAsSingleValue('mrot'))
        self._hidra_workspace.set_wavelength(float(monosetting),
                                             calibrated=False)

        return self._hidra_workspace

    def save(self, projectfile):
        """
        Save workspace to Hidra project file
        """
        projectfile = os.path.abspath(
            projectfile)  # confirm absolute path to make logs more readable
        checkdatatypes.check_file_name(
            projectfile,
            check_exist=False,
            check_writable=True,
            is_dir=False,
            description='Converted Hidra project file')

        # remove file if it already exists
        if os.path.exists(projectfile):
            self._log.information(
                'Projectfile "{}" exists, removing previous version'.format(
                    projectfile))
            os.remove(projectfile)

        # save
        hydra_file = HidraProjectFile(projectfile,
                                      HidraProjectFileMode.OVERWRITE)

        # Set geometry
        hydra_file.write_instrument_geometry(
            HidraSetup(self._hidra_workspace.get_instrument_setup()))
        # save experimental data/detector counts
        self._hidra_workspace.save_experimental_data(hydra_file)
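An end-to-end usage sketch for the class above; the file paths are placeholders:

converter = NeXusConvertingApp('/path/to/run.nxs.h5')  # hypothetical event NeXus file
hidra_workspace = converter.convert()
converter.save('/path/to/project.h5')  # write the Hidra project file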
Example No. 28
    def PyExec(self):
        runs = self.getProperty("Filename").value

        if not runs:
            ipts = self.getProperty("IPTS").value
            runs = ['/HFIR/HB2C/IPTS-{}/nexus/HB2C_{}.nxs.h5'.format(ipts, run) for run in self.getProperty("RunNumbers").value]

        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4

        x_dim = 480*8 // grouping
        y_dim = 512 // grouping

        number_of_runs = len(runs)

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs+3)

        for n, run in enumerate(runs):
            progress.report('Loading: '+run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512*480*8),dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank'+str(b+1)+'_events/event_id'].value,minlength=512*480*8)
                bc = bc.reshape((480*8, 512))
                if grouping == 2:
                    bc = bc[::2,::2]+bc[1::2,::2]+bc[::2,1::2]+bc[1::2,1::2]
                elif grouping == 4:
                    bc = (bc[::4,::4]    + bc[1::4,::4]  + bc[2::4,::4]  + bc[3::4,::4]
                          + bc[::4,1::4] + bc[1::4,1::4] + bc[2::4,1::4] + bc[3::4,1::4]
                          + bc[::4,2::4] + bc[1::4,2::4] + bc[2::4,2::4] + bc[3::4,2::4]
                          + bc[::4,3::4] + bc[1::4,3::4] + bc[2::4,3::4] + bc[3::4,3::4])
                data_array[n] = bc
                s1_array.append(f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'].value[0])
                duration_array.append(float(f['/entry/duration'].value[0]))
                run_number_array.append(float(f['/entry/run_number'].value[0]))
                monitor_count_array.append(float(f['/entry/monitor1/total_counts'].value[0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace", enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty("Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim+0.5, x_dim+0.5, number_of_runs+0.5))
        createWS_alg.setProperty("NumberOfBins", '{},{},{}'.format(y_dim,x_dim,number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0], MetaDataOnly=True, EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                   'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                   'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*', _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3,3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0]) # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0]) # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[           1,            0,           0],
                              [           0,  np.cos(sgl), np.sin(sgl)],
                              [           0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[ np.cos(sgu),  np.sin(sgu),           0],
                              [-np.sin(sgu),  np.cos(sgu),           0],
                              [           0,            0,           1]])
            UB = sgl_a.dot(sgu_a).dot(ub) # Apply the Goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws, EnableLogging=False)

            group_number = 0
            for x in range(0,480*8,grouping):
                for y in range(0,512,grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y+i+(x+j)*512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws, CopyGroupingFromWorkspace=_tmp_group, EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws, dEAnalysisMode='Elastic', EnableLogging=False, PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        outWS.getExperimentInfo(0).run().addProperty('s1', s1_array, True)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        outWS.getExperimentInfo(0).run().addProperty('duration', duration_array, True)
        outWS.getExperimentInfo(0).run().getProperty('duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number', run_number_array, True)
        outWS.getExperimentInfo(0).run().addProperty('monitor_count', monitor_count_array, True)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta, True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal, True)

        self.setProperty("OutputWorkspace", outWS)