Example #1
class NeXusConvertingApp:
    """
    Convert NeXus file to Hidra project file
    """
    def __init__(self,
                 nexus_file_name,
                 mask_file_name=None,
                 extra_logs=list()):
        """Initialization

        Parameters
        ----------
        nexus_file_name : str
            Name of NeXus file
        mask_file_name : str
            Name of masking file
        extra_logs : list, tuple
            list of strings naming additional (non-default) logs to keep in the project file
        """
        # configure logging for this class
        self._log = Logger(__name__)

        # validate NeXus file exists
        checkdatatypes.check_file_name(nexus_file_name, True, False, False,
                                       'NeXus file')
        self._nexus_name = nexus_file_name

        # validate mask file exists
        if mask_file_name is None:
            self._mask_file_name = None
        else:
            checkdatatypes.check_file_name(mask_file_name, True, False, False,
                                           'Mask file')
            self._mask_file_name = mask_file_name
            if not mask_file_name.lower().endswith('.xml'):
                raise NotImplementedError(
                    'Only Mantid mask in XML format is supported now.  File '
                    '{} with type {} is not supported yet.'
                    ''.format(mask_file_name,
                              mask_file_name.split('.')[-1]))

        # workspaces
        self._event_ws_name = os.path.basename(nexus_file_name).split('.')[0]

        logs_to_keep = list(extra_logs)
        logs_to_keep.extend(DEFAULT_KEEP_LOGS)

        self.__load_logs(logs_to_keep)

        # load the mask
        self.mask_array = None  # TODO: promote to direct access
        if mask_file_name:
            self.__load_mask(mask_file_name)

        # create the hidra workspace
        self._hidra_workspace = workspaces.HidraWorkspace(self._nexus_name)

        # Set a default instrument with this workspace
        # set up instrument
        # initialize instrument with hard coded values
        instrument = DENEXDetectorGeometry(NUM_PIXEL_1D, NUM_PIXEL_1D,
                                           PIXEL_SIZE, PIXEL_SIZE, ARM_LENGTH,
                                           False)

        self._hidra_workspace.set_instrument_geometry(instrument)

        # project file
        self._project_file = None

    def __del__(self):
        if self._event_ws_name in mtd:
            DeleteWorkspace(Workspace=self._event_ws_name, EnableLogging=False)

    def __load_logs(self, logs_to_keep):
        '''Use mantid to load the logs then set up the Splitters object'''
        self._event_wksp = LoadEventNexus(Filename=self._nexus_name,
                                          OutputWorkspace=self._event_ws_name,
                                          MetaDataOnly=True,
                                          LoadMonitors=False)

        # remove unwanted sample logs
        RemoveLogs(self._event_wksp, KeepLogs=logs_to_keep)

        # if the scan_index log holds only a single value there is nothing to
        # split, so no Splitter is created; the rest of the code relies on this
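        # (illustration: a scan_index log reading [0, 1, 1, 2, 2] has several
        # distinct values and is split into sub-runs; a constant log such as
        # [1, 1, 1] is not)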
        if self._event_wksp.run()['scan_index'].size() == 1 \
                or np.unique(self._event_wksp.run()['scan_index'].value).size == 1:
            self._splitter = None
        else:
            # object to be used for splitting times
            self._splitter = Splitter(self._event_wksp.run())

    def __load_mask(self, mask_file_name):
        # Check input
        checkdatatypes.check_file_name(mask_file_name, True, False, False,
                                       'Mask XML file')
        if self._event_wksp is None:
            raise RuntimeError(
                'Meta data only workspace {} does not exist'.format(
                    self._event_ws_name))

        # Load mask XML to workspace
        mask_ws_name = os.path.basename(mask_file_name).split('.')[0]
        mask_ws = LoadMask(Instrument='nrsf2',
                           InputFile=mask_file_name,
                           RefWorkspace=self._event_wksp,
                           OutputWorkspace=mask_ws_name)

        # Extract mask out
        # get the Y array from mask workspace: shape = (1048576, 1)
        self.mask_array = mask_ws.extractY().flatten()
        # in Mantid's mask workspace one means masked (delete) and zero means keep;
        # invert so the array can be used as a multiplier: zero deletes, one keeps
        self.mask_array = 1 - self.mask_array.astype(int)
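        # e.g. a Mantid mask of [0, 1, 0, ...] (1 = masked) becomes the
        # multiplier [1, 0, 1, ...] that zeroes masked pixels in the counts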

        # clean up
        DeleteWorkspace(Workspace=mask_ws_name)

    def _generate_subrun_event_indices(self, pulse_time_array,
                                       event_index_array, num_events):
        # convert times to array indices - a[i-1] < v <= a[i]
        subrun_pulseindex_array = np.searchsorted(pulse_time_array,
                                                  self._splitter.times)
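        # illustration with assumed values: pulse_time_array = [10, 20, 30, 40]
        # and splitter times [20, 40] give subrun_pulseindex_array = [1, 3];
        # event_index_array[[1, 3]] then yields the first event number of each
        # boundary pulse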

        # pulse indices that land inside the event_index array; the rest point
        # past the end of the run
        mask = subrun_pulseindex_array < event_index_array.size

        # it doesn't matter what the initial values are
        subrun_event_index = np.empty(subrun_pulseindex_array.size,
                                      dtype=subrun_pulseindex_array.dtype)
        # standard method is mapping
        subrun_event_index[mask] = event_index_array[
            subrun_pulseindex_array[mask]]
        # things off the end should be set to consume the rest of the events
        subrun_event_index[np.logical_not(mask)] = num_events + 1

        # make sure filter is sorted
        if not np.all(subrun_event_index[:-1] <= subrun_event_index[1:]):
            raise RuntimeError('Filter indices are not ordered: {}'.format(
                subrun_event_index))

        return subrun_event_index

    def split_events_sub_runs(self):
        '''Filter the data by ``scan_index`` and set counts array in the hidra_workspace'''
        # Load: this h5 will be opened all the time
        with h5py.File(self._nexus_name, 'r') as nexus_h5:
            bank1_events = nexus_h5['entry']['bank1_events']
            # Check number of neutron events; raise an exception if there are none
            if bank1_events['total_counts'][0] < 0.1:
                # no counts
                raise RuntimeError(
                    'Run {} has no counts.  Proper reduction requires the run to have counts'
                    ''.format(self._nexus_name))

            # detector id for the events
            event_id_array = bank1_events['event_id'][()]

            if self._splitter:
                # get event index array: same size as pulse times
                event_index_array = bank1_events['event_index'][()]
                # get pulse times
                pulse_time_array = convert_pulses_to_datetime64(
                    bank1_events['event_time_zero'])
                subrun_eventindex_array = self._generate_subrun_event_indices(
                    pulse_time_array, event_index_array, event_id_array.size)
                # reduce memory foot print
                del pulse_time_array, event_index_array

        # split data
        subruns = list()
        if self._splitter:
            for subrun, start_event_index, stop_event_index in zip(
                    self._splitter.subruns.tolist(),
                    subrun_eventindex_array[::2].tolist(),
                    subrun_eventindex_array[1::2].tolist()):
                subruns.append(subrun)
                # get sub set of the events falling into this range
                # and count the occurrence of each event ID (aka detector ID) as counts on each detector pixel
                hist = np.bincount(
                    event_id_array[start_event_index:stop_event_index],
                    minlength=HIDRA_PIXEL_NUMBER)

                # mask (set to zero) the pixels that are not wanted
                if self.mask_array is not None:
                    assert hist.shape == self.mask_array.shape
                    hist *= self.mask_array

                # set it in the workspace
                self._hidra_workspace.set_raw_counts(int(subrun), hist)
        else:  # or histogram everything
            subruns.append(1)
            hist = np.bincount(event_id_array, minlength=HIDRA_PIXEL_NUMBER)

            # mask (set to zero) the pixels that are not wanted
            if self.mask_array is not None:
                assert hist.shape == self.mask_array.shape
                hist *= self.mask_array

            # set it in the workspace
            self._hidra_workspace.set_raw_counts(1, hist)

        return np.array(subruns)

    def split_sample_logs(self, subruns):
        r"""
        Partition each log entry according to the subruns

        Goal:
            1. set sample logs on the hidra workspace
            2. set duration on the hidra workspace

        Parameters
        ----------
        subruns : numpy.ndarray
            array of sub-run numbers, as returned by ``split_events_sub_runs``

        Returns
        -------
        dict
            Each key corresponds to one log name, and each value corresponds to an array of log values. Each item
            in this array corresponds to the average value of the log within a particular subrun
        """
        run_obj = self._event_wksp.run()

        # Example: if we have three subruns and the average value of log entry 'vx' for each subrun
        # is 0.1, 0.3, and 0.5, then we have sample_log_dict['vx'] == np.array([0.1, 0.3, 0.5])
        sample_log_dict = dict()

        if self._splitter:
            log_array_size = self._splitter.subruns.shape[0]
        else:
            log_array_size = 1

        # loop through all available logs
        for log_name in run_obj.keys():
            # create and calculate the sample log
            sample_log_dict[log_name] = self.__split_property(
                run_obj, log_name, log_array_size)
        # END-FOR

        # create a fictional log for duration
        if HidraConstants.SUB_RUN_DURATION not in sample_log_dict:
            if self._splitter:
                sample_log_dict[
                    HidraConstants.SUB_RUN_DURATION] = self._splitter.durations
            else:
                duration = np.ndarray(shape=(log_array_size, ), dtype=float)
                duration[0] = run_obj.getPropertyAsSingleValue('duration')
                sample_log_dict[HidraConstants.SUB_RUN_DURATION] = duration

        # set the logs on the hidra workspace
        for log_name, log_value in sample_log_dict.items():
            if log_name in ['scan_index', HidraConstants.SUB_RUNS]:
                continue  # skip 'SUB_RUNS'
            # find the units of the log
            if log_name == HidraConstants.SUB_RUN_DURATION:
                log_units = 'second'
            else:
                log_units = run_obj.getProperty(log_name).units
            self._hidra_workspace.set_sample_log(log_name,
                                                 subruns,
                                                 log_value,
                                                 units=log_units)

        return sample_log_dict  # needed for testing

    def __split_property(self, runObj, log_name, log_array_size):
        """Calculate the mean value of the sample log "within" the sub run time range

        Parameters
        ----------
        runObj : mantid.api.Run
            run object holding the sample logs
        log_name : str
            name of the sample log to split
        log_array_size : int
            number of sub-runs

        Returns
        -------
        numpy.ndarray
            split logs
        """
        # Init split sample logs
        log_property = runObj[log_name]
        log_dtype = log_property.dtype()
        split_log = np.ndarray(shape=(log_array_size, ), dtype=log_dtype)

        if self._splitter and isinstance(
                log_property.value,
                np.ndarray) and str(log_dtype) in ['f', 'i']:
            # Float or integer time series property: split and get time average
            for i_sb in range(log_array_size):
                split_log[i_sb] = calculate_sub_run_time_average(
                    log_property, self._splitter.propertyFilters[i_sb])
        else:
            try:
                split_log[:] = runObj.getPropertyAsSingleValue(log_name)
            except ValueError:
                if isinstance(log_property.value, str):
                    split_log[:] = log_property.value
                elif isinstance(log_property.value, list):
                    split_log[:] = log_property.value[0]
                else:
                    raise ValueError(
                        'Cannot filter log "{}" of type "{}"'.format(
                            log_name, log_dtype))

        return split_log

    def convert(self, use_mantid=False):
        """Main method to convert NeXus file to HidraProject File by

        1. split the workspace to sub runs
        2. for each split workspace, aka a sub run, get the total counts for each spectrum and save to a 1D array

        Parameters
        ----------
        use_mantid : bool
            Flag to use Mantid library to convert NeXus (True);
            Otherwise, use PyRS/Python algorithms to convert NeXus

        Returns
        -------
        pyrs.core.workspaces.HidraWorkspace
            HidraWorkspace for converted data

        """
        if use_mantid:
            raise RuntimeError('use_mantid=True is no longer supported')

        # set counts to each sub run
        sub_runs = self.split_events_sub_runs()

        # set mask
        if self.mask_array is not None:
            self._hidra_workspace.set_detector_mask(self.mask_array,
                                                    is_default=True)

        self.split_sample_logs(sub_runs)

        # set the nominal wavelength from the nexus file
        runObj = self._event_wksp.run()
        if runObj.hasProperty('MonoSetting'):
            monosetting = MonoSetting.getFromIndex(
                runObj.getPropertyAsSingleValue('MonoSetting'))
        else:
            monosetting = MonoSetting.getFromRotation(
                runObj.getPropertyAsSingleValue('mrot'))
        self._hidra_workspace.set_wavelength(float(monosetting),
                                             calibrated=False)

        return self._hidra_workspace

    def save(self, projectfile):
        """
        Save workspace to Hidra project file
        """
        projectfile = os.path.abspath(
            projectfile)  # confirm absolute path to make logs more readable
        checkdatatypes.check_file_name(
            projectfile,
            check_exist=False,
            check_writable=True,
            is_dir=False,
            description='Converted Hidra project file')

        # remove file if it already exists
        if os.path.exists(projectfile):
            self._log.information(
                'Projectfile "{}" exists, removing previous version'.format(
                    projectfile))
            os.remove(projectfile)

        # save
        hydra_file = HidraProjectFile(projectfile,
                                      HidraProjectFileMode.OVERWRITE)

        # Set geometry
        hydra_file.write_instrument_geometry(
            HidraSetup(self._hidra_workspace.get_instrument_setup()))
        # save experimental data/detector counts
        self._hidra_workspace.save_experimental_data(hydra_file)
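
A minimal usage sketch for the converter above; the file paths are placeholders and the class is assumed to be importable from its PyRS package:

converter = NeXusConvertingApp('/HFIR/HB2B/IPTS-1234/nexus/HB2B_1060.nxs.h5')
hidra_workspace = converter.convert(use_mantid=False)
converter.save('/tmp/HB2B_1060.h5')
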
Example #2
    def PyExec(self):
        runs = self.getProperty("Filename").value

        if not runs:
            ipts = self.getProperty("IPTS").value
            runs = ['/HFIR/HB2C/IPTS-{}/nexus/HB2C_{}.nxs.h5'.format(ipts, run) for run in self.getProperty("RunNumbers").value]

        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4

        x_dim = 480*8 // grouping
        y_dim = 512 // grouping

        number_of_runs = len(runs)

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs+3)

        for n, run in enumerate(runs):
            progress.report('Loading: '+run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512*480*8),dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank'+str(b+1)+'_events/event_id'][()],minlength=512*480*8)
                bc = bc.reshape((480*8, 512))
                if grouping == 2:
                    bc = bc[::2,::2]+bc[1::2,::2]+bc[::2,1::2]+bc[1::2,1::2]
                elif grouping == 4:
                    bc = (bc[::4,::4]    + bc[1::4,::4]  + bc[2::4,::4]  + bc[3::4,::4]
                          + bc[::4,1::4] + bc[1::4,1::4] + bc[2::4,1::4] + bc[3::4,1::4]
                          + bc[::4,2::4] + bc[1::4,2::4] + bc[2::4,2::4] + bc[3::4,2::4]
                          + bc[::4,3::4] + bc[1::4,3::4] + bc[2::4,3::4] + bc[3::4,3::4])
                data_array[n] = bc
                s1_array.append(f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'][0])
                duration_array.append(float(f['/entry/duration'][0]))
                run_number_array.append(float(f['/entry/run_number'][0]))
                monitor_count_array.append(float(f['/entry/monitor1/total_counts'][0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace", enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty("Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim+0.5, x_dim+0.5, number_of_runs+0.5))
        createWS_alg.setProperty("NumberOfBins", '{},{},{}'.format(y_dim,x_dim,number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0], MetaDataOnly=True, EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                   'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                   'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*', _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3,3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0]) # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0]) # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[           1,            0,           0],
                              [           0,  np.cos(sgl), np.sin(sgl)],
                              [           0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[ np.cos(sgu),  np.sin(sgu),           0],
                              [-np.sin(sgu),  np.cos(sgu),           0],
                              [           0,            0,           1]])
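            # sgl_a and sgu_a are the transposes of right-handed rotations about
            # x and z, matching the -1 sense noted in the log comments above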
            UB = sgl_a.dot(sgu_a).dot(ub) # Apply the Goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws, EnableLogging=False)

            group_number = 0
            for x in range(0,480*8,grouping):
                for y in range(0,512,grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y+i+(x+j)*512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws, CopyGroupingFromWorkspace=_tmp_group, EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `_tmp_ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws, dEAnalysisMode='Elastic', EnableLogging=False, PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        outWS.getExperimentInfo(0).run().addProperty('s1', s1_array, True)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        outWS.getExperimentInfo(0).run().addProperty('duration', duration_array, True)
        outWS.getExperimentInfo(0).run().getProperty('duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number', run_number_array, True)
        outWS.getExperimentInfo(0).run().addProperty('monitor_count', monitor_count_array, True)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta, True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal, True)

        self.setProperty("OutputWorkspace", outWS)
Example #3
from mantid.simpleapi import LoadEventNexus
ws = LoadEventNexus(Filename='/HFIR/HB2C/IPTS-7776/nexus/HB2C_26625.nxs.h5',
                    MetaDataOnly=True)
if ws.run().hasProperty('HB2C:CS:CrystalAlign:UBMatrix'):
    ub = ','.join(
        ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0].replace(
            '[', '').replace(']', '').split())
    print(ub)
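    # Sketch: the comma-joined string above can be turned back into a 3x3
    # matrix, mirroring the reshape used in the reduction example
    import numpy as np
    ub_matrix = np.array(ub.split(','), dtype=float).reshape(3, 3)
    print(ub_matrix)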
Example #4
    def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace:
        """
        Load the data with given grouping
        """
        # grouping config
        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4
        number_of_runs = len(runs)

        x_dim = 480 * 8 // grouping
        y_dim = 512 // grouping

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs + 3)

        for n, run in enumerate(runs):
            progress.report('Loading: ' + run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512 * 480 * 8), dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank' + str(b + 1) + '_events/event_id'][()],
                                      minlength=512 * 480 * 8)
                bc = bc.reshape((480 * 8, 512))
                if grouping == 2:
                    bc = bc[::2, ::2] + bc[1::2, ::2] + bc[::2, 1::2] + bc[1::2, 1::2]
                elif grouping == 4:
                    bc = (bc[::4, ::4] + bc[1::4, ::4] + bc[2::4, ::4] + bc[3::4, ::4]
                          + bc[::4, 1::4] + bc[1::4, 1::4] + bc[2::4, 1::4] + bc[3::4, 1::4]
                          + bc[::4, 2::4] + bc[1::4, 2::4] + bc[2::4, 2::4] + bc[3::4, 2::4]
                          + bc[::4, 3::4] + bc[1::4, 3::4] + bc[2::4, 3::4] + bc[3::4, 3::4])
                data_array[n] = bc
                s1_array.append(
                    f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'][0])
                duration_array.append(float(f['/entry/duration'][0]))
                run_number_array.append(float(f['/entry/run_number'][0]))
                monitor_count_array.append(
                    float(f['/entry/monitor1/total_counts'][0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace",
                                                 enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty(
            "Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim + 0.5, x_dim + 0.5,
                                                     number_of_runs + 0.5))
        createWS_alg.setProperty(
            "NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0],
                                 MetaDataOnly=True,
                                 EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                            'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                            'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        time_ns_array = (_tmp_ws.run().startTime().totalNanoseconds()
                         + np.append(0, np.cumsum(duration_array) * 1e9)[:-1])
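        # time_ns_array approximates the start time of each run: the first run
        # starts at startTime() and each later run starts after the summed
        # durations of the runs before it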

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*',
                                     _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3, 3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[1, 0, 0],
                              [0, np.cos(sgl), np.sin(sgl)],
                              [0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                              [-np.sin(sgu), np.cos(sgu), 0],
                              [0, 0, 1]])
            UB = sgl_a.dot(sgu_a).dot(ub)  # Apply the Goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                                       EnableLogging=False)

            group_number = 0
            for x in range(0, 480 * 8, grouping):
                for y in range(0, 512, grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y + i + (x + j) * 512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                                     CopyGroupingFromWorkspace=_tmp_group,
                                     EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `_tmp_ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws,
                              dEAnalysisMode='Elastic',
                              EnableLogging=False,
                              PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        add_time_series_property('s1',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, s1_array)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        add_time_series_property('duration',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, duration_array)
        outWS.getExperimentInfo(0).run().getProperty(
            'duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number',
                                                     run_number_array, True)
        add_time_series_property('monitor_count',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, monitor_count_array)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta,
                                                     True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal,
                                                     True)

        setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                                      enableLogging=False)
        setGoniometer_alg.setProperty("Workspace", outWS)
        setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
        setGoniometer_alg.setProperty("Average", False)
        setGoniometer_alg.execute()

        return outWS
Example #5
import h5py
from mantid.simpleapi import LoadEventNexus

filename = '/HFIR/HB2C/IPTS-7776/nexus/HB2C_26625.nxs.h5'

ws = LoadEventNexus(Filename=filename, MetaDataOnly=True)
if ws.run().hasProperty('HB2C:CS:ITEMS:Nature'):
    nature = ws.run().getProperty('HB2C:CS:ITEMS:Nature').value[0]
    print(nature)
    print('Powder: {}'.format(nature == "Powder"))

with h5py.File(filename, 'r') as f:
    if '/entry/DASlogs/HB2C:CS:ITEMS:Nature' in f:
        nature = f['/entry/DASlogs/HB2C:CS:ITEMS:Nature/value'][0][0].decode()  # h5py returns bytes
        print(nature)
        print('Powder: {}'.format(nature == "Powder"))