    def __accumulate(self, chunkname, sumname, chunkunfocusname, sumunfocusname, firstrun, removelogs=False):
        """accumulate new data `chunkname` into sum `sumname` and delete `chunkname`"""
        # the first call to accumulate to a specific target should be a simple rename
        self.log().debug('__accumulate({}, {}, {}, {}, {})'.format(chunkname, sumname, chunkunfocusname,
                                                                   sumunfocusname, firstrun))
        if chunkname == sumname:
            return  # there is nothing to be done

        if not firstrun:
            # if the sum workspace doesn't already exist, just rename
            if not mtd.doesExist(sumname):
                firstrun = True

        if firstrun:
            if chunkname != sumname:
                RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=sumname)
            if chunkunfocusname and chunkunfocusname != sumunfocusname:
                RenameWorkspace(InputWorkspace=chunkunfocusname, OutputWorkspace=sumunfocusname)
        else:
            if removelogs:
                RemoveLogs(Workspace=chunkname)  # accumulation has them already
            RebinToWorkspace(WorkspaceToRebin=chunkname, WorkspaceToMatch=sumname,
                             OutputWorkspace=chunkname)
            Plus(LHSWorkspace=sumname, RHSWorkspace=chunkname, OutputWorkspace=sumname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'])
            DeleteWorkspace(Workspace=chunkname)
            self.__compressEvents(sumname)  # could be smarter about when to run

            if chunkunfocusname and chunkunfocusname != sumunfocusname:
                if removelogs:
                    RemoveLogs(Workspace=chunkunfocusname)  # accumulation has them already
                Plus(LHSWorkspace=sumunfocusname, RHSWorkspace=chunkunfocusname, OutputWorkspace=sumunfocusname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                DeleteWorkspace(Workspace=chunkunfocusname)
                self.__compressEvents(sumunfocusname)  # could be smarter about when to run
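
As a standalone illustration of the pattern above, here is a minimal sketch using plain mantid.simpleapi calls and hypothetical workspace names ('chunk_N', 'total'): the first chunk is simply renamed to the target, and each later chunk is rebinned to match, added, and deleted.

from mantid.simpleapi import (CreateSampleWorkspace, DeleteWorkspace, Plus,
                              RebinToWorkspace, RenameWorkspace, mtd)

def accumulate(chunkname, sumname):
    """Merge the workspace `chunkname` into `sumname`, then delete it."""
    if not mtd.doesExist(sumname):
        # first chunk: a rename is all that is needed
        RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=sumname)
        return
    # later chunks: match the binning of the accumulation, add, then clean up
    RebinToWorkspace(WorkspaceToRebin=chunkname, WorkspaceToMatch=sumname,
                     OutputWorkspace=chunkname)
    Plus(LHSWorkspace=sumname, RHSWorkspace=chunkname, OutputWorkspace=sumname)
    DeleteWorkspace(Workspace=chunkname)

for i in range(3):
    CreateSampleWorkspace(OutputWorkspace='chunk_{}'.format(i))
    accumulate('chunk_{}'.format(i), 'total')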
Example 2
    def __load_logs(self, logs_to_keep):
        '''Use mantid to load the logs, then set up the Splitter object'''
        self._event_wksp = LoadEventNexus(Filename=self._nexus_name,
                                          OutputWorkspace=self._event_ws_name,
                                          MetaDataOnly=True,
                                          LoadMonitors=False)

        # remove unwanted sample logs
        RemoveLogs(self._event_wksp, KeepLogs=logs_to_keep)

        # if there is only one scan index value there is nothing to split on;
        # the rest of the code assumes a splitter only exists for multiple sub-runs
        if self._event_wksp.run()['scan_index'].size() == 1 \
                or np.unique(self._event_wksp.run()['scan_index'].value).size == 1:
            self._splitter = None
        else:
            # object to be used for splitting times
            self._splitter = Splitter(self._event_wksp.run())
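
A minimal sketch of the scan_index check above, runnable outside the class; the file name is a placeholder. A run whose scan_index log holds a single distinct value has nothing to split on.

import numpy as np
from mantid.simpleapi import LoadEventNexus

ws = LoadEventNexus(Filename='HB2B_1017.nxs.h5',  # placeholder file name
                    MetaDataOnly=True, LoadMonitors=False)
scan_index = ws.run()['scan_index'].value
if np.unique(scan_index).size == 1:
    print('single sub-run: nothing to split')
else:
    print('{} sub-runs to split'.format(np.unique(scan_index).size))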
Example 3
    def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace:
        """
        Load the data with the given grouping
        """
        # grouping config
        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4
        number_of_runs = len(runs)

        # the detector is 8 banks of 480 x 512 pixels
        x_dim = 480 * 8 // grouping
        y_dim = 512 // grouping

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs + 3)

        for n, run in enumerate(runs):
            progress.report('Loading: ' + run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512 * 480 * 8), dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank' + str(b + 1) +
                                        '_events/event_id'][()],  # h5py >= 3 removed Dataset.value
                                      minlength=512 * 480 * 8)
                bc = bc.reshape((480 * 8, 512))
                if grouping > 1:
                    # sum each grouping x grouping block of pixels via reshape
                    bc = bc.reshape(480 * 8 // grouping, grouping,
                                    512 // grouping, grouping).sum(axis=(1, 3))
                data_array[n] = bc
                s1_array.append(f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'][0])
                duration_array.append(float(f['/entry/duration'][0]))
                run_number_array.append(float(f['/entry/run_number'][0]))
                monitor_count_array.append(float(f['/entry/monitor1/total_counts'][0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace",
                                                 enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty("Extents", '0.5,{},0.5,{},0.5,{}'.format(
            y_dim + 0.5, x_dim + 0.5, number_of_runs + 0.5))
        createWS_alg.setProperty("NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0],
                                 MetaDataOnly=True,
                                 EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                            'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                            'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        # start time of each run, in nanoseconds, from the cumulative durations
        time_ns_array = _tmp_ws.run().startTime().totalNanoseconds() \
            + np.append(0, np.cumsum(duration_array) * 1e9)[:-1]

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*',
                                     _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3, 3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[1, 0, 0],
                              [0, np.cos(sgl), np.sin(sgl)],
                              [0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                              [-np.sin(sgu), np.cos(sgu), 0],
                              [0, 0, 1]])
            UB = sgl_a.dot(sgu_a).dot(ub)  # apply the goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                                       EnableLogging=False)

            group_number = 0
            for x in range(0, 480 * 8, grouping):
                for y in range(0, 512, grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y + i + (x + j) * 512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                                     CopyGroupingFromWorkspace=_tmp_group,
                                     EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) should work instead, but then deleting `_tmp_ws` makes `outWS` lose its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws,
                              dEAnalysisMode='Elastic',
                              EnableLogging=False,
                              PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        exp_run = outWS.getExperimentInfo(0).run()
        add_time_series_property('s1', exp_run, time_ns_array, s1_array)
        exp_run.getProperty('s1').units = 'deg'
        add_time_series_property('duration', exp_run, time_ns_array, duration_array)
        exp_run.getProperty('duration').units = 'second'
        exp_run.addProperty('run_number', run_number_array, True)
        add_time_series_property('monitor_count', exp_run, time_ns_array, monitor_count_array)
        exp_run.addProperty('twotheta', twotheta, True)
        exp_run.addProperty('azimuthal', azimuthal, True)

        setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                                      enableLogging=False)
        setGoniometer_alg.setProperty("Workspace", outWS)
        setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
        setGoniometer_alg.setProperty("Average", False)
        setGoniometer_alg.execute()

        return outWS
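
The pixel grouping in load_and_group reduces to summing each grouping x grouping block of the detector image; a numpy-only sketch on a toy 4 x 4 array shows the reshape trick used above.

import numpy as np

g = 2
image = np.arange(16, dtype=np.int64).reshape(4, 4)
# splitting each axis into (block, within-block) and summing the
# within-block axes leaves one value per g x g block
grouped = image.reshape(4 // g, g, 4 // g, g).sum(axis=(1, 3))
print(grouped)  # [[10 18]
                #  [42 50]]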
Example 4
    def __processFile(self, filename, wkspname, unfocusname, file_prog_start,
                      determineCharacterizations):
        chunks = determineChunking(filename, self.chunkSize)
        numSteps = 6  # for better progress reporting - 6 steps per chunk
        if unfocusname != '':
            numSteps = 7  # one more for accumulating the unfocused workspace
        self.log().information("Processing '{}' in {:d} chunks".format(filename, len(chunks)))
        prog_per_chunk_step = self.prog_per_file * 1. / (numSteps * float(len(chunks)))
        unfocusname_chunk = ''
        canSkipLoadingLogs = False

        # inner loop is over chunks
        for (j, chunk) in enumerate(chunks):
            prog_start = file_prog_start + float(j) * float(numSteps - 1) * prog_per_chunk_step
            chunkname = '{}_c{:d}'.format(wkspname, j)
            if unfocusname != '':  # only create unfocus chunk if needed
                unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

            # load a chunk - this is long-winded because we need an output property from
            # `Load` when it runs, and the algorithm history doesn't exist until the
            # parent algorithm (this one) has finished
            loader = self.__createLoader(filename, chunkname,
                                         progstart=prog_start,
                                         progstop=prog_start + prog_per_chunk_step)
            if canSkipLoadingLogs:
                loader.setProperty('LoadLogs', False)
            for key, value in chunk.items():
                if isinstance(value, str):
                    loader.setPropertyValue(key, value)
                else:
                    loader.setProperty(key, value)
            loader.execute()

            # copy the necessary logs onto the workspace
            if canSkipLoadingLogs:
                CopyLogs(InputWorkspace=wkspname,
                         OutputWorkspace=chunkname,
                         MergeStrategy='WipeExisting')

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
            canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus'

            if determineCharacterizations and j == 0:
                self.__determineCharacterizations(filename, chunkname)  # updates instance variable
                determineCharacterizations = False

            prog_start += prog_per_chunk_step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=prog_start,
                                endProgress=prog_start + prog_per_chunk_step)
            prog_start += prog_per_chunk_step

            # absorption correction workspace
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='Wavelength',
                             EMode='Elastic')
                Divide(LHSWorkspace=chunkname,
                       RHSWorkspace=self.absorption,
                       OutputWorkspace=chunkname,
                       startProgress=prog_start,
                       endProgress=prog_start + prog_per_chunk_step)
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='TOF',
                             EMode='Elastic')
            prog_start += prog_per_chunk_step

            AlignAndFocusPowder(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                UnfocussedWorkspace=unfocusname_chunk,
                                startProgress=prog_start,
                                endProgress=prog_start + 2. * prog_per_chunk_step,
                                **self.kwargs)
            prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

            if j == 0:
                self.__updateAlignAndFocusArgs(chunkname)
                RenameWorkspace(InputWorkspace=chunkname,
                                OutputWorkspace=wkspname)
                if unfocusname != '':
                    RenameWorkspace(InputWorkspace=unfocusname_chunk,
                                    OutputWorkspace=unfocusname)
            else:
                RemoveLogs(Workspace=chunkname)  # accumulation has them already
                Plus(LHSWorkspace=wkspname,
                     RHSWorkspace=chunkname,
                     OutputWorkspace=wkspname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=prog_start,
                     endProgress=prog_start + prog_per_chunk_step)
                DeleteWorkspace(Workspace=chunkname)

                if unfocusname != '':
                    RemoveLogs(Workspace=unfocusname_chunk)  # accumulation has them already
                    Plus(LHSWorkspace=unfocusname,
                         RHSWorkspace=unfocusname_chunk,
                         OutputWorkspace=unfocusname,
                         ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                         startProgress=prog_start,
                         endProgress=prog_start + prog_per_chunk_step)
                    DeleteWorkspace(Workspace=unfocusname_chunk)

                if self.kwargs['PreserveEvents'] and self.kwargs['CompressTolerance'] > 0.:
                    CompressEvents(InputWorkspace=wkspname,
                                   OutputWorkspace=wkspname,
                                   WallClockTolerance=self.kwargs['CompressWallClockTolerance'],
                                   Tolerance=self.kwargs['CompressTolerance'],
                                   StartTime=self.kwargs['CompressStartTime'])
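
The per-chunk cleanup steps in __processFile can be tried on a single file; this condensed sketch uses a placeholder file name and the same algorithms: FilterBadPulses drops pulses with low proton charge, and CompressEvents merges near-coincident events to save memory.

from mantid.simpleapi import CompressEvents, FilterBadPulses, LoadEventNexus

ws = LoadEventNexus(Filename='PG3_4844_event.nxs')  # placeholder file name
ws = FilterBadPulses(InputWorkspace=ws, LowerCutoff=95.)
ws = CompressEvents(InputWorkspace=ws, Tolerance=0.01,
                    WallClockTolerance=3600.)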
Example 5
from mantid.simpleapi import Load, mtd, CloneWorkspace, Integration, SaveNexus, RemoveLogs
import numpy as np

ws_list = np.genfromtxt('/SNS/users/rwp/corelli/tube_calibration/list',
                        delimiter=',',
                        dtype=[('runs', 'U11'), ('banks', '5i8'),
                               ('height', 'i8')])  # 'U11' so runs load as str, not bytes, on python 3

for run, banks, height in ws_list:
    banks = np.asarray(banks)
    banks = banks[np.nonzero(banks)]  # zeros pad the unused bank slots
    bank_names = ','.join('bank' + str(b) for b in banks)
    data = Load(Filename='CORELLI_' + run,
                BankName=bank_names,
                SingleBankPixelsOnly=False)
    # integrate each pixel and normalize by the total proton charge
    pc = sum(data.getRun()['proton_charge'].value)
    data = Integration(data)
    data /= pc
    RemoveLogs(data)
    # accumulate across runs: create 'accum' on the first pass, then add to it
    if 'accum' in mtd:
        accum += data
    else:
        accum = CloneWorkspace(data)

SaveNexus(accum, '/SNS/users/rwp/corelli/tube_calibration/all_banks.nxs')
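
For reference, a self-contained sketch of how the structured run list above is parsed (made-up data): the dtype maps each CSV line to a run string, five bank numbers with zeros padding unused slots, and a height.

import io
import numpy as np

text = u'12345,1,2,3,0,0,16\n12346,4,5,0,0,0,16\n'  # made-up run list
ws_list = np.genfromtxt(io.StringIO(text), delimiter=',',
                        dtype=[('runs', 'U11'), ('banks', '5i8'),
                               ('height', 'i8')])
for run, banks, height in ws_list:
    banks = banks[np.nonzero(banks)]
    print(run, banks, height)  # e.g. 12345 [1 2 3] 16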