def processData(self, filename, wsName):
    """Load *filename* into the workspace named *wsName* and run the standard
    pre-processing chain on it.

    The chain is: Load -> FindDetectorsPar -> FilterBadPulses ->
    RemovePromptPulse -> LoadDiffCal -> MaskDetectors -> AlignDetectors ->
    ConvertUnits (to Wavelength). All algorithm parameters come from the
    instance attributes set up elsewhere on this object.

    When *filename* is empty the load step is skipped and the chain operates
    on whatever workspace already exists under *wsName*.
    """
    if filename != '':
        if self._SystemTest:
            # system-test mode loads only a single bank -- presumably to keep
            # the test fast; TODO confirm against the system-test setup
            Load(Filename=filename, OutputWorkspace=wsName, BankName='bank22')
        else:
            Load(Filename=filename, OutputWorkspace=wsName)
    FindDetectorsPar(InputWorkspace=wsName,
                     ReturnLinearRanges=self._returnLinearRanges,
                     ParFile=self._parFile,
                     OutputParTable=self._outputParTable)
    # BUG FIX: was 'Outputworkspace' (lower-case 'w'); Mantid keyword
    # properties are case-sensitive, so the misspelled name is not a valid
    # property of FilterBadPulses.
    FilterBadPulses(InputWorkspace=wsName, OutputWorkspace=wsName,
                    LowerCutoff=self._lowerCutoff)
    RemovePromptPulse(InputWorkspace=wsName, OutputWorkspace=wsName,
                      Width=self._width, Frequency=self._frequency)
    LoadDiffCal(InputWorkspace=wsName,
                InstrumentName=self._instrumentName,
                InstrumentFilename=self._instrumentFilename,
                Filename=self._filename,
                MakeGroupingWorkspace=self._makeGroupingWorkspace,
                MakeCalWorkspace=self._makeCalWorkspace,
                MakeMaskWorkspace=self._makeMaskWorkspace,
                WorkspaceName=self._workspaceName,
                TofMin=self._tofMin,
                TofMax=self._tofMax,
                FixConversionIssues=self._fixConversionIssues)
    MaskDetectors(Workspace=wsName,
                  SpectraList=self._spectraList,
                  DetectorList=self._detectorList,
                  WorkspaceIndexList=self._workspaceIndexList,
                  MaskedWorkspace=self._maskedWorkspace,
                  ForceInstrumentMasking=self._forceInstrumentMasking,
                  StartWorkspaceIndex=self._startWorkspaceIndex,
                  EndWorkspaceIndex=self._endWorkspaceIndex,
                  ComponentList=self._componentList)
    AlignDetectors(InputWorkspace=wsName, OutputWorkspace=wsName,
                   CalibrationFile=self._calibrationFile)
    ConvertUnits(InputWorkspace=wsName, OutputWorkspace=wsName,
                 Target='Wavelength')
# Beispiel #2
# 0
    def __processFile(self, filename, wkspname, file_prog_start, determineCharacterizations):
        """Reduce one input file into the workspace named *wkspname*.

        The file is read in chunks; each chunk is loaded, optionally filtered
        and absorption-corrected, focussed with AlignAndFocusPowder, and then
        summed onto *wkspname*.
        """
        chunk_list = determineChunking(filename, self.chunkSize)
        self.log().information('Processing \'%s\' in %d chunks' % (filename, len(chunk_list)))
        # six progress steps per chunk (AlignAndFocusPowder counts double)
        step = self.prog_per_file * 1. / (6. * float(len(chunk_list)))

        # loop over the chunks of this file
        for idx, chunk_kwargs in enumerate(chunk_list):
            progress = file_prog_start + float(idx) * 5. * step
            chunk_ws = '%s_c%d' % (wkspname, idx)
            Load(Filename=filename, OutputWorkspace=chunk_ws,
                 startProgress=progress, endProgress=progress + step,
                 **chunk_kwargs)
            if determineCharacterizations:
                # only needed once per file; updates instance state
                self.__determineCharacterizations(filename, chunk_ws, False)
                determineCharacterizations = False

            progress += step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunk_ws, OutputWorkspace=chunk_ws,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=progress, endProgress=progress + step)
            progress += step

            # absorption correction is applied in wavelength, then the data
            # is converted back to time-of-flight
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunk_ws, OutputWorkspace=chunk_ws,
                             Target='Wavelength', EMode='Elastic')
                Divide(LHSWorkspace=chunk_ws, RHSWorkspace=self.absorption,
                       OutputWorkspace=chunk_ws,
                       startProgress=progress, endProgress=progress + step)
                ConvertUnits(InputWorkspace=chunk_ws, OutputWorkspace=chunk_ws,
                             Target='TOF', EMode='Elastic')
            progress += step

            AlignAndFocusPowder(InputWorkspace=chunk_ws, OutputWorkspace=chunk_ws,
                                startProgress=progress,
                                endProgress=progress + 2. * step,
                                **self.kwargs)
            progress += 2. * step  # AlignAndFocusPowder counts for two steps

            if idx == 0:
                # first chunk simply becomes the accumulation workspace
                self.__updateAlignAndFocusArgs(chunk_ws)
                RenameWorkspace(InputWorkspace=chunk_ws, OutputWorkspace=wkspname)
            else:
                # later chunks are summed onto the accumulation
                Plus(LHSWorkspace=wkspname, RHSWorkspace=chunk_ws, OutputWorkspace=wkspname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=progress, endProgress=progress + step)
                DeleteWorkspace(Workspace=chunk_ws)
                if self.kwargs['PreserveEvents']:
                    CompressEvents(InputWorkspace=wkspname, OutputWorkspace=wkspname)
# Beispiel #3
# 0
    def __processFile(self, filename, file_prog_start,
                      determineCharacterizations,
                      createUnfocused):  # noqa: C902,C901
        """Load and reduce one input file, chunk by chunk.

        Returns a two-tuple ``(wkspname, unfocusname)``: the name of the
        focussed accumulation workspace and the name of the unfocussed
        accumulation (empty string when *createUnfocused* is False).

        When caching is enabled and the cache file exists, the focussed
        result is loaded from cache and reduction is skipped entirely.
        """
        # create a unique name for the workspace
        wkspname = '__' + self.__wkspNameFromFile(filename)
        wkspname += '_f%d' % self._filenames.index(
            filename)  # add file number to be unique
        unfocusname = ''
        if createUnfocused:
            unfocusname = wkspname + '_unfocused'

        # check for a cachefilename - only usable when the unfocussed data was
        # not requested, since the cache holds only the focussed result
        cachefile = self.__getCacheName(self.__wkspNameFromFile(filename))
        self.log().information('looking for cachefile "{}"'.format(cachefile))
        if (not createUnfocused
            ) and self.useCaching and os.path.exists(cachefile):
            try:
                if self.__loadCacheFile(cachefile, wkspname):
                    return wkspname, ''
            except RuntimeError as e:
                # log as a warning and carry on as though the cache file didn't exist
                self.log().warning('Failed to load cache file "{}": {}'.format(
                    cachefile, e))
        else:
            self.log().information('not using cache')

        chunks = determineChunking(filename, self.chunkSize)
        numSteps = 6  # for better progress reporting - 6 steps per chunk
        if createUnfocused:
            numSteps = 7  # one more for accumulating the unfocused workspace
        self.log().information('Processing \'{}\' in {:d} chunks'.format(
            filename, len(chunks)))
        prog_per_chunk_step = self.prog_per_file * 1. / (numSteps *
                                                         float(len(chunks)))

        unfocusname_chunk = ''
        canSkipLoadingLogs = False

        # inner loop is over chunks
        haveAccumulationForFile = False
        for (j, chunk) in enumerate(chunks):
            prog_start = file_prog_start + float(j) * float(
                numSteps - 1) * prog_per_chunk_step

            # if reading all at once, put the data into the final name directly
            if len(chunks) == 1:
                chunkname = wkspname
                unfocusname_chunk = unfocusname
            else:
                chunkname = '{}_c{:d}'.format(wkspname, j)
                if unfocusname:  # only create unfocus chunk if needed
                    unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

            # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
            # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
            loader = self.__createLoader(
                filename,
                chunkname,
                skipLoadingLogs=(len(chunks) > 1 and canSkipLoadingLogs
                                 and haveAccumulationForFile),
                progstart=prog_start,
                progstop=prog_start + prog_per_chunk_step,
                **chunk)
            loader.execute()
            if j == 0:
                self.__setupCalibration(chunkname)

            # copy the necessary logs onto the workspace
            if len(chunks
                   ) > 1 and canSkipLoadingLogs and haveAccumulationForFile:
                CopyLogs(InputWorkspace=wkspname,
                         OutputWorkspace=chunkname,
                         MergeStrategy='WipeExisting')
                # re-load instrument so detector positions that depend on logs get initialized
                try:
                    LoadIDFFromNexus(Workspace=chunkname,
                                     Filename=filename,
                                     InstrumentParentPath='/entry')
                except RuntimeError as e:
                    self.log().warning(
                        'Reloading instrument using "LoadIDFFromNexus" failed: {}'
                        .format(e))

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
            # only LoadEventNexus can turn off loading logs, but FilterBadPulses
            # requires them to be loaded from the file
            canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus' and self.filterBadPulses <= 0. and haveAccumulationForFile

            if determineCharacterizations and j == 0:
                self.__determineCharacterizations(
                    filename, chunkname)  # updates instance variable
                determineCharacterizations = False

            if self.__loaderName == 'LoadEventNexus' and mtd[
                    chunkname].getNumberEvents() == 0:
                self.log().notice(
                    'Chunk {} of {} contained no events. Skipping to next chunk.'
                    .format(j + 1, len(chunks)))
                continue

            prog_start += prog_per_chunk_step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=prog_start,
                                endProgress=prog_start + prog_per_chunk_step)
                if mtd[chunkname].getNumberEvents() == 0:
                    msg = 'FilterBadPulses removed all events from '
                    if len(chunks) == 1:
                        raise RuntimeError(msg + filename)
                    else:
                        # BUG FIX: report the chunk number 1-based (j + 1) so the
                        # message agrees with the 'Chunk {} of {}' notice above;
                        # previously the 0-based loop index was reported
                        raise RuntimeError(msg + 'chunk {} of {} in {}'.format(
                            j + 1, len(chunks), filename))

            prog_start += prog_per_chunk_step

            # absorption correction workspace
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='Wavelength',
                             EMode='Elastic')
                # rebin the absorption correction to match the binning of the inputs if in histogram mode
                # EventWorkspace will compare the wavelength of each individual event
                absWksp = self.absorption
                if mtd[chunkname].id() != 'EventWorkspace':
                    absWksp = '__absWkspRebinned'
                    RebinToWorkspace(WorkspaceToRebin=self.absorption,
                                     WorkspaceToMatch=chunkname,
                                     OutputWorkspace=absWksp)
                Divide(LHSWorkspace=chunkname,
                       RHSWorkspace=absWksp,
                       OutputWorkspace=chunkname,
                       startProgress=prog_start,
                       endProgress=prog_start + prog_per_chunk_step)
                if absWksp != self.absorption:  # clean up
                    DeleteWorkspace(Workspace=absWksp)
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='TOF',
                             EMode='Elastic')
            prog_start += prog_per_chunk_step

            if self.kwargs is None:
                raise RuntimeError(
                    'Somehow arguments for "AlignAndFocusPowder" aren\'t set')

            AlignAndFocusPowder(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                UnfocussedWorkspace=unfocusname_chunk,
                                startProgress=prog_start,
                                endProgress=prog_start +
                                2. * prog_per_chunk_step,
                                **self.kwargs)
            prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

            self.__accumulate(chunkname,
                              wkspname,
                              unfocusname_chunk,
                              unfocusname,
                              not haveAccumulationForFile,
                              removelogs=canSkipLoadingLogs)

            haveAccumulationForFile = True
        # end of inner loop
        if not mtd.doesExist(wkspname):
            raise RuntimeError(
                'Failed to process any data from file "{}"'.format(filename))

        # copy the sample object from the absorption workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            CopySample(InputWorkspace=self.absorption,
                       OutputWorkspace=wkspname,
                       CopyEnvironment=False)

        # write out the cachefile for the main reduced data independent of whether
        # the unfocussed workspace was requested
        if self.useCaching and not os.path.exists(cachefile):
            self.log().information(
                'Saving data to cachefile "{}"'.format(cachefile))
            SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

        return wkspname, unfocusname
# Beispiel #4
# 0
    def __processFile(self, filename, wkspname, unfocusname, file_prog_start,
                      determineCharacterizations):
        """Reduce one input file into *wkspname*, chunk by chunk.

        Each chunk is loaded, optionally filtered and absorption-corrected,
        focussed with AlignAndFocusPowder, and then summed onto *wkspname*.
        When *unfocusname* is non-empty the unfocussed data is accumulated
        under that name as well.
        """
        chunk_list = determineChunking(filename, self.chunkSize)
        # progress accounting: 6 steps per chunk, or 7 when the unfocussed
        # accumulation is requested as well
        steps_per_chunk = 7 if unfocusname != '' else 6
        self.log().information('Processing \'{}\' in {:d} chunks'.format(
            filename, len(chunk_list)))
        step = self.prog_per_file * 1. / (steps_per_chunk *
                                          float(len(chunk_list)))
        unfocus_chunk = ''

        # loop over the chunks of this file
        for idx, chunk_kwargs in enumerate(chunk_list):
            progress = file_prog_start + float(idx) * float(
                steps_per_chunk - 1) * step
            chunk_ws = '{}_c{:d}'.format(wkspname, idx)
            if unfocusname != '':  # only create unfocus chunk if needed
                unfocus_chunk = '{}_c{:d}'.format(unfocusname, idx)

            Load(Filename=filename,
                 OutputWorkspace=chunk_ws,
                 startProgress=progress,
                 endProgress=progress + step,
                 **chunk_kwargs)
            if determineCharacterizations:
                # only needed once per file; updates instance state
                self.__determineCharacterizations(filename, chunk_ws, False)
                determineCharacterizations = False

            progress += step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunk_ws,
                                OutputWorkspace=chunk_ws,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=progress,
                                endProgress=progress + step)
            progress += step

            # absorption correction is applied in wavelength, then the data is
            # converted back to time-of-flight
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunk_ws,
                             OutputWorkspace=chunk_ws,
                             Target='Wavelength',
                             EMode='Elastic')
                Divide(LHSWorkspace=chunk_ws,
                       RHSWorkspace=self.absorption,
                       OutputWorkspace=chunk_ws,
                       startProgress=progress,
                       endProgress=progress + step)
                ConvertUnits(InputWorkspace=chunk_ws,
                             OutputWorkspace=chunk_ws,
                             Target='TOF',
                             EMode='Elastic')
            progress += step

            AlignAndFocusPowder(InputWorkspace=chunk_ws,
                                OutputWorkspace=chunk_ws,
                                UnfocussedWorkspace=unfocus_chunk,
                                startProgress=progress,
                                endProgress=progress + 2. * step,
                                **self.kwargs)
            progress += 2. * step  # AlignAndFocusPowder counts for two steps

            if idx == 0:
                # first chunk simply becomes the accumulation workspace(s)
                self.__updateAlignAndFocusArgs(chunk_ws)
                RenameWorkspace(InputWorkspace=chunk_ws,
                                OutputWorkspace=wkspname)
                if unfocusname != '':
                    RenameWorkspace(InputWorkspace=unfocus_chunk,
                                    OutputWorkspace=unfocusname)
            else:
                # later chunks are summed onto the accumulation
                Plus(LHSWorkspace=wkspname,
                     RHSWorkspace=chunk_ws,
                     OutputWorkspace=wkspname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=progress,
                     endProgress=progress + step)
                DeleteWorkspace(Workspace=chunk_ws)

                if unfocusname != '':
                    Plus(LHSWorkspace=unfocusname,
                         RHSWorkspace=unfocus_chunk,
                         OutputWorkspace=unfocusname,
                         ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                         startProgress=progress,
                         endProgress=progress + step)
                    DeleteWorkspace(Workspace=unfocus_chunk)

                if self.kwargs['PreserveEvents']:
                    CompressEvents(InputWorkspace=wkspname,
                                   OutputWorkspace=wkspname)
# Beispiel #5
# 0
    def __processFile(self, filename, wkspname, unfocusname, file_prog_start,
                      determineCharacterizations):
        """Reduce one input file into *wkspname*, chunk by chunk.

        Each chunk is loaded (via a loader created by ``__createLoader``),
        optionally filtered and absorption-corrected, focussed with
        AlignAndFocusPowder, and then summed onto *wkspname*. When
        *unfocusname* is non-empty the unfocussed data is accumulated under
        that name as well. After the first chunk of a LoadEventNexus file,
        sample logs are copied from the accumulation instead of being
        re-loaded from disk.
        """
        chunks = determineChunking(filename, self.chunkSize)
        numSteps = 6  # for better progress reporting - 6 steps per chunk
        if unfocusname != '':
            numSteps = 7  # one more for accumulating the unfocused workspace
        self.log().information('Processing \'{}\' in {:d} chunks'.format(
            filename, len(chunks)))
        prog_per_chunk_step = self.prog_per_file * 1. / (numSteps *
                                                         float(len(chunks)))
        unfocusname_chunk = ''
        # set after the first chunk when the loader is LoadEventNexus; lets
        # subsequent chunks skip reading logs from the file
        canSkipLoadingLogs = False

        # inner loop is over chunks
        for (j, chunk) in enumerate(chunks):
            prog_start = file_prog_start + float(j) * float(
                numSteps - 1) * prog_per_chunk_step
            chunkname = '{}_c{:d}'.format(wkspname, j)
            if unfocusname != '':  # only create unfocus chunk if needed
                unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

            # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
            # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
            loader = self.__createLoader(filename,
                                         chunkname,
                                         progstart=prog_start,
                                         progstop=prog_start +
                                         prog_per_chunk_step)
            if canSkipLoadingLogs:
                loader.setProperty('LoadLogs', False)
            # chunk boundaries (e.g. spectrum/event ranges) are forwarded as
            # individual loader properties
            for key, value in chunk.items():
                if isinstance(value, str):
                    loader.setPropertyValue(key, value)
                else:
                    loader.setProperty(key, value)
            loader.execute()

            # copy the necessary logs onto the workspace
            if canSkipLoadingLogs:
                CopyLogs(InputWorkspace=wkspname,
                         OutputWorkspace=chunkname,
                         MergeStrategy='WipeExisting')

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
            # only LoadEventNexus supports turning off log loading
            canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus'

            if determineCharacterizations and j == 0:
                self.__determineCharacterizations(
                    filename, chunkname)  # updates instance variable
                determineCharacterizations = False

            prog_start += prog_per_chunk_step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=prog_start,
                                endProgress=prog_start + prog_per_chunk_step)
            prog_start += prog_per_chunk_step

            # absorption correction workspace: divide in wavelength, then
            # convert back to time-of-flight
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='Wavelength',
                             EMode='Elastic')
                Divide(LHSWorkspace=chunkname,
                       RHSWorkspace=self.absorption,
                       OutputWorkspace=chunkname,
                       startProgress=prog_start,
                       endProgress=prog_start + prog_per_chunk_step)
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='TOF',
                             EMode='Elastic')
            prog_start += prog_per_chunk_step

            AlignAndFocusPowder(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                UnfocussedWorkspace=unfocusname_chunk,
                                startProgress=prog_start,
                                endProgress=prog_start +
                                2. * prog_per_chunk_step,
                                **self.kwargs)
            prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

            if j == 0:
                # first chunk simply becomes the accumulation workspace(s)
                self.__updateAlignAndFocusArgs(chunkname)
                RenameWorkspace(InputWorkspace=chunkname,
                                OutputWorkspace=wkspname)
                if unfocusname != '':
                    RenameWorkspace(InputWorkspace=unfocusname_chunk,
                                    OutputWorkspace=unfocusname)
            else:
                RemoveLogs(
                    Workspace=chunkname)  # accumulation has them already
                Plus(LHSWorkspace=wkspname,
                     RHSWorkspace=chunkname,
                     OutputWorkspace=wkspname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=prog_start,
                     endProgress=prog_start + prog_per_chunk_step)
                DeleteWorkspace(Workspace=chunkname)

                if unfocusname != '':
                    RemoveLogs(Workspace=unfocusname_chunk
                               )  # accumulation has them already
                    Plus(LHSWorkspace=unfocusname,
                         RHSWorkspace=unfocusname_chunk,
                         OutputWorkspace=unfocusname,
                         ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                         startProgress=prog_start,
                         endProgress=prog_start + prog_per_chunk_step)
                    DeleteWorkspace(Workspace=unfocusname_chunk)

                # only compress when events are kept and a tolerance is set
                if self.kwargs['PreserveEvents'] and self.kwargs[
                        'CompressTolerance'] > 0.:
                    CompressEvents(InputWorkspace=wkspname,
                                   OutputWorkspace=wkspname,
                                   WallClockTolerance=self.
                                   kwargs['CompressWallClockTolerance'],
                                   Tolerance=self.kwargs['CompressTolerance'],
                                   StartTime=self.kwargs['CompressStartTime'])