def PyExec(self):
    """Run the reduction: align/focus the data, normalise by current,
    stamp GSAS-related run logs, set sqrt-or-one uncertainties, and save
    the result as a GSAS SLOG file for PDFgetN.
    """
    self._loadCharacterizations()
    charac = ""
    # only pass the characterization table if it already exists in the ADS
    if mtd.doesExist("characterizations"):
        charac = "characterizations"

    # arguments for both AlignAndFocusPowder and AlignAndFocusPowderFromFiles
    self._alignArgs['OutputWorkspace'] = self.getPropertyValue("OutputWorkspace")
    self._alignArgs['RemovePromptPulseWidth'] = self.getProperty("RemovePromptPulseWidth").value
    self._alignArgs['CompressTolerance'] = COMPRESS_TOL_TOF  # module-level constant; TOF compression tolerance
    self._alignArgs['PreserveEvents'] = True
    self._alignArgs['CalFileName'] = self.getProperty("CalibrationFile").value
    self._alignArgs['Params'] = self.getProperty("Binning").value
    self._alignArgs['ResampleX'] = self.getProperty("ResampleX").value
    self._alignArgs['Dspacing'] = True
    self._alignArgs['CropWavelengthMin'] = self.getProperty('CropWavelengthMin').value
    self._alignArgs['CropWavelengthMax'] = self.getProperty('CropWavelengthMax').value
    self._alignArgs['ReductionProperties'] = '__snspowderreduction'

    wksp = self.getProperty("InputWorkspace").value
    if wksp is None:  # run from file with caching
        wksp = AlignAndFocusPowderFromFiles(Filename=self.getProperty("Filename").value,
                                            CacheDir=self.getProperty("CacheDir").value,
                                            MaxChunkSize=self.getProperty("MaxChunkSize").value,
                                            FilterBadPulses=self.getProperty("FilterBadPulses").value,
                                            Characterizations=charac,
                                            FrequencyLogNames=self.getProperty("FrequencyLogNames").value,
                                            WaveLengthLogNames=self.getProperty("WaveLengthLogNames").value,
                                            **(self._alignArgs))
    else:  # process the input workspace
        self.log().information("Using input workspace. Ignoring properties 'Filename', "
                               + "'OutputWorkspace', 'MaxChunkSize', and 'FilterBadPulses'")
        # get the correct row of the table
        PDDetermineCharacterizations(InputWorkspace=wksp,
                                     Characterizations=charac,
                                     ReductionProperties="__snspowderreduction",
                                     FrequencyLogNames=self.getProperty("FrequencyLogNames").value,
                                     WaveLengthLogNames=self.getProperty("WaveLengthLogNames").value)
        wksp = AlignAndFocusPowder(InputWorkspace=wksp, **(self._alignArgs))

    wksp = NormaliseByCurrent(InputWorkspace=wksp, OutputWorkspace=wksp)
    # record GSAS metadata in the run logs; SaveGSS/ExtendedHeader uses run info
    wksp.getRun()['gsas_monitor'] = 1
    if self._iparmFile is not None:
        wksp.getRun()['iparm_file'] = self._iparmFile

    # PDFgetN expects sqrt(counts) errors, with zero-count bins given error 1
    wksp = SetUncertainties(InputWorkspace=wksp, OutputWorkspace=wksp,
                            SetError="sqrtOrOne")

    SaveGSS(InputWorkspace=wksp,
            Filename=self.getProperty("PDFgetNFile").value,
            SplitFiles=False, Append=False,
            MultiplyByBinWidth=False,
            # bank number comes from the property manager filled in by the
            # characterization step above
            Bank=mantid.pmds["__snspowderreduction"]["bank"].value,
            Format="SLOG", ExtendedHeader=True)

    self.setProperty("OutputWorkspace", wksp)
def _alignAndFocus(self, filename, wkspname, detCalFilename, withUnfocussed, progStart, progDelta):
    """Load and focus a single file, optionally keeping an unfocussed copy.

    Returns a ``(focused_name, unfocussed_name)`` pair; the second element
    is the empty string when ``withUnfocussed`` is falsy.
    """
    # derive the unfocussed workspace name; '' disables its creation
    unfocussed = wkspname.replace('_red', '') + '_d' if withUnfocussed else ''

    if detCalFilename:
        # geometry comes from an ISAW DetCal file, so the data has to be
        # loaded first and the instrument overridden in place
        segmentEnd = progStart + .45 * progDelta
        Load(Filename=filename, OutputWorkspace=wkspname,
             startProgress=progStart, endProgress=segmentEnd)
        progStart, segmentEnd = segmentEnd, segmentEnd + .45 * progDelta
        LoadIsawDetCal(InputWorkspace=wkspname, Filename=detCalFilename)
        AlignAndFocusPowder(InputWorkspace=wkspname,
                            OutputWorkspace=wkspname,
                            UnfocussedWorkspace=unfocussed,  # can be empty string
                            startProgress=progStart,
                            endProgress=segmentEnd,
                            **self.alignAndFocusArgs)
        progStart = segmentEnd
    else:
        segmentEnd = progStart + .9 * progDelta
        # delegate load + focus (with chunking) to the child algorithm
        AlignAndFocusPowderFromFiles(Filename=filename,
                                     OutputWorkspace=wkspname,
                                     MaxChunkSize=self.chunkSize,
                                     UnfocussedWorkspace=unfocussed,  # can be empty string
                                     startProgress=progStart,
                                     endProgress=segmentEnd,
                                     **self.alignAndFocusArgs)
        progStart = segmentEnd

    # last 10% of this file's progress budget: normalise by proton charge
    NormaliseByCurrent(InputWorkspace=wkspname, OutputWorkspace=wkspname,
                       startProgress=progStart,
                       endProgress=progStart + .1 * progDelta)
    return wkspname, unfocussed
def __processFile(self, filename, wkspname, file_prog_start, determineCharacterizations):
    """Reduce one file in chunks: load, (optionally) filter bad pulses,
    apply the absorption correction, align/focus, and accumulate the
    chunks into ``wkspname``.

    ``file_prog_start`` is the progress fraction at which this file's
    reporting begins; ``determineCharacterizations`` triggers a one-time
    characterization lookup on the first chunk.
    """
    chunks = determineChunking(filename, self.chunkSize)
    self.log().information('Processing \'%s\' in %d chunks' % (filename, len(chunks)))
    # budget: load(1) + filter(1) + absorption(1) + focus(2) + accumulate(1)
    prog_per_chunk_step = self.prog_per_file * 1./(6.*float(len(chunks)))  # for better progress reporting - 6 steps per chunk
    # inner loop is over chunks
    for (j, chunk) in enumerate(chunks):
        prog_start = file_prog_start + float(j) * 5. * prog_per_chunk_step
        chunkname = "%s_c%d" % (wkspname, j)
        Load(Filename=filename, OutputWorkspace=chunkname,
             startProgress=prog_start,
             endProgress=prog_start+prog_per_chunk_step,
             **chunk)
        if determineCharacterizations:
            self.__determineCharacterizations(filename, chunkname, False)  # updates instance variable
            determineCharacterizations = False  # only needed once per file
        prog_start += prog_per_chunk_step

        if self.filterBadPulses > 0.:
            FilterBadPulses(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            LowerCutoff=self.filterBadPulses,
                            startProgress=prog_start,
                            endProgress=prog_start+prog_per_chunk_step)
        prog_start += prog_per_chunk_step

        # absorption correction workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            # the correction is defined in wavelength, so round-trip the units
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='Wavelength', EMode='Elastic')
            Divide(LHSWorkspace=chunkname, RHSWorkspace=self.absorption,
                   OutputWorkspace=chunkname,
                   startProgress=prog_start,
                   endProgress=prog_start+prog_per_chunk_step)
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='TOF', EMode='Elastic')
        prog_start += prog_per_chunk_step

        AlignAndFocusPowder(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            startProgress=prog_start,
                            endProgress=prog_start+2.*prog_per_chunk_step,
                            **self.kwargs)
        prog_start += 2.*prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

        if j == 0:
            # first chunk becomes the accumulation target
            self.__updateAlignAndFocusArgs(chunkname)
            RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=wkspname)
        else:
            Plus(LHSWorkspace=wkspname, RHSWorkspace=chunkname, OutputWorkspace=wkspname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                 startProgress=prog_start,
                 endProgress=prog_start+prog_per_chunk_step)
            DeleteWorkspace(Workspace=chunkname)
            if self.kwargs['PreserveEvents']:
                # keep the event count bounded while accumulating
                CompressEvents(InputWorkspace=wkspname, OutputWorkspace=wkspname)
def live_reduce(self, input_ws, output_ws):
    """Live-data reduction step for VULCAN: snapshot the accumulated events,
    focus them with a hard-coded calibration, and simplify the instrument
    geometry.

    NOTE(review): calibration file path and geometry values are hard-coded
    for a specific VULCAN calibration (2017-08); debug ``print`` calls and
    the commented-out EditInstrumentGeometry arguments suggest this is
    prototype/diagnostic code.
    """
    ws = input_ws
    # 'counter' workspace is used as a persistent iteration counter:
    # the current index is stored in its first X value
    counter_ws = mtd['counter']
    index = int(counter_ws.readX(0)[0])
    print('index = ', index)
    counter_ws.dataX(0)[0] += 1  # bump the counter for the next call
    print('Iteration {0}: Number of events = {1}'.format(index, ws.getNumberEvents()))

    # keep a per-iteration snapshot of the incoming events
    curr_ws_name = 'output_{0}'.format(index)
    CloneWorkspace(InputWorkspace=input_ws, OutputWorkspace=curr_ws_name)
    # rebinned copy of the raw input goes to the requested output
    Rebin(InputWorkspace=input_ws, OutputWorkspace=output_ws, Params='5000., -0.001, 50000.')

    AlignAndFocusPowder(InputWorkspace=mtd[curr_ws_name], OutputWorkspace=curr_ws_name,
                        CalFileName='/SNS/VULCAN/shared/CALIBRATION/2017_8_11_CAL/VULCAN_calibrate_2017_08_17.h5',
                        Params='-0.001',
                        DMin='0.5', DMax='3.5',
                        PreserveEvents=False)
    # PrimaryFlightPath=43, SpectrumIDs='0-2', L2='2,2,2', Polar='90,270,145', Azimuthal='0, 0, 0')
    print('[SpecialDebug] Interface... EditInstrument on {0}'.format(curr_ws_name))
    # replace the full instrument with a 3-spectrum effective geometry
    EditInstrumentGeometry(Workspace=curr_ws_name,
                           PrimaryFlightPath=43.753999999999998,
                           SpectrumIDs='1,2,3',
                           L2='2.00944,2.00944,2.00944',
                           Polar='90,270,150')
def __processFile(self, filename, file_prog_start, determineCharacterizations, createUnfocused):  # noqa: C902,C901
    """Reduce one file in chunks with optional disk caching.

    Returns ``(wkspname, unfocusname)`` where ``unfocusname`` is '' unless
    ``createUnfocused`` is True. When caching is enabled and no unfocused
    copy is requested, a previously saved cache file short-circuits the
    whole reduction.
    """
    # create a unique name for the workspace
    wkspname = '__' + self.__wkspNameFromFile(filename)
    wkspname += '_f%d' % self._filenames.index(filename)  # add file number to be unique
    unfocusname = ''
    if createUnfocused:
        unfocusname = wkspname + '_unfocused'

    # check for a cachefilename
    cachefile = self.__getCacheName(self.__wkspNameFromFile(filename))
    self.log().information('looking for cachefile "{}"'.format(cachefile))
    if (not createUnfocused) and self.useCaching and os.path.exists(cachefile):
        try:
            if self.__loadCacheFile(cachefile, wkspname):
                return wkspname, ''
        except RuntimeError as e:
            # log as a warning and carry on as though the cache file didn't exist
            self.log().warning('Failed to load cache file "{}": {}'.format(cachefile, e))
    else:
        self.log().information('not using cache')

    chunks = determineChunking(filename, self.chunkSize)
    numSteps = 6  # for better progress reporting - 6 steps per chunk
    if createUnfocused:
        numSteps = 7  # one more for accumulating the unfocused workspace
    self.log().information('Processing \'{}\' in {:d} chunks'.format(filename, len(chunks)))
    prog_per_chunk_step = self.prog_per_file * 1. / (numSteps * float(len(chunks)))

    unfocusname_chunk = ''
    canSkipLoadingLogs = False

    # inner loop is over chunks
    haveAccumulationForFile = False
    for (j, chunk) in enumerate(chunks):
        prog_start = file_prog_start + float(j) * float(numSteps - 1) * prog_per_chunk_step

        # if reading all at once, put the data into the final name directly
        if len(chunks) == 1:
            chunkname = wkspname
            unfocusname_chunk = unfocusname
        else:
            chunkname = '{}_c{:d}'.format(wkspname, j)
            if unfocusname:  # only create unfocus chunk if needed
                unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

        # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
        # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
        loader = self.__createLoader(filename, chunkname,
                                     skipLoadingLogs=(len(chunks) > 1 and canSkipLoadingLogs and haveAccumulationForFile),
                                     progstart=prog_start,
                                     progstop=prog_start + prog_per_chunk_step,
                                     **chunk)
        loader.execute()
        if j == 0:
            self.__setupCalibration(chunkname)

        # copy the necessary logs onto the workspace
        if len(chunks) > 1 and canSkipLoadingLogs and haveAccumulationForFile:
            CopyLogs(InputWorkspace=wkspname, OutputWorkspace=chunkname, MergeStrategy='WipeExisting')
            # re-load instrument so detector positions that depend on logs get initialized
            try:
                LoadIDFFromNexus(Workspace=chunkname, Filename=filename, InstrumentParentPath='/entry')
            except RuntimeError as e:
                self.log().warning('Reloading instrument using "LoadIDFFromNexus" failed: {}'.format(e))

        # get the underlying loader name if we used the generic one
        if self.__loaderName == 'Load':
            self.__loaderName = loader.getPropertyValue('LoaderName')
        # only LoadEventNexus can turn off loading logs, but FilterBadPulses
        # requires them to be loaded from the file
        canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus' and self.filterBadPulses <= 0. and haveAccumulationForFile

        if determineCharacterizations and j == 0:
            self.__determineCharacterizations(filename, chunkname)  # updates instance variable
            determineCharacterizations = False

        # empty chunks can legitimately occur; skip rather than fail
        if self.__loaderName == 'LoadEventNexus' and mtd[chunkname].getNumberEvents() == 0:
            self.log().notice('Chunk {} of {} contained no events. Skipping to next chunk.'.format(j + 1, len(chunks)))
            continue

        prog_start += prog_per_chunk_step
        if self.filterBadPulses > 0.:
            FilterBadPulses(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            LowerCutoff=self.filterBadPulses,
                            startProgress=prog_start,
                            endProgress=prog_start + prog_per_chunk_step)
            if mtd[chunkname].getNumberEvents() == 0:
                # losing every event to the filter is an error, not a skip
                msg = 'FilterBadPulses removed all events from '
                if len(chunks) == 1:
                    raise RuntimeError(msg + filename)
                else:
                    raise RuntimeError(msg + 'chunk {} of {} in {}'.format(j, len(chunks), filename))
        prog_start += prog_per_chunk_step

        # absorption correction workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='Wavelength', EMode='Elastic')
            # rebin the absorption correction to match the binning of the inputs if in histogram mode
            # EventWorkspace will compare the wavelength of each individual event
            absWksp = self.absorption
            if mtd[chunkname].id() != 'EventWorkspace':
                absWksp = '__absWkspRebinned'
                RebinToWorkspace(WorkspaceToRebin=self.absorption,
                                 WorkspaceToMatch=chunkname,
                                 OutputWorkspace=absWksp)
            Divide(LHSWorkspace=chunkname, RHSWorkspace=absWksp,
                   OutputWorkspace=chunkname,
                   startProgress=prog_start,
                   endProgress=prog_start + prog_per_chunk_step)
            if absWksp != self.absorption:  # clean up
                DeleteWorkspace(Workspace=absWksp)
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='TOF', EMode='Elastic')
        prog_start += prog_per_chunk_step

        if self.kwargs is None:
            raise RuntimeError('Somehow arguments for "AlignAndFocusPowder" aren\'t set')

        AlignAndFocusPowder(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            UnfocussedWorkspace=unfocusname_chunk,
                            startProgress=prog_start,
                            endProgress=prog_start + 2. * prog_per_chunk_step,
                            **self.kwargs)
        prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

        self.__accumulate(chunkname, wkspname, unfocusname_chunk, unfocusname,
                          not haveAccumulationForFile, removelogs=canSkipLoadingLogs)
        haveAccumulationForFile = True
    # end of inner loop
    if not mtd.doesExist(wkspname):
        # every chunk was skipped (e.g. all empty) - nothing was reduced
        raise RuntimeError('Failed to process any data from file "{}"'.format(filename))

    # copy the sample object from the absorption workspace
    if self.absorption is not None and len(str(self.absorption)) > 0:
        CopySample(InputWorkspace=self.absorption, OutputWorkspace=wkspname,
                   CopyEnvironment=False)

    # write out the cachefile for the main reduced data independent of whether
    # the unfocussed workspace was requested
    if self.useCaching and not os.path.exists(cachefile):
        self.log().information('Saving data to cachefile "{}"'.format(cachefile))
        SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

    return wkspname, unfocusname
def PyExec(self):
    """Reduce the requested SNAP runs.

    For each run: load, align/focus, convert to d-spacing, simplify the
    instrument geometry, normalise, optionally divide by a normalization
    workspace, save the requested formats, and expose the results as
    dynamically declared output-workspace properties.

    Bug fix: the loop-local property-name prefix used in "Set-Up" mode was
    assigned to the variable ``prefix``, clobbering the user-supplied
    "OptionalPrefix" value; from the second run onward every output name
    became e.g. ``SNAP_OuputWorkspace_0__<run>_<group>``. The loop-local is
    now ``propPrefix`` so the optional prefix survives across iterations.
    """
    in_Runs = self.getProperty("RunNumbers").value
    maskWSname = self._getMaskWSname()
    progress = Progress(self, 0., .25, 3)

    # default arguments for AlignAndFocusPowder
    alignAndFocusArgs = {
        'TMax': 50000,
        'RemovePromptPulseWidth': 1600,
        'PreserveEvents': False,
        'Dspacing': True,  # binning parameters in d-space
        'Params': self.getProperty("Binning").value
    }

    # workspace for loading metadata only to be used in LoadDiffCal and
    # CreateGroupingWorkspace
    metaWS = None

    # either type of file-based calibration is stored in the same variable
    calib = self.getProperty("Calibration").value
    detcalFile = None
    if calib == "Calibration File":
        metaWS = self._loadMetaWS(in_Runs[0])
        LoadDiffCal(Filename=self.getPropertyValue("CalibrationFilename"),
                    WorkspaceName='SNAP',
                    InputWorkspace=metaWS,
                    MakeGroupingWorkspace=False,
                    MakeMaskWorkspace=False)
        alignAndFocusArgs['CalibrationWorkspace'] = 'SNAP_cal'
    elif calib == 'DetCal File':
        detcalFile = ','.join(self.getProperty('DetCalFilename').value)
    progress.report('loaded calibration')

    norm = self.getProperty("Normalization").value
    if norm == "From Processed Nexus":
        norm_File = self.getProperty("NormalizationFilename").value
        normalizationWS = 'normWS'
        LoadNexusProcessed(Filename=norm_File, OutputWorkspace=normalizationWS)
        progress.report('loaded normalization')
    elif norm == "From Workspace":
        normalizationWS = str(self.getProperty("NormalizationWorkspace").value)
        progress.report('')
    else:
        normalizationWS = None
        progress.report('')

    group = self._generateGrouping(in_Runs[0], metaWS, progress)

    if metaWS is not None:
        DeleteWorkspace(Workspace=metaWS)

    Process_Mode = self.getProperty("ProcessingMode").value
    # user-supplied name fragment; must NOT be overwritten inside the loop
    prefix = self.getProperty("OptionalPrefix").value

    # --------------------------- REDUCE DATA -----------------------------
    Tag = 'SNAP'
    if self.getProperty("LiveData").value:
        Tag = 'Live'

    progStart = .25
    progDelta = (1. - progStart) / len(in_Runs)
    for i, runnumber in enumerate(in_Runs):
        self.log().notice("processing run %s" % runnumber)
        self.log().information(str(self.get_IPTS_Local(runnumber)))

        # put together output names
        new_Tag = Tag
        if len(prefix) > 0:
            new_Tag += '_' + prefix
        basename = '%s_%s_%s' % (new_Tag, runnumber, group)

        if self.getProperty("LiveData").value:
            raise RuntimeError('Live data is not currently supported')
        else:
            Load(Filename='SNAP' + str(runnumber),
                 OutputWorkspace=basename + '_red',
                 startProgress=progStart,
                 endProgress=progStart + .25 * progDelta)
            progStart += .25 * progDelta
        redWS = basename + '_red'

        # overwrite geometry with detcal files
        if calib == 'DetCal File':
            LoadIsawDetCal(InputWorkspace=redWS, Filename=detcalFile)

        # create unfocussed data if in set-up mode
        if Process_Mode == "Set-Up":
            unfocussedWksp = '{}_{}_d'.format(new_Tag, runnumber)
        else:
            unfocussedWksp = ''

        AlignAndFocusPowder(InputWorkspace=redWS, OutputWorkspace=redWS,
                            MaskWorkspace=maskWSname,  # can be empty string
                            GroupingWorkspace=group,
                            UnfocussedWorkspace=unfocussedWksp,  # can be empty string
                            startProgress=progStart,
                            endProgress=progStart + .5 * progDelta,
                            **alignAndFocusArgs)
        progStart += .5 * progDelta

        # the rest takes up .25 percent of the run processing
        progress = Progress(self, progStart, progStart + .25 * progDelta, 2)

        # AlignAndFocusPowder leaves the data in time-of-flight
        ConvertUnits(InputWorkspace=redWS, OutputWorkspace=redWS,
                     Target='dSpacing', EMode='Elastic')

        # Edit instrument geometry to make final workspace smaller on disk
        det_table = PreprocessDetectorsToMD(Inputworkspace=redWS,
                                            OutputWorkspace='__SNAP_det_table')
        polar = np.degrees(det_table.column('TwoTheta'))
        azi = np.degrees(det_table.column('Azimuthal'))
        EditInstrumentGeometry(Workspace=redWS,
                               L2=det_table.column('L2'),
                               Polar=polar, Azimuthal=azi)
        mtd.remove('__SNAP_det_table')
        progress.report('simplify geometry')

        # AlignAndFocus doesn't necessarily rebin the data correctly
        if Process_Mode == "Set-Up":
            Rebin(InputWorkspace=unfocussedWksp,
                  Params=alignAndFocusArgs['Params'],
                  Outputworkspace=unfocussedWksp)

        NormaliseByCurrent(InputWorkspace=redWS, OutputWorkspace=redWS)

        # normalize the data as requested
        normalizationWS = self._generateNormalization(redWS, norm, normalizationWS)
        normalizedWS = None
        if normalizationWS is not None:
            normalizedWS = basename + '_nor'
            Divide(LHSWorkspace=redWS, RHSWorkspace=normalizationWS,
                   OutputWorkspace=normalizedWS)
            ReplaceSpecialValues(Inputworkspace=normalizedWS,
                                 OutputWorkspace=normalizedWS,
                                 NaNValue='0', NaNError='0',
                                 InfinityValue='0', InfinityError='0')
            progress.report('normalized')
        else:
            progress.report()

        # rename everything as appropriate and determine output workspace name
        if normalizedWS is None:
            outputWksp = redWS
        else:
            outputWksp = normalizedWS
            if norm == "Extracted from Data" and Process_Mode == "Production":
                DeleteWorkspace(Workspace=redWS)
                DeleteWorkspace(Workspace=normalizationWS)

        # Save requested formats
        saveDir = self.getPropertyValue("OutputDirectory").strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(self.get_IPTS_Local(runnumber), 'shared', 'data')
        self._save(saveDir, basename, outputWksp)

        # set workspace as an output so it gets history
        propertyName = 'OutputWorkspace_' + str(outputWksp)
        self.declareProperty(WorkspaceProperty(propertyName, outputWksp, Direction.Output))
        self.setProperty(propertyName, outputWksp)

        # declare some things as extra outputs in set-up
        if Process_Mode != "Production":
            # NOTE: the historical 'OuputWorkspace' (sic) spelling is kept so
            # existing consumers of these dynamic properties keep working
            propPrefix = 'OuputWorkspace_{:d}_'.format(i)
            propNames = [propPrefix + it for it in ['d', 'norm', 'normalizer']]
            wkspNames = ['%s_%s_d' % (new_Tag, runnumber),
                         basename + '_red',
                         '%s_%s_normalizer' % (new_Tag, runnumber)]
            for (propName, wkspName) in zip(propNames, wkspNames):
                if mtd.doesExist(wkspName):
                    self.declareProperty(WorkspaceProperty(propName, wkspName, Direction.Output))
                    self.setProperty(propName, wkspName)
def __processFile(self, filename, wkspname, unfocusname, file_prog_start, determineCharacterizations):
    """Reduce one file in chunks, accumulating both the focused result
    (``wkspname``) and, when ``unfocusname`` is non-empty, an unfocused
    copy (``unfocusname``).
    """
    chunks = determineChunking(filename, self.chunkSize)
    numSteps = 6  # for better progress reporting - 6 steps per chunk
    if unfocusname != '':
        numSteps = 7  # one more for accumulating the unfocused workspace
    self.log().information('Processing \'{}\' in {:d} chunks'.format(filename, len(chunks)))
    prog_per_chunk_step = self.prog_per_file * 1. / (numSteps * float(len(chunks)))
    unfocusname_chunk = ''
    # inner loop is over chunks
    for (j, chunk) in enumerate(chunks):
        prog_start = file_prog_start + float(j) * float(numSteps - 1) * prog_per_chunk_step
        chunkname = '{}_c{:d}'.format(wkspname, j)
        if unfocusname != '':  # only create unfocus chunk if needed
            unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

        Load(Filename=filename, OutputWorkspace=chunkname,
             startProgress=prog_start,
             endProgress=prog_start + prog_per_chunk_step,
             **chunk)
        if determineCharacterizations:
            self.__determineCharacterizations(filename, chunkname, False)  # updates instance variable
            determineCharacterizations = False  # only needed once per file
        prog_start += prog_per_chunk_step

        if self.filterBadPulses > 0.:
            FilterBadPulses(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            LowerCutoff=self.filterBadPulses,
                            startProgress=prog_start,
                            endProgress=prog_start + prog_per_chunk_step)
        prog_start += prog_per_chunk_step

        # absorption correction workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            # correction is defined in wavelength, so round-trip the units
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='Wavelength', EMode='Elastic')
            Divide(LHSWorkspace=chunkname, RHSWorkspace=self.absorption,
                   OutputWorkspace=chunkname,
                   startProgress=prog_start,
                   endProgress=prog_start + prog_per_chunk_step)
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='TOF', EMode='Elastic')
        prog_start += prog_per_chunk_step

        AlignAndFocusPowder(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            UnfocussedWorkspace=unfocusname_chunk,
                            startProgress=prog_start,
                            endProgress=prog_start + 2. * prog_per_chunk_step,
                            **self.kwargs)
        prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

        if j == 0:
            # first chunk becomes the accumulation target(s)
            self.__updateAlignAndFocusArgs(chunkname)
            RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=wkspname)
            if unfocusname != '':
                RenameWorkspace(InputWorkspace=unfocusname_chunk, OutputWorkspace=unfocusname)
        else:
            Plus(LHSWorkspace=wkspname, RHSWorkspace=chunkname, OutputWorkspace=wkspname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                 startProgress=prog_start,
                 endProgress=prog_start + prog_per_chunk_step)
            DeleteWorkspace(Workspace=chunkname)
            if unfocusname != '':
                Plus(LHSWorkspace=unfocusname, RHSWorkspace=unfocusname_chunk,
                     OutputWorkspace=unfocusname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=prog_start,
                     endProgress=prog_start + prog_per_chunk_step)
                DeleteWorkspace(Workspace=unfocusname_chunk)
            if self.kwargs['PreserveEvents']:
                # keep the accumulated event list bounded; note the unfocused
                # workspace is deliberately left uncompressed
                CompressEvents(InputWorkspace=wkspname, OutputWorkspace=wkspname)
def __processFile(self, filename, wkspname, unfocusname, file_prog_start, determineCharacterizations):
    """Reduce one file in chunks using an explicitly-created loader so the
    loader's output properties can be inspected; skips re-loading sample
    logs on later chunks when the loader supports it.
    """
    chunks = determineChunking(filename, self.chunkSize)
    numSteps = 6  # for better progress reporting - 6 steps per chunk
    if unfocusname != '':
        numSteps = 7  # one more for accumulating the unfocused workspace
    self.log().information('Processing \'{}\' in {:d} chunks'.format(filename, len(chunks)))
    prog_per_chunk_step = self.prog_per_file * 1. / (numSteps * float(len(chunks)))
    unfocusname_chunk = ''
    canSkipLoadingLogs = False
    # inner loop is over chunks
    for (j, chunk) in enumerate(chunks):
        prog_start = file_prog_start + float(j) * float(numSteps - 1) * prog_per_chunk_step
        chunkname = '{}_c{:d}'.format(wkspname, j)
        if unfocusname != '':  # only create unfocus chunk if needed
            unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

        # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
        # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
        loader = self.__createLoader(filename, chunkname,
                                     progstart=prog_start,
                                     progstop=prog_start + prog_per_chunk_step)
        if canSkipLoadingLogs:
            loader.setProperty('LoadLogs', False)
        # forward the chunk boundaries to the loader; strings need the
        # value-based setter
        for key, value in chunk.items():
            if isinstance(value, str):
                loader.setPropertyValue(key, value)
            else:
                loader.setProperty(key, value)
        loader.execute()

        # copy the necessary logs onto the workspace
        if canSkipLoadingLogs:
            CopyLogs(InputWorkspace=wkspname, OutputWorkspace=chunkname,
                     MergeStrategy='WipeExisting')

        # get the underlying loader name if we used the generic one
        if self.__loaderName == 'Load':
            self.__loaderName = loader.getPropertyValue('LoaderName')
        # only LoadEventNexus supports turning off log loading
        canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus'

        if determineCharacterizations and j == 0:
            self.__determineCharacterizations(filename, chunkname)  # updates instance variable
            determineCharacterizations = False
        prog_start += prog_per_chunk_step

        if self.filterBadPulses > 0.:
            FilterBadPulses(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            LowerCutoff=self.filterBadPulses,
                            startProgress=prog_start,
                            endProgress=prog_start + prog_per_chunk_step)
        prog_start += prog_per_chunk_step

        # absorption correction workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            # correction is defined in wavelength, so round-trip the units
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='Wavelength', EMode='Elastic')
            Divide(LHSWorkspace=chunkname, RHSWorkspace=self.absorption,
                   OutputWorkspace=chunkname,
                   startProgress=prog_start,
                   endProgress=prog_start + prog_per_chunk_step)
            ConvertUnits(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                         Target='TOF', EMode='Elastic')
        prog_start += prog_per_chunk_step

        AlignAndFocusPowder(InputWorkspace=chunkname, OutputWorkspace=chunkname,
                            UnfocussedWorkspace=unfocusname_chunk,
                            startProgress=prog_start,
                            endProgress=prog_start + 2. * prog_per_chunk_step,
                            **self.kwargs)
        prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

        if j == 0:
            # first chunk becomes the accumulation target(s)
            self.__updateAlignAndFocusArgs(chunkname)
            RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=wkspname)
            if unfocusname != '':
                RenameWorkspace(InputWorkspace=unfocusname_chunk, OutputWorkspace=unfocusname)
        else:
            RemoveLogs(Workspace=chunkname)  # accumulation has them already
            Plus(LHSWorkspace=wkspname, RHSWorkspace=chunkname, OutputWorkspace=wkspname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                 startProgress=prog_start,
                 endProgress=prog_start + prog_per_chunk_step)
            DeleteWorkspace(Workspace=chunkname)
            if unfocusname != '':
                RemoveLogs(Workspace=unfocusname_chunk)  # accumulation has them already
                Plus(LHSWorkspace=unfocusname, RHSWorkspace=unfocusname_chunk,
                     OutputWorkspace=unfocusname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'],
                     startProgress=prog_start,
                     endProgress=prog_start + prog_per_chunk_step)
                DeleteWorkspace(Workspace=unfocusname_chunk)
            # compression only makes sense for events and a positive tolerance
            if self.kwargs['PreserveEvents'] and self.kwargs['CompressTolerance'] > 0.:
                CompressEvents(InputWorkspace=wkspname, OutputWorkspace=wkspname,
                               WallClockTolerance=self.kwargs['CompressWallClockTolerance'],
                               Tolerance=self.kwargs['CompressTolerance'],
                               StartTime=self.kwargs['CompressStartTime'])