def test_monitor_loaded_in_ws_when_mon_in_spectra_input_and_LoadMonitor_is_true(self):
    """Monitors included in the spectra range end up in the main 'evs_raw'
    workspace and no separate 'evs_raw_monitors' workspace is created,
    even when monitor loading is requested.
    """
    diff_mode = "FoilOut"
    self._run_load("14188", "1-198", diff_mode, load_mon=True)
    self.assertTrue(mtd.doesExist('evs_raw'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(mtd['evs_raw'].getNumberHistograms(), 198)
    self.assertFalse(mtd.doesExist('evs_raw_monitors'))
def testRawWorkspaceOutput(self):
    """The OutputRawWorkspace must hold the unprocessed detector data
    (input minus the first spectrum) while sharing binning, Ei and
    wavelength metadata with the reduced output workspace.
    """
    outWSName = 'outWS'
    rawWSName = 'rawWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'OutputRawWorkspace': rawWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    self.assertTrue(mtd.doesExist(rawWSName))
    rawWS = mtd[rawWSName]
    # Raw counts/errors equal the input with the first spectrum dropped.
    ys = rawWS.extractY()
    originalYS = inWS.extractY()
    numpy.testing.assert_almost_equal(ys, originalYS[1:, :])
    es = rawWS.extractE()
    originalES = inWS.extractE()
    numpy.testing.assert_almost_equal(es, originalES[1:, :])
    # X axis of the raw output must match the reduced output.
    xs = rawWS.extractX()
    outXS = outWS.extractX()
    numpy.testing.assert_almost_equal(xs, outXS)
    Ei = rawWS.getRun().getProperty('Ei').value
    outEi = outWS.getRun().getProperty('Ei').value
    self.assertEqual(Ei, outEi)
    # Bug fix: read the wavelength from rawWS. Previously both values were
    # taken from outWS, making the assertion compare a value with itself.
    wavelength = rawWS.getRun().getProperty('wavelength').value
    outWavelength = outWS.getRun().getProperty('wavelength').value
    self.assertEqual(wavelength, outWavelength)
def test_unary_ops_with_workspaces_not_in_ADS(self):
    """A unary operation must publish its result in the ADS whether or not
    the operand workspace itself is stored there.
    """
    common = dict(SignalInput=[0], ErrorInput=[0], Dimensionality=1,
                  Extents=[0, 1], NumberOfBins=1, Names=['a'], Units=['TOF'])
    mdws = CreateMDHistoWorkspace(StoreInADS=False, **common)
    mdws_ads = CreateMDHistoWorkspace(StoreInADS=True, **common)
    # The assigned variable name becomes the ADS name of the result.
    result1 = ~mdws
    self.assertTrue(mtd.doesExist('result1'))
    result2 = ~mdws_ads
    self.assertTrue(mtd.doesExist('result2'))
def testRawWorkspaceOutput(self):
    """The OutputRawWorkspace must hold the unprocessed detector data
    (input minus the last spectrum) while sharing binning, Ei and
    wavelength metadata with the reduced output workspace.
    """
    outWSName = 'outWS'
    rawWSName = 'rawWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'OutputRawWorkspace': rawWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    self.assertTrue(mtd.doesExist(rawWSName))
    rawWS = mtd[rawWSName]
    # Raw counts/errors equal the input with the last spectrum dropped.
    ys = rawWS.extractY()
    originalYS = inWS.extractY()
    assert_almost_equal(ys, originalYS[:-1, :])
    es = rawWS.extractE()
    originalES = inWS.extractE()
    assert_almost_equal(es, originalES[:-1, :])
    # X axis of the raw output must match the reduced output.
    xs = rawWS.extractX()
    outXS = outWS.extractX()
    assert_almost_equal(xs, outXS)
    Ei = rawWS.getRun().getProperty('Ei').value
    outEi = outWS.getRun().getProperty('Ei').value
    self.assertEqual(Ei, outEi)
    # Bug fix: read the wavelength from rawWS. Previously both values were
    # taken from outWS, making the assertion compare a value with itself.
    wavelength = rawWS.getRun().getProperty('wavelength').value
    outWavelength = outWS.getRun().getProperty('wavelength').value
    self.assertEqual(wavelength, outWavelength)
def __setupCalibration(self, wksp):
    '''Convert whatever calibration/grouping/masking into workspaces that will be passed down.

    Resolves self.__calWksp / self.__grpWksp / self.__mskWksp from (in order of
    precedence) explicit workspace properties, already-existing canonical ADS
    names, or by loading from CalFileName/GroupFilename using ``wksp`` as the
    instrument donor. Runs at most once per algorithm execution.
    '''
    if self.haveDeterminedCalibration:
        return  # nothing to do
    self.haveDeterminedCalibration = True

    # first see if the workspaces have been specified
    # check that the canonical names don't already exist as a backup
    if not self.getProperty('CalibrationWorkspace').isDefault:
        self.__calWksp = self.getPropertyValue('CalibrationWorkspace')
    elif not self.getProperty('OffsetsWorkspace').isDefault:
        # offsets are converted to a diff-cal workspace under the canonical name
        self.__calWksp = self.getPropertyValue('OffsetsWorkspace') + '_cal'
        ConvertDiffCal(OffsetsWorkspace=self.getPropertyValue('OffsetsWorkspace'),
                       OutputWorkspace=self.instr + '_cal')
        self.setProperty('CalibrationWorkspace', self.__calWksp)
    elif mtd.doesExist(self.instr + '_cal'):
        self.__calWksp = self.instr + '_cal'

    if not self.getProperty('GroupingWorkspace').isDefault:
        self.__grpWksp = self.getPropertyValue('GroupingWorkspace')
    elif mtd.doesExist(self.instr + '_group'):
        self.__grpWksp = self.instr + '_group'

    if not self.getProperty('MaskWorkspace').isDefault:
        self.__mskWksp = self.getPropertyValue('MaskWorkspace')
    elif mtd.doesExist(self.instr + '_mask'):
        self.__mskWksp = self.instr + '_mask'

    # check that anything was specified
    if self.getProperty('CalFileName').isDefault and self.getProperty('GroupFilename').isDefault:
        self.kwargs = self.__getAlignAndFocusArgs()
        return

    # decide what to load: anything not already resolved above
    loadCalibration = not bool(self.__calWksp)
    loadGrouping = not bool(self.__grpWksp)
    loadMask = not bool(self.__mskWksp)

    # load and update
    if loadCalibration or loadGrouping or loadMask:
        if not wksp:
            raise RuntimeError('Trying to load calibration without a donor workspace')
        LoadDiffCal(InputWorkspace=wksp,
                    Filename=self.getPropertyValue('CalFileName'),
                    GroupFilename=self.getPropertyValue('GroupFilename'),
                    MakeCalWorkspace=loadCalibration,
                    MakeGroupingWorkspace=loadGrouping,
                    MakeMaskWorkspace=loadMask,
                    WorkspaceName=self.instr)
        # record the canonical names that LoadDiffCal just created
        if loadCalibration:
            self.__calWksp = self.instr + '_cal'
            self.setPropertyValue('CalibrationWorkspace', self.instr + '_cal')
        if loadGrouping:
            self.__grpWksp = self.instr + '_group'
            self.setPropertyValue('GroupingWorkspace', self.instr + '_group')
        if loadMask:
            self.__mskWksp = self.instr + '_mask'
            self.setPropertyValue('MaskWorkspace', self.instr + '_mask')
    self.kwargs = self.__getAlignAndFocusArgs()
def testOutputIsDistribution(self):
    """Both the main output and the S(theta, E) output must be distributions."""
    outWSName = 'outWS'
    parameters = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'OutputSofThetaEnergyWorkspace': 'SofThetaE',
        'rethrow': True
    }
    run_algorithm('DirectILLReduction', **parameters)
    for name in (outWSName, 'SofThetaE'):
        self.assertTrue(mtd.doesExist(name))
        self.assertTrue(mtd[name].isDistribution())
def __determineCharacterizations(self, filename, wkspname):
    """Run PDDetermineCharacterizations for ``filename``/``wkspname``.

    When the workspace is not already in memory and a characterizations table
    is in use, only the file's metadata is loaded into a temporary workspace
    to locate the matching table row; the temporary is deleted afterwards.
    """
    useCharac = bool(self.charac is not None)
    loadFile = not mtd.doesExist(wkspname)

    # input workspace is only needed to find a row in the characterizations table
    tempname = None
    if loadFile:
        if useCharac:
            tempname = '__%s_temp' % wkspname
            # set the loader for this file
            loader = self.__createLoader(filename, tempname)
            loader.setProperty('MetaDataOnly', True)  # this is only supported by LoadEventNexus
            loader.execute()

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
    else:
        tempname = wkspname  # assume it is already loaded

    # put together argument list
    args = dict(ReductionProperties=self.getProperty('ReductionProperties').valueAsStr)
    for name in PROPS_FOR_PD_CHARACTER:
        prop = self.getProperty(name)
        if not prop.isDefault:
            args[name] = prop.value
    if tempname is not None:
        args['InputWorkspace'] = tempname
    if useCharac:
        args['Characterizations'] = self.charac

    PDDetermineCharacterizations(**args)

    # clean up the metadata-only temporary
    if loadFile and useCharac:
        DeleteWorkspace(Workspace=tempname)
def __accumulate(self, chunkname, sumname, chunkunfocusname, sumuunfocusname, firstrun, removelogs=False): """accumulate newdata `wkspname` into sum `sumwkspname` and delete `wkspname`""" # the first call to accumulate to a specific target should be a simple rename self.log().debug('__accumulate({}, {}, {}, {}, {})'.format(chunkname, sumname, chunkunfocusname, sumuunfocusname, firstrun)) if chunkname == sumname: return # there is nothing to be done if not firstrun: # if the sum workspace doesn't already exist, just rename if not mtd.doesExist(sumname): firstrun = True if firstrun: if chunkname != sumname: RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=sumname) if chunkunfocusname and chunkunfocusname != sumuunfocusname: RenameWorkspace(InputWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname) else: if removelogs: RemoveLogs(Workspace=chunkname) # accumulation has them already RebinToWorkspace(WorkspaceToRebin=chunkname, WorkspaceToMatch=sumname, OutputWorkspace=chunkname) Plus(LHSWorkspace=sumname, RHSWorkspace=chunkname, OutputWorkspace=sumname, ClearRHSWorkspace=self.kwargs['PreserveEvents']) DeleteWorkspace(Workspace=chunkname) self.__compressEvents(sumname) # could be smarter about when to run if chunkunfocusname and chunkunfocusname != sumuunfocusname: if removelogs: RemoveLogs(Workspace=chunkunfocusname) # accumulation has them already Plus(LHSWorkspace=sumuunfocusname, RHSWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname, ClearRHSWorkspace=self.kwargs['PreserveEvents']) DeleteWorkspace(Workspace=chunkunfocusname) self.__compressEvents(sumuunfocusname) # could be smarter about when to run
def testNormalisationToTimeWhenMonitorCountsAreTooLow(self):
    """When the monitor sum is too small, normalisation must fall back to
    the run duration instead of the monitor counts.
    """
    outWSName = 'outWS'
    duration = 3612.3
    runLogs = mtd[self._TEST_WS_NAME].mutableRun()
    runLogs.addProperty('duration', duration, True)
    monsum = 10  # deliberately far too few monitor counts
    runLogs.addProperty('monitor.monsum', monsum, True)
    parameters = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'FlatBkg': 'Flat Bkg OFF',
        'IncidentEnergyCalibration': 'Energy Calibration OFF',
        'Normalisation': 'Normalisation Monitor',
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **parameters)
    self.assertTrue(mtd.doesExist(outWSName))
    normalised = mtd[outWSName]
    original = mtd[self._TEST_WS_NAME]
    # Detector spectra (last spectrum excluded) divided by the duration.
    assert_almost_equal(normalised.extractY(), original.extractY()[:-1, :] / duration)
    assert_almost_equal(normalised.extractE(), original.extractE()[:-1, :] / duration)
def testNoOperationClonesInputWorkspace(self):
    """A run with no corrections must clone the input rather than alias it."""
    ws = self._cloneTestWorkspace()
    outWSName = 'outWS'
    run_algorithm('DirectILLApplySelfShielding',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  rethrow=True)
    # If the previous run didn't clone the input workspace, the two later
    # calls will be triggered to use 'outWS' as the input.
    self.assertTrue(mtd.doesExist(outWSName))
    corrFactor = 0.43
    corrWS = self._cloneTestWorkspace('correctionWS')
    for index in range(corrWS.getNumberHistograms()):
        corrWS.dataY(index).fill(corrFactor)
        corrWS.dataE(index).fill(0)
    parameters = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'SelfShieldingCorrectionWorkspace': corrWS,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **parameters)
    run_algorithm('DirectILLApplySelfShielding', **parameters)
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    # Applying the correction twice to the original input divides only once.
    assert_almost_equal(outWS.extractY(), ws.extractY() / corrFactor)
    assert_almost_equal(outWS.extractE(), ws.extractE() / corrFactor)
def __accumulate(self, chunkname, sumname, chunkunfocusname, sumuunfocusname, firstrun, removelogs=False): """accumulate newdata `wkspname` into sum `sumwkspname` and delete `wkspname`""" # the first call to accumulate to a specific target should be a simple rename self.log().debug('__accumulate({}, {}, {}, {}, {})'.format(chunkname, sumname, chunkunfocusname, sumuunfocusname, firstrun)) if chunkname == sumname: return # there is nothing to be done if not firstrun: # if the sum workspace doesn't already exist, just rename if not mtd.doesExist(sumname): firstrun = True if firstrun: if chunkname != sumname: RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=sumname) if chunkunfocusname and chunkunfocusname != sumuunfocusname: RenameWorkspace(InputWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname) else: if removelogs: RemoveLogs(Workspace=chunkname) # accumulation has them already Plus(LHSWorkspace=sumname, RHSWorkspace=chunkname, OutputWorkspace=sumname, ClearRHSWorkspace=self.kwargs['PreserveEvents']) DeleteWorkspace(Workspace=chunkname) self.__compressEvents(sumname) # could be smarter about when to run if chunkunfocusname and chunkunfocusname != sumuunfocusname: if removelogs: RemoveLogs(Workspace=chunkunfocusname) # accumulation has them already Plus(LHSWorkspace=sumuunfocusname, RHSWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname, ClearRHSWorkspace=self.kwargs['PreserveEvents']) DeleteWorkspace(Workspace=chunkunfocusname) self.__compressEvents(sumuunfocusname) # could be smarter about when to run
def load_file_and_apply(self, filename, ws_name):
    """Load ``filename`` into ``ws_name`` and apply the optional corrections
    selected via the algorithm's properties.

    Each correction (instrument definition, calibration table, ISAW DetCal,
    instrument parameters, masking, momentum cropping) is applied only when
    the corresponding instance flag set up elsewhere is enabled.
    """
    Load(Filename=filename,
         OutputWorkspace=ws_name,
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)
    if self._load_inst:
        LoadInstrument(Workspace=ws_name,
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if self._apply_cal:
        ApplyCalibration(Workspace=ws_name,
                         CalibrationTable=self.getProperty("ApplyCalibration").value)
    if self._detcal:
        LoadIsawDetCal(InputWorkspace=ws_name, Filename=self.getProperty("DetCal").value)
    if self._copy_params:
        CopyInstrumentParameters(OutputWorkspace=ws_name,
                                 InputWorkspace=self.getProperty("CopyInstrumentParameters").value)
    if self._masking:
        # The mask is loaded once and cached in the ADS under '__mask' for reuse
        # across multiple files.
        if not mtd.doesExist('__mask'):
            LoadMask(Instrument=mtd[ws_name].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
        MaskDetectors(Workspace=ws_name, MaskedWorkspace='__mask')
    if self.XMin != Property.EMPTY_DBL and self.XMax != Property.EMPTY_DBL:
        # Crop in momentum only when both limits were provided.
        ConvertUnits(InputWorkspace=ws_name, OutputWorkspace=ws_name, Target='Momentum')
        CropWorkspaceForMDNorm(InputWorkspace=ws_name,
                               OutputWorkspace=ws_name,
                               XMin=self.XMin,
                               XMax=self.XMax)
def testSuccessWhenEverythingDisabled(self):
    """With every processing step disabled, the output must equal the
    input's detector data (first spectrum dropped).
    """
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'FlatBkg': 'Flat Bkg OFF',
        'IncidentEnergyCalibration': 'Energy Calibration OFF',
        'Normalisation': 'Normalisation OFF',
        'ElasticChannel': 'Default Elastic Channel',
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), inWS.getNumberHistograms() - 1)
    xs = outWS.extractX()
    originalXs = inWS.extractX()
    numpy.testing.assert_almost_equal(xs, originalXs[1:, :])
    ys = outWS.extractY()
    originalYs = inWS.extractY()
    numpy.testing.assert_almost_equal(ys, originalYs[1:, :])
    es = outWS.extractE()
    originalEs = inWS.extractE()
    numpy.testing.assert_almost_equal(es, originalEs[1:, :])
def testMaskedComponents(self):
    """Only spectra whose detector belongs to the masked component
    ('tube_1') may be flagged in the diagnostics output.
    """
    inWS = mtd[self._RAW_WS_NAME]
    spectraCount = inWS.getNumberHistograms()
    outWSName = 'diagnosticsWS'
    kwargs = {
        'InputWorkspace': self._RAW_WS_NAME,
        'OutputWorkspace': outWSName,
        'ElasticPeakDiagnostics': 'Peak Diagnostics OFF',
        'BkgDiagnostics': 'Bkg Diagnostics OFF',
        'BeamStopDiagnostics': 'Beam Stop Diagnostics OFF',
        'DefaultMask': 'Default Mask OFF',
        'MaskedComponents': 'tube_1',
        'rethrow': True
    }
    run_algorithm('DirectILLDiagnostics', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), spectraCount)
    self.assertEqual(outWS.blocksize(), 1)
    for i in range(spectraCount):
        Ys = outWS.readY(i)
        detector = outWS.getDetector(i)
        componentName = detector.getFullName()
        if 'tube_1' in componentName:
            self.assertEqual(Ys[0], 1)
        else:
            self.assertEqual(Ys[0], 0)
def testNoOperationClonesInputWorkspace(self):
    """A run with no corrections must clone the input rather than alias it."""
    ws = self._cloneTestWorkspace()
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    # If the previous run didn't clone the input workspace, the two later
    # calls will be triggered to use 'outWS' as the input.
    self.assertTrue(mtd.doesExist(outWSName))
    corrFactor = 0.43
    corrWS = self._cloneTestWorkspace('correctionWS')
    for i in range(corrWS.getNumberHistograms()):
        ys = corrWS.dataY(i)
        ys.fill(corrFactor)
        es = corrWS.dataE(i)
        es.fill(0)
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'SelfShieldingCorrectionWorkspace': corrWS,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    ys = outWS.extractY()
    originalYs = ws.extractY()
    numpy.testing.assert_almost_equal(ys, originalYs / corrFactor)
    es = outWS.extractE()
    originalEs = ws.extractE()
    numpy.testing.assert_almost_equal(es, originalEs / corrFactor)
def testSelfShieldingCorrections(self):
    """Applying a flat correction factor must divide Y and E by that factor."""
    ws = self._cloneTestWorkspace()
    corrFactor = 0.789
    corrWS = self._cloneTestWorkspace('correctionWS')
    for i in range(corrWS.getNumberHistograms()):
        ys = corrWS.dataY(i)
        ys.fill(corrFactor)
        es = corrWS.dataE(i)
        es.fill(0)
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'SelfShieldingCorrectionWorkspace': corrWS,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    ys = outWS.extractY()
    originalYs = ws.extractY()
    numpy.testing.assert_almost_equal(ys, originalYs / corrFactor)
    es = outWS.extractE()
    originalEs = ws.extractE()
    numpy.testing.assert_almost_equal(es, originalEs / corrFactor)
def testMaskedComponents(self):
    """Only spectra whose detector belongs to the masked component
    ('tube_1') may be flagged in the diagnostics output.
    """
    spectraCount = mtd[self._RAW_WS_NAME].getNumberHistograms()
    outWSName = 'diagnosticsWS'
    run_algorithm('DirectILLDiagnostics',
                  InputWorkspace=self._RAW_WS_NAME,
                  OutputWorkspace=outWSName,
                  ElasticPeakDiagnostics='Peak Diagnostics OFF',
                  BkgDiagnostics='Bkg Diagnostics OFF',
                  BeamStopDiagnostics='Beam Stop Diagnostics OFF',
                  DefaultMask='Default Mask OFF',
                  MaskedComponents='tube_1',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    diagnostics = mtd[outWSName]
    self.assertEqual(diagnostics.getNumberHistograms(), spectraCount)
    self.assertEqual(diagnostics.blocksize(), 1)
    for index in range(spectraCount):
        flag = diagnostics.readY(index)[0]
        fullName = diagnostics.getDetector(index).getFullName()
        expected = 1 if 'tube_1' in fullName else 0
        self.assertEqual(flag, expected)
def testSuccessWhenEverythingDisabled(self):
    """With every processing step disabled, the output must equal the
    input's detector data (last spectrum dropped).
    """
    outWSName = 'outWS'
    run_algorithm('DirectILLCollectData',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  FlatBkg='Flat Bkg OFF',
                  IncidentEnergyCalibration='Energy Calibration OFF',
                  Normalisation='Normalisation OFF',
                  ElasticChannel='Default Elastic Channel',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    processed = mtd[outWSName]
    original = mtd[self._TEST_WS_NAME]
    self.assertEqual(processed.getNumberHistograms(), original.getNumberHistograms() - 1)
    # Everything except the last spectrum should pass through unchanged.
    assert_almost_equal(processed.extractX(), original.extractX()[:-1, :])
    assert_almost_equal(processed.extractY(), original.extractY()[:-1, :])
    assert_almost_equal(processed.extractE(), original.extractE()[:-1, :])
def testAbsoluteUnits(self):
    """A reduction with absolute units normalisation against an integrated
    vanadium workspace must complete and produce the output.
    """
    _add_natural_angle_step_parameter(self._TEST_WS_NAME)
    # Identical hollow-cylinder geometry for sample and vanadium reference.
    cylinder = {
        'Shape': 'HollowCylinder',
        'Height': 4.0,
        'InnerRadius': 1.9,
        'OuterRadius': 2.0,
        'Center': [0.0, 0.0, 0.0]}
    SetSample(self._TEST_WS_NAME, cylinder,
              {'ChemicalFormula': 'Cd S', 'SampleNumberDensity': 0.01})
    SetSample(self._VANADIUM_WS_NAME, dict(cylinder),
              {'ChemicalFormula': 'V', 'SampleNumberDensity': 0.1})
    outWSName = 'outWS'
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  SubalgorithmLogging='Logging ON',
                  AbsoluteUnitsNormalisation='Absolute Units ON',
                  IntegratedVanadiumWorkspace=self._VANADIUM_WS_NAME,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
def test_monitors_loaded_into_ADS_when_monitor_load_is_true_for_back_scattering(self):
    """Back-scattering load with monitor loading enabled must create the
    'evs_raw_monitors' workspace with the expected TOF range.
    """
    self._run_load("14188", "3-134", "SingleDifference", load_mon=True)
    self.assertTrue(mtd.doesExist('evs_raw_monitors'))
    monitors = mtd['evs_raw_monitors']
    self.assertTrue(isinstance(monitors, MatrixWorkspace))
    binEdges = monitors.readX(0)
    self.assertEqual(binEdges[0], 5)
    self.assertEqual(binEdges[-1], 19990)
def PyExec(self):
    """Reduce powder data (from files or an input workspace) and save it in
    GSAS format for PDFgetN.
    """
    self._loadCharacterizations()
    charac = ""
    if mtd.doesExist("characterizations"):
        charac = "characterizations"

    # arguments for both AlignAndFocusPowder and AlignAndFocusPowderFromFiles
    self._alignArgs['OutputWorkspace'] = self.getPropertyValue("OutputWorkspace")
    self._alignArgs['RemovePromptPulseWidth'] = self.getProperty("RemovePromptPulseWidth").value
    self._alignArgs['CompressTolerance'] = COMPRESS_TOL_TOF
    self._alignArgs['PreserveEvents'] = True
    self._alignArgs['CalFileName'] = self.getProperty("CalibrationFile").value
    self._alignArgs['Params'] = self.getProperty("Binning").value
    self._alignArgs['ResampleX'] = self.getProperty("ResampleX").value
    self._alignArgs['Dspacing'] = True
    self._alignArgs['CropWavelengthMin'] = self.getProperty('CropWavelengthMin').value
    self._alignArgs['CropWavelengthMax'] = self.getProperty('CropWavelengthMax').value
    self._alignArgs['ReductionProperties'] = '__snspowderreduction'

    wksp = self.getProperty("InputWorkspace").value
    if wksp is None:  # run from file with caching
        wksp = AlignAndFocusPowderFromFiles(Filename=self.getProperty("Filename").value,
                                            CacheDir=self.getProperty("CacheDir").value,
                                            MaxChunkSize=self.getProperty("MaxChunkSize").value,
                                            FilterBadPulses=self.getProperty("FilterBadPulses").value,
                                            Characterizations=charac,
                                            FrequencyLogNames=self.getProperty("FrequencyLogNames").value,
                                            WaveLengthLogNames=self.getProperty("WaveLengthLogNames").value,
                                            **(self._alignArgs))
    else:  # process the input workspace
        self.log().information("Using input workspace. Ignoring properties 'Filename', " +
                               "'OutputWorkspace', 'MaxChunkSize', and 'FilterBadPulses'")

        # get the correct row of the table
        PDDetermineCharacterizations(InputWorkspace=wksp,
                                     Characterizations=charac,
                                     ReductionProperties="__snspowderreduction",
                                     FrequencyLogNames=self.getProperty("FrequencyLogNames").value,
                                     WaveLengthLogNames=self.getProperty("WaveLengthLogNames").value)
        wksp = AlignAndFocusPowder(InputWorkspace=wksp, **(self._alignArgs))

    wksp = NormaliseByCurrent(InputWorkspace=wksp, OutputWorkspace=wksp)
    # mark the data as already normalised for the GSAS writer
    wksp.getRun()['gsas_monitor'] = 1
    if self._iparmFile is not None:
        wksp.getRun()['iparm_file'] = self._iparmFile

    wksp = SetUncertainties(InputWorkspace=wksp, OutputWorkspace=wksp,
                            SetError="sqrtOrOne")
    SaveGSS(InputWorkspace=wksp,
            Filename=self.getProperty("PDFgetNFile").value,
            SplitFiles=False, Append=False,
            MultiplyByBinWidth=False,
            Bank=mantid.pmds["__snspowderreduction"]["bank"].value,
            Format="SLOG", ExtendedHeader=True)

    self.setProperty("OutputWorkspace", wksp)
def testSuccessfulRun(self):
    """A plain reduction run must produce the output workspace."""
    outputName = 'outWS'
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outputName,
                  SubalgorithmLogging='Logging ON',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outputName))
def _delete(self, ws):
    """Delete the given workspace in ws if it is not protected, and
    deletion is actually turned on.
    """
    if not self._doDelete:
        return
    try:
        name = str(ws)
    except RuntimeError:
        return
    if name in self._protected:
        return
    if mtd.doesExist(name):
        DeleteWorkspace(Workspace=name, EnableLogging=self._deleteAlgorithmLogging)
def __determineCharacterizations(self, filename, wkspname):
    """Determine characterizations and set up calibration for ``filename``.

    Loads only the file's metadata into a temporary workspace when the data
    is not already in memory and either the characterizations table or
    calibration loading needs it; falls back to a full load if the
    metadata-only path fails. The temporary is deleted afterwards.
    """
    useCharTable = self.__isCharacterizationsNeeded()
    needToLoadCal = self.__needToLoadCal()

    # something needs to use the workspace and it needs to not already be in memory
    loadFile = (useCharTable or needToLoadCal) and (not mtd.doesExist(wkspname))

    # input workspace is only needed to find a row in the characterizations table
    tempname = None
    if loadFile:
        if useCharTable or needToLoadCal:
            tempname = '__%s_temp' % wkspname
            # set the loader for this file
            try:
                # MetaDataOnly=True is only supported by LoadEventNexus
                loader = self.__createLoader(filename, tempname, MetaDataOnly=True)
                loader.execute()

                # get the underlying loader name if we used the generic one
                if self.__loaderName == 'Load':
                    self.__loaderName = loader.getPropertyValue('LoaderName')
            except RuntimeError:
                # give up and load the whole file - this can be expensive
                Load(OutputWorkspace=tempname, Filename=filename)
    else:
        tempname = wkspname  # assume it is already loaded

    # some bit of data has been loaded so use it to get the characterizations
    self.__setupCalibration(tempname)

    # put together argument list for determining characterizations
    args = dict(ReductionProperties=self.getProperty('ReductionProperties').valueAsStr)
    for name in PROPS_FOR_PD_CHARACTER:
        prop = self.getProperty(name)
        if not prop.isDefault:
            args[name] = prop.value
    if tempname is not None:
        args['InputWorkspace'] = tempname
    if useCharTable:
        args['Characterizations'] = self.charac

    if useCharTable:
        PDDetermineCharacterizations(**args)

    # clean up the temporary workspace
    if loadFile and (useCharTable or needToLoadCal):
        DeleteWorkspace(Workspace=tempname)
def _createDiagnosticsReportTable(reportWSName, numberHistograms, algorithmLogging):
    """Return a table workspace for detector diagnostics reporting."""
    # Reuse an existing report table under the same name, if any.
    if mtd.doesExist(reportWSName):
        table = mtd[reportWSName]
    else:
        table = CreateEmptyTableWorkspace(OutputWorkspace=reportWSName,
                                          EnableLogging=algorithmLogging)
    existingColumnNames = table.getColumnNames()
    if 'WorkspaceIndex' not in existingColumnNames:
        table.addColumn('int', 'WorkspaceIndex', _PLOT_TYPE_X)
    table.setRowCount(numberHistograms)
    for index in range(numberHistograms):
        table.setCell('WorkspaceIndex', index, index)
    return table
def testDirectBeamMasking(self):
    """Only the spectra actually shadowed by the beam stop are flagged."""
    beamWSName = 'beam_masking_ws'
    run_algorithm('CreateSampleWorkspace',
                  OutputWorkspace=beamWSName,
                  Function='One Peak',
                  rethrow=True)
    run_algorithm('SetInstrumentParameter',
                  Workspace=beamWSName,
                  ParameterName='beam_stop_diagnostics_spectra',
                  ParameterType='String',
                  Value='43-57, 90-110, 145-155',  # Spectrum numbers.
                  rethrow=True)
    beamWS = mtd[beamWSName]
    # From now on, we work on workspace indices.
    # First range is fully covered by the beam stop. The second range is
    # only partially covered; only it will be recognized as the beam stop's
    # shadow. The third range is not covered at all.
    shadowed = list(range(42, 57)) + list(range(92, 105))
    for index in shadowed:
        beamWS.dataY(index)[:] = 0.0
    outWSName = 'diagnosticsWS'
    run_algorithm('DirectILLDiagnostics',
                  InputWorkspace=beamWSName,
                  OutputWorkspace=outWSName,
                  ElasticPeakDiagnostics='Peak Diagnostics OFF',
                  BkgDiagnostics='Bkg Diagnostics OFF',
                  DefaultMask='Default Mask OFF',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    diagnostics = mtd[outWSName]
    self.assertEqual(diagnostics.getNumberHistograms(), beamWS.getNumberHistograms())
    self.assertEqual(diagnostics.blocksize(), 1)
    for index in range(diagnostics.getNumberHistograms()):
        expected = 1 if 92 <= index < 105 else 0
        self.assertEqual(diagnostics.readY(index)[0], expected)
def test_binary_ops_with_workspaces_not_in_ADS(self):
    """Binary operations must publish their result in the ADS regardless of
    whether either operand is stored there; in-place ops only keep an ADS
    entry for operands that were already in the ADS.
    """
    ws = CreateSampleWorkspace(StoreInADS=False)
    initialY = ws.readY(0)[0]
    ws_ads = CreateSampleWorkspace(StoreInADS=True)
    # The assigned variable name becomes the ADS name of each result.
    result1 = ws + ws
    result2 = ws + ws_ads
    result3 = ws_ads + ws
    result4 = ws_ads + ws_ads
    result5 = ws + 1
    result6 = 1 + ws
    result7 = ws_ads + 1
    result8 = 1 + ws_ads
    for name in ('result1', 'result2', 'result3', 'result4',
                 'result5', 'result6', 'result7', 'result8'):
        self.assertTrue(mtd.doesExist(name))
    ws += 1
    self.assertFalse(mtd.doesExist('ws'))
    ws_ads += 1
    self.assertTrue(mtd.doesExist('ws_ads'))
def testDirectBeamMasking(self):
    """Only the spectra actually shadowed by the beam stop are flagged."""
    beamWSName = 'beam_masking_ws'
    kwargs = {
        'OutputWorkspace': beamWSName,
        'Function': 'One Peak',
        'rethrow': True
    }
    run_algorithm('CreateSampleWorkspace', **kwargs)
    kwargs = {
        'Workspace': beamWSName,
        'ParameterName': 'beam_stop_diagnostics_spectra',
        'ParameterType': 'String',
        'Value': '43-57, 90-110, 145-155',  # Spectrum numbers.
        'rethrow': True
    }
    run_algorithm('SetInstrumentParameter', **kwargs)
    beamWS = mtd[beamWSName]
    # From now on, we work on workspace indices.
    # First range is fully covered by the beam stop.
    for i in range(42, 57):
        ys = beamWS.dataY(i)
        ys *= 0.0
    # Second range is partially covered by the beam stop.
    # Actually, only this range will be recognized as beam stop's shadow.
    for i in range(92, 105):
        ys = beamWS.dataY(i)
        ys *= 0.0
    # The third range is not covered by the beam stop at all.
    outWSName = 'diagnosticsWS'
    kwargs = {
        'InputWorkspace': beamWSName,
        'OutputWorkspace': outWSName,
        'ElasticPeakDiagnostics': 'Peak Diagnostics OFF',
        'BkgDiagnostics': 'Bkg Diagnostics OFF',
        'DefaultMask': 'Default Mask OFF',
        'rethrow': True
    }
    run_algorithm('DirectILLDiagnostics', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), beamWS.getNumberHistograms())
    self.assertEqual(outWS.blocksize(), 1)
    for i in range(outWS.getNumberHistograms()):
        ys = outWS.readY(i)
        if i >= 92 and i < 105:
            self.assertEqual(ys[0], 1)
        else:
            self.assertEqual(ys[0], 0)
def testOutputHasCommonBinningWithInput(self):
    """The self-shielding correction workspace must share the input's binning."""
    self._setDefaultSample(self._TEST_WS_NAME)
    outWSName = 'correctionWS'
    kwargs = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLSelfShielding', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    inWS = mtd[self._TEST_WS_NAME]
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), inWS.getNumberHistograms())
    xs = outWS.extractX()
    originalXs = inWS.extractX()
    numpy.testing.assert_almost_equal(xs, originalXs[:, :])
def testOutputHasCommonBinningWithInput(self):
    """The self-shielding correction workspace must share the input's binning."""
    self._setDefaultSample(self._TEST_WS_NAME)
    outWSName = 'correctionWS'
    run_algorithm('DirectILLSelfShielding',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    original = mtd[self._TEST_WS_NAME]
    correction = mtd[outWSName]
    self.assertEqual(correction.getNumberHistograms(), original.getNumberHistograms())
    numpy.testing.assert_almost_equal(correction.extractX(), original.extractX()[:, :])
def testOutputIncidentEnergyWorkspaceWhenEnergyCalibrationIsOff(self):
    """The Ei output workspace must hold the input's nominal Ei when
    energy calibration is disabled.
    """
    eiWSName = 'Ei'
    run_algorithm('DirectILLCollectData',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace='outWS',
                  IncidentEnergyCalibration='Energy Calibration OFF',
                  OutputIncidentEnergyWorkspace=eiWSName,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(eiWSName))
    nominalEi = mtd[self._TEST_WS_NAME].run().getProperty('Ei').value
    self.assertEqual(mtd[eiWSName].readY(0)[0], nominalEi)
def testExecutes(self):
    """ReflectometryILLPolarizationCor must execute and produce the '++'
    corrected workspace.
    """
    ws = illhelpers.create_poor_mans_d17_workspace()
    illhelpers.add_flipper_configuration_D17(ws, 1, 1)
    mtd.add('ws', ws)
    illhelpers.refl_create_beam_position_ws('beamPosWS', ws, 0., 128)
    ws = illhelpers.refl_preprocess('ws', ws, 'beamPosWS')
    ws = illhelpers.refl_sum_foreground('ws', 'SumInLambda', ws)
    alg = create_algorithm('ReflectometryILLPolarizationCor',
                           InputWorkspaces='ws',
                           OutputWorkspace='corrected',
                           EfficiencyFile='ILL/D17/PolarizationFactors.txt',
                           rethrow=True,
                           child=True)
    assertRaisesNothing(self, alg.execute)
    self.assertTrue(mtd.doesExist('corrected_++'))
def __determineCharacterizations(self, filename, wkspname):
    """Determine characterizations and set up calibration for ``filename``.

    Loads only the file's metadata into a temporary workspace when the data
    is not already in memory and either the characterizations table or
    calibration loading needs it; falls back to a full load if the
    metadata-only path fails. The temporary is deleted afterwards.
    """
    useCharTable = self.__isCharacterizationsNeeded()
    needToLoadCal = self.__needToLoadCal()

    # something needs to use the workspace and it needs to not already be in memory
    loadFile = (useCharTable or needToLoadCal) and (not mtd.doesExist(wkspname))

    # input workspace is only needed to find a row in the characterizations table
    tempname = None
    if loadFile:
        if useCharTable or needToLoadCal:
            tempname = '__%s_temp' % wkspname
            # set the loader for this file
            try:
                # MetaDataOnly=True is only supported by LoadEventNexus
                loader = self.__createLoader(filename, tempname, MetaDataOnly=True)
                loader.execute()

                # get the underlying loader name if we used the generic one
                if self.__loaderName == 'Load':
                    self.__loaderName = loader.getPropertyValue('LoaderName')
            except RuntimeError:
                # give up and load the whole file - this can be expensive
                Load(OutputWorkspace=tempname, Filename=filename)
    else:
        tempname = wkspname  # assume it is already loaded

    # some bit of data has been loaded so use it to get the characterizations
    self.__setupCalibration(tempname)

    # put together argument list for determining characterizations
    args = dict(ReductionProperties=self.getProperty('ReductionProperties').valueAsStr)
    for name in PROPS_FOR_PD_CHARACTER:
        prop = self.getProperty(name)
        if not prop.isDefault:
            args[name] = prop.value
    if tempname is not None:
        args['InputWorkspace'] = tempname
    if useCharTable:
        args['Characterizations'] = self.charac

    if useCharTable:
        PDDetermineCharacterizations(**args)

    # clean up the temporary workspace
    if loadFile and (useCharTable or needToLoadCal):
        DeleteWorkspace(Workspace=tempname)
def testQRebinning(self):
    """Rebinning in momentum transfer with explicit start, step and stop."""
    outWSName = 'outWS'
    qMin, qStep, qMax = 2.3, 0.1, 2.7
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  QBinningParams=[qMin, qStep, qMax],
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    ws = mtd[outWSName]
    self.assertEqual(ws.getAxis(0).getUnit().unitID(), 'MomentumTransfer')
    numpy.testing.assert_almost_equal(ws.readX(0), numpy.arange(qMin, qMax, qStep))
def testIncidentEnergyPanther(self):
    """Incident-energy calibration for a PANTHER run yields the expected Ei
    both in the sample logs and in the optional Ei output workspace."""
    outWSName = 'outWS'
    eiWSName = 'Ei'
    run_algorithm('DirectILLCollectData',
                  Run='ILL/PANTHER/002687.nxs',
                  OutputWorkspace=outWSName,
                  IncidentEnergyCalibration='Energy Calibration ON',
                  OutputIncidentEnergyWorkspace=eiWSName,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(eiWSName))
    calibratedEi = mtd[outWSName].run().getProperty('Ei').value
    assert_almost_equal(mtd[eiWSName].readY(0)[0], calibratedEi, 2)
    assert_almost_equal(calibratedEi, 75.37, 2)
def testBackgroundOutput(self):
    """The optional flat-background output workspace holds the unscaled
    background level regardless of FlatBkgScaling."""
    outWSName = 'outWS'
    outBkgWSName = 'outBkg'
    bkgScaling = 0.33  # Output should not be scaled, actually.
    run_algorithm('DirectILLCollectData',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  FlatBkg='Flat Bkg ON',
                  FlatBkgScaling=bkgScaling,
                  IncidentEnergyCalibration='Energy Calibration OFF',
                  Normalisation='Normalisation OFF',
                  OutputFlatBkgWorkspace=outBkgWSName,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outBkgWSName))
    assert_almost_equal(mtd[outBkgWSName].extractY(), self._BKG_LEVEL)
def testQRebinningBinWidthOnly(self):
    """Giving only a bin width in QBinningParams yields a momentum-transfer
    axis whose interior bins all have that width (edge bins may differ).

    Fix: the final assertion previously repeated the magic constant ``0.1``
    instead of reusing ``dQ``; the two could silently drift apart.
    """
    outWSName = 'outWS'
    dQ = 0.1
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'QBinningParams': [dQ],
        'rethrow': True
    }
    run_algorithm('DirectILLReduction', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    ws = mtd[outWSName]
    self.assertEqual(ws.getAxis(0).getUnit().unitID(), 'MomentumTransfer')
    xs = ws.readX(0)
    # Need enough bins to have an interior to check.
    self.assertGreater(len(xs), 3)
    dx = xs[1:] - xs[:-1]
    # Bin widths may differ at the edges.
    numpy.testing.assert_almost_equal(dx[1:-1], dQ)
def testBackgroundOutput(self):
    """The flat-background output workspace contains the unscaled background
    level even when a non-unit FlatBkgScaling is requested."""
    outWSName = 'outWS'
    outBkgWSName = 'outBkg'
    bkgScaling = 0.33  # Output should not be scaled, actually.
    run_algorithm('DirectILLCollectData',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  FlatBkg='Flat Bkg ON',
                  FlatBkgScaling=bkgScaling,
                  IncidentEnergyCalibration='Energy Calibration OFF',
                  Normalisation='Normalisation OFF',
                  OutputFlatBkgWorkspace=outBkgWSName,
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outBkgWSName))
    numpy.testing.assert_almost_equal(mtd[outBkgWSName].extractY(), self._BKG_LEVEL)
def testHybridERebinningUserConstrainedAutoRange(self):
    """The 'E0,a,E1' rebinning syntax constrains automatic binning to the
    user-given energy range."""
    outWSName = 'outWS'
    eMin = -0.23
    eMax = 0.32
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  EnergyRebinning='{},a,{}'.format(eMin, eMax),
                  Transposing='Transposing OFF',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    ws = mtd[outWSName]
    self.assertEqual(ws.getAxis(0).getUnit().unitID(), 'DeltaE')
    binEdges = ws.readX(0)
    self.assertEqual(binEdges[0], eMin)
    self.assertEqual(binEdges[-1], eMax)
def testHybridERebinningSingleUserRange(self):
    """A single 'start,step,stop' energy rebinning produces a uniform grid."""
    outWSName = 'outWS'
    eStart = -2.
    eStep = 0.13
    eStop = eStart + 40 * eStep
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  EnergyRebinning='{},{},{}'.format(eStart, eStep, eStop),
                  Transposing='Transposing OFF',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    ws = mtd[outWSName]
    self.assertEqual(ws.getAxis(0).getUnit().unitID(), 'DeltaE')
    # Pad the stop value slightly so arange includes the final edge.
    numpy.testing.assert_almost_equal(ws.readX(0),
                                      numpy.arange(eStart, eStop + 0.01, eStep))
def testHybridERebinningSingleUserRange(self):
    """Energy rebinning given as 'start,step,stop' yields evenly spaced edges."""
    outWSName = 'outWS'
    firstEdge = -2.
    binWidth = 0.13
    lastEdge = firstEdge + 40 * binWidth
    run_algorithm('DirectILLReduction',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  EnergyRebinning='{},{},{}'.format(firstEdge, binWidth, lastEdge),
                  Transposing='Transposing OFF',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    ws = mtd[outWSName]
    self.assertEqual(ws.getAxis(0).getUnit().unitID(), 'DeltaE')
    # Pad the stop value slightly so arange includes the final edge.
    expectedEdges = numpy.arange(firstEdge, lastEdge + 0.01, binWidth)
    assert_almost_equal(ws.readX(0), expectedEdges)
def testBackgroundSubtraction(self):
    """With FlatBkgScaling of 1, the output equals the input minus the
    constant background level.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``.
    """
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'FlatBkg': 'Flat Bkg ON',
        'FlatBkgScaling': 1.0,
        'IncidentEnergyCalibration': 'Energy Calibration OFF',
        'Normalisation': 'Normalisation OFF',
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    self.assertEqual(outWS.getNumberHistograms(), inWS.getNumberHistograms() - 1)
    ys = outWS.extractY()
    originalYs = inWS.extractY()
    # The first input spectrum is not present in the output, hence [1:, :].
    numpy.testing.assert_almost_equal(ys, originalYs[1:, :] - self._BKG_LEVEL)
def testAllDetectorsPass(self):
    """With default-mask and beam-stop diagnostics disabled, all detectors
    pass: every diagnostics value is 0 and no spectrum is masked.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``.
    """
    outWSName = 'diagnosticsWS'
    kwargs = {
        'InputWorkspace': self._RAW_WS_NAME,
        'OutputWorkspace': outWSName,
        'EPPWorkspace': self._EPP_WS_NAME,
        'BeamStopDiagnostics': 'Beam Stop Diagnostics OFF',
        'DefaultMask': 'Default Mask OFF',
        'rethrow': True
    }
    run_algorithm('DirectILLDiagnostics', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    inWS = mtd[self._RAW_WS_NAME]
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), inWS.getNumberHistograms())
    self.assertEqual(outWS.blocksize(), 1)
    spectrumInfo = outWS.spectrumInfo()
    for i in range(outWS.getNumberHistograms()):
        # A diagnostics value of 0 marks a detector that passed.
        self.assertEqual(outWS.readY(i)[0], 0)
        self.assertFalse(spectrumInfo.isMasked(i))
def testEmptyContainerSubtraction(self):
    """Subtracting an empty-container workspace scaled by ``ecFactor`` leaves
    ``(1 - ecFactor)`` of the original intensities.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``.
    """
    ws = self._cloneTestWorkspace()
    ecWSName = 'testECWS_'
    ecWS = self._cloneTestWorkspace(ecWSName)
    ecFactor = 0.13
    # Scale the container clone so the subtraction has a visible effect.
    ecWS *= ecFactor
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'EmptyContainerWorkspace': ecWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    ys = outWS.extractY()
    originalYs = ws.extractY()
    numpy.testing.assert_almost_equal(ys, (1.0 - ecFactor) * originalYs)
def testExecSparseInstrument(self):
    """Self-shielding simulation with a sparse instrument preserves the
    histogram count and the x axes of the input workspace.

    Fixes: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``; dropped the redundant ``[:, :]``
    full slice on ``originalXs``.
    """
    self._setDefaultSample(self._TEST_WS_NAME)
    outWSName = 'correctionWS'
    kwargs = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'SimulationInstrument': 'Sparse Instrument',
        'SparseInstrumentRows': 3,
        'SparseInstrumentColumns': 2,
        'NumberOfSimulatedWavelengths': 3,
        'rethrow': True
    }
    run_algorithm('DirectILLSelfShielding', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    inWS = mtd[self._TEST_WS_NAME]
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), inWS.getNumberHistograms())
    xs = outWS.extractX()
    originalXs = inWS.extractX()
    numpy.testing.assert_almost_equal(xs, originalXs)
def load_file_and_apply(self, filename, ws_name):
    """Load ``filename`` into workspace ``ws_name`` and apply the optional
    corrections selected via this algorithm's properties: instrument
    definition, calibration table, ISAW DetCal, copied instrument
    parameters, detector masking and momentum cropping.

    :param filename: data file to load (filtered by the TOF-range properties)
    :param ws_name: name of the output workspace in the ADS
    """
    Load(Filename=filename,
         OutputWorkspace=ws_name,
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)
    if self._load_inst:
        # Replace the embedded instrument definition with the user-supplied one.
        LoadInstrument(Workspace=ws_name,
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if self._apply_cal:
        ApplyCalibration(Workspace=ws_name,
                         PositionTable=self.getProperty("ApplyCalibration").value)
    if self._detcal:
        LoadIsawDetCal(InputWorkspace=ws_name,
                       Filename=self.getProperty("DetCal").value)
    if self._copy_params:
        # Copy instrument parameters from a reference workspace named by the property.
        CopyInstrumentParameters(OutputWorkspace=ws_name,
                                 InputWorkspace=self.getProperty("CopyInstrumentParameters").value)
    if self._masking:
        # Load the mask once and cache it as '__mask' for subsequent files.
        if not mtd.doesExist('__mask'):
            LoadMask(Instrument=mtd[ws_name].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
        MaskDetectors(Workspace=ws_name, MaskedWorkspace='__mask')
    if self.XMin != Property.EMPTY_DBL and self.XMax != Property.EMPTY_DBL:
        # Crop in momentum only when both limits were given;
        # presumably for later MD normalisation — confirm with callers.
        ConvertUnits(InputWorkspace=ws_name, OutputWorkspace=ws_name, Target='Momentum')
        CropWorkspaceForMDNorm(InputWorkspace=ws_name, OutputWorkspace=ws_name, XMin=self.XMin, XMax=self.XMax)