def testRawWorkspaceOutput(self):
    """Check that OutputRawWorkspace keeps the unprocessed counts while
    sharing binning and sample logs (Ei, wavelength) with the main output."""
    outWSName = 'outWS'
    rawWSName = 'rawWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'OutputRawWorkspace': rawWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    self.assertTrue(mtd.doesExist(rawWSName))
    rawWS = mtd[rawWSName]
    # The raw output must carry the original Ys/Es, minus the first
    # (monitor) spectrum.
    ys = rawWS.extractY()
    originalYS = inWS.extractY()
    numpy.testing.assert_almost_equal(ys, originalYS[1:, :])
    es = rawWS.extractE()
    originalES = inWS.extractE()
    numpy.testing.assert_almost_equal(es, originalES[1:, :])
    # Binning and sample logs are shared with the processed output.
    xs = rawWS.extractX()
    outXS = outWS.extractX()
    numpy.testing.assert_almost_equal(xs, outXS)
    Ei = rawWS.getRun().getProperty('Ei').value
    outEi = outWS.getRun().getProperty('Ei').value
    self.assertEqual(Ei, outEi)
    # BUG FIX: 'wavelength' was read from outWS on both sides, so the
    # assertion compared outWS with itself; read it from rawWS instead.
    wavelength = rawWS.getRun().getProperty('wavelength').value
    outWavelength = outWS.getRun().getProperty('wavelength').value
    self.assertEqual(wavelength, outWavelength)
def testSuccessWhenEverythingDisabled(self):
    """With all processing steps disabled, the output must equal the
    input (minus the monitor spectrum)."""
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'FlatBkg': 'Flat Bkg OFF',
        'IncidentEnergyCalibration': 'Energy Calibration OFF',
        'Normalisation': 'Normalisation OFF',
        'ElasticChannel': 'Default Elastic Channel',
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(),
                     inWS.getNumberHistograms() - 1)
    xs = outWS.extractX()
    originalXs = inWS.extractX()
    numpy.testing.assert_almost_equal(xs, originalXs[1:, :])
    ys = outWS.extractY()
    originalYs = inWS.extractY()
    numpy.testing.assert_almost_equal(ys, originalYs[1:, :])
    es = outWS.extractE()
    originalEs = inWS.extractE()
    numpy.testing.assert_almost_equal(es, originalEs[1:, :])
def test_LoadWavelength(self):
    """An explicit Wavelength must override the file value and set Ei."""
    out_ws_name = "LoadDNSLegacyTest_Test8"
    data_file = "dn134011vana.d_dat"
    loader = run_algorithm("LoadDNSLegacy", Filename=data_file,
                           Normalization='no',
                           OutputWorkspace=out_ws_name,
                           CoilCurrentsTable=self.curtable,
                           Wavelength=5.7)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    ws = AnalysisDataService.retrieve(out_ws_name)
    # dimensions
    self.assertEqual(24, ws.getNumberHistograms())
    self.assertEqual(2, ws.getNumDims())
    # data array
    self.assertEqual(31461, ws.readY(1))
    self.assertEqual(13340, ws.readY(23))
    self.assertAlmostEqual(5.7, ws.readX(1)[0], 3)
    self.assertAlmostEqual(5.7, ws.readX(23)[0], 3)
    # sample logs
    logs = ws.getRun()
    self.assertEqual(5.7, logs.getProperty('wavelength').value)
    self.assertAlmostEqual(2.51782, logs.getProperty('Ei').value, 3)
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_Fit_works_with_multidomain_functions(self):
    """Fit a three-domain MultiDomainFunction and check the tied 'a'
    parameters converge to 3.0."""
    x1 = np.arange(10)
    # Same values as the original element-wise loop, built vectorized:
    # constant 3, linear 2.9 + 3x and quadratic 3.1 + 3x^2.
    y1 = np.full(x1.shape, 3.0)
    y2 = 2.9 + 3.0 * x1
    y3 = 3.1 + 3.0 * x1 * x1
    x = np.concatenate((x1, x1, x1))
    y = np.concatenate((y1, y2, y3))
    data_name = 'dataWS'
    run_algorithm('CreateWorkspace', OutputWorkspace=data_name, DataX=x,
                  DataY=y, DataE=np.ones(30), NSpec=3, UnitX='TOF')
    single_domain = ';name=UserFunction,$domains=i,Formula=a+b*x+c*x^2'
    func = ('composite=MultiDomainFunction,NumDeriv=1'
            + single_domain * 3
            + ';ties=(f2.a=f1.a=f0.a)')
    output_name = "fitWS"
    Fit(Function=func, InputWorkspace=data_name, WorkspaceIndex=0,
        Output=output_name, InputWorkspace_1=data_name, WorkspaceIndex_1=1,
        InputWorkspace_2=data_name, WorkspaceIndex_2=2)
    self.assertTrue(output_name + '_Parameters' in mtd)
    params = mtd[output_name + '_Parameters']
    self.assertEqual(params.rowCount(), 10)
    # Rows 0/3/6 are the tied 'a' values; 4 and 8 the free parameters.
    for row, places in ((0, 10), (3, 10), (6, 10), (4, 1), (8, 1)):
        self.assertAlmostEqual(params.row(row)['Value'], 3.0, places)
def test_TwoTheta(self):
    """The corrected workspace must keep the 2theta angles of the data
    workspace even when vanadium/background banks are rotated differently."""
    out_ws_name = "DNSDetCorrVanaTest_Test5"
    # Rotate the detector banks of the three inputs to different angles.
    api.LoadInstrument(self.__dataws, InstrumentName='DNS')
    api.LoadInstrument(self.__vanaws, InstrumentName='DNS')
    api.LoadInstrument(self.__bkgrws, InstrumentName='DNS')
    api.RotateInstrumentComponent(self.__dataws, "bank0",
                                  X=0, Y=1, Z=0, Angle=-7.53)
    api.RotateInstrumentComponent(self.__vanaws, "bank0",
                                  X=0, Y=1, Z=0, Angle=-8.02)
    api.RotateInstrumentComponent(self.__bkgrws, "bank0",
                                  X=0, Y=1, Z=0, Angle=-8.54)
    # Run the correction.
    alg_test = run_algorithm("DNSDetEffCorrVana",
                             InputWorkspace=self.__dataws.getName(),
                             OutputWorkspace=out_ws_name,
                             VanaWorkspace=self.__vanaws.getName(),
                             BkgWorkspace=self.__bkgrws.getName())
    self.assertTrue(alg_test.isExecuted())
    ws = AnalysisDataService.retrieve(out_ws_name)
    # dimensions
    self.assertEqual(24, ws.getNumberHistograms())
    self.assertEqual(2, ws.getNumDims())
    # Expected angles: 7.53 deg plus 5 deg per detector.
    expected_tthetas = np.array([7.53 + index * 5 for index in range(24)])
    for index in range(24):
        det = ws.getDetector(index)
        signed_ttheta = np.degrees(ws.detectorSignedTwoTheta(det))
        self.assertAlmostEqual(expected_tthetas[index], signed_ttheta)
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_setTitle(self):
    """A title set via setTitle must be returned by getTitle."""
    run_algorithm('CreateWorkspace', OutputWorkspace='ws1',
                  DataX=[1., 2., 3.], DataY=[2., 3.], DataE=[2., 3.],
                  UnitX='TOF')
    ws1 = AnalysisDataService['ws1']
    title = 'test_title'
    ws1.setTitle(title)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(title, ws1.getTitle())
    AnalysisDataService.remove(ws1.getName())
def setUp(self):
    """Per-test configuration plus one-time creation of shared fixtures."""
    # Set up every time.
    config['default.instrument'] = 'IRIS'
    config["default.facility"] = "ISIS"
    # Only set up once.
    if not self.class_has_been_set_up:
        # BUG FIX: the original assigned a *local* variable, so the flag
        # never latched and this expensive branch ran before every test.
        self.__class__.class_has_been_set_up = True
        # Create a workspace that is not a table workspace.
        pre_existing_matrix_workspace_alg = run_algorithm(
            "CreateWorkspace", OutputWorkspace='matrix_ws',
            DataX='0', DataY='1')
        self.__pre_existing_matrix_workspace_name = \
            pre_existing_matrix_workspace_alg.getPropertyValue("OutputWorkspace")
        # Create an empty table workspace.
        table_workspace_alg = run_algorithm(
            "CreateEmptyTableWorkspace", OutputWorkspace='__empty_table')
        self.__empty_table_workspace_name = \
            table_workspace_alg.getPropertyValue("OutputWorkspace")
        self.__existing_range_of_run_files = '21360, 26173, 38633'
        self.__nonexistant_run_file = '99999'
def test_LoadTOF(self):
    """Load a DNS TOF-mode file and verify binning, logs and rotation."""
    out_ws_name = "LoadDNSLegacyTest_Test7"
    data_file = "dnstof.d_dat"
    tof1 = 424.668  # must be changed if L1 will change
    loader = run_algorithm("LoadDNSLegacy", Filename=data_file,
                           Normalization='no',
                           OutputWorkspace=out_ws_name)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    ws = AnalysisDataService.retrieve(out_ws_name)
    # dimensions
    self.assertEqual(24, ws.getNumberHistograms())
    self.assertEqual(100, ws.getNumberBins())
    # data array
    # must be changed after comissioning will be finished
    self.assertEqual(8, ws.readY(19)[37])
    self.assertAlmostEqual(tof1, ws.readX(0)[0], 3)
    self.assertAlmostEqual(tof1 + 40.1 * 100, ws.readX(0)[100], 3)
    # sample logs
    logs = ws.getRun()
    self.assertEqual(-7.5, logs.getProperty('deterota').value)
    self.assertEqual(100, logs.getProperty('tof_channels').value)
    self.assertEqual(51428, logs.getProperty('mon_sum').value)
    self.assertEqual('z', logs.getProperty('polarisation').value)
    # check that EPP is taken from file
    self.assertEqual(33, logs.getProperty('EPP').value)
    self.assertEqual('7', str(logs.getProperty('polarisation_comment').value))
    self.assertEqual('no', logs.getProperty('normalized').value)
    # check whether detector bank is rotated
    det = ws.getDetector(0)
    self.assertAlmostEqual(7.5, ws.detectorSignedTwoTheta(det) * 180 / pi)
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_LoadNormalizeToDuration(self):
    """Counts must be divided by the 600 s run duration when
    Normalization='duration'."""
    out_ws_name = "LoadDNSLegacyTest_Test1"
    data_file = "dn134011vana.d_dat"
    loader = run_algorithm("LoadDNSLegacy", Filename=data_file,
                           Normalization='duration',
                           OutputWorkspace=out_ws_name,
                           CoilCurrentsTable=self.curtable)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    ws = AnalysisDataService.retrieve(out_ws_name)
    # dimensions
    self.assertEqual(24, ws.getNumberHistograms())
    self.assertEqual(2, ws.getNumDims())
    # data array
    self.assertAlmostEqual(31461.0 / 600.0, ws.readY(1))
    self.assertAlmostEqual(13340.0 / 600.0, ws.readY(23))
    # sample logs
    logs = ws.getRun()
    self.assertEqual(-8.54, logs.getProperty('deterota').value)
    self.assertEqual(8332872, logs.getProperty('mon_sum').value)
    self.assertEqual('duration', logs.getProperty('normalized').value)
    # check whether detector bank is rotated
    det = ws.getDetector(0)
    self.assertAlmostEqual(8.54, ws.detectorSignedTwoTheta(det) * 180 / pi)
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_save_one_histogram(self):
    """Test to Save one histogram"""
    datawsname = "TestOneHistogram"
    E, I, err = self._createOneHistogram(datawsname)
    # Execute
    out_path = "tempout_hist.json"
    alg_test = run_algorithm(
        "SavePlot1DAsJson",
        InputWorkspace=datawsname,
        JsonFilename=out_path)
    # Executed?
    self.assertTrue(alg_test.isExecuted())
    # Verify ....
    d = json.load(open(out_path))[datawsname]
    self._checkData(d, E, I, err)
    # test overwrite
    alg_test = run_algorithm(
        "SavePlot1DAsJson",
        InputWorkspace=datawsname,
        JsonFilename=out_path)
    # BUG FIX: the overwrite run's success was never asserted.
    self.assertTrue(alg_test.isExecuted())
    # Delete the output file
    os.remove(out_path)
    return
def testMaskedComponents(self):
    """Spectra belonging to the masked component ('tube_1') must be
    flagged 1 in the diagnostics output, all others 0."""
    inWS = mtd[self._RAW_WS_NAME]
    spectraCount = inWS.getNumberHistograms()
    outWSName = 'diagnosticsWS'
    kwargs = {
        'InputWorkspace': self._RAW_WS_NAME,
        'OutputWorkspace': outWSName,
        'ElasticPeakDiagnostics': 'Peak Diagnostics OFF',
        'BkgDiagnostics': 'Bkg Diagnostics OFF',
        'BeamStopDiagnostics': 'Beam Stop Diagnostics OFF',
        'DefaultMask': 'Default Mask OFF',
        'MaskedComponents': 'tube_1',
        'rethrow': True
    }
    run_algorithm('DirectILLDiagnostics', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), spectraCount)
    self.assertEqual(outWS.blocksize(), 1)
    for i in range(spectraCount):
        Ys = outWS.readY(i)
        detector = outWS.getDetector(i)
        componentName = detector.getFullName()
        if 'tube_1' in componentName:
            self.assertEqual(Ys[0], 1)
        else:
            self.assertEqual(Ys[0], 0)
def testSelfShieldingCorrections(self):
    """Applying a constant correction factor must divide Ys and Es by it."""
    ws = self._cloneTestWorkspace()
    corrFactor = 0.789
    corrWS = self._cloneTestWorkspace('correctionWS')
    # Build a flat correction workspace: Y = corrFactor, E = 0.
    for i in range(corrWS.getNumberHistograms()):
        ys = corrWS.dataY(i)
        ys.fill(corrFactor)
        es = corrWS.dataE(i)
        es.fill(0)
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'SelfShieldingCorrectionWorkspace': corrWS,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    ys = outWS.extractY()
    originalYs = ws.extractY()
    numpy.testing.assert_almost_equal(ys, originalYs / corrFactor)
    es = outWS.extractE()
    originalEs = ws.extractE()
    numpy.testing.assert_almost_equal(es, originalEs / corrFactor)
def setUp(self):
    """Lazily create the shared peak-count test workspaces, once per class."""
    # Idiom fix: 'is None' instead of '== None' for the singleton test.
    if self.__class__._no_peak_ws is None:
        # Create test workspace with no peaks
        dataX = [0, 1]
        dataY = [0]
        dataE = [0]
        nSpec = 1
        no_peak_ws_alg = run_algorithm(
            "CreateWorkspace", DataX=dataX, DataY=dataY, DataE=dataE,
            NSpec=nSpec, UnitX="Wavelength",
            VerticalAxisUnit="SpectraNumber", OutputWorkspace="no_peak_ws")
        self.__class__._no_peak_ws = \
            no_peak_ws_alg.getPropertyValue("OutputWorkspace")
        # Create test workspace with a single peak
        # Setup enough X values {0, 1} to create a histo workspace with a
        # single bin.
        dataX = [0, 1, 0, 1, 0, 1]
        dataY = [0, 1, 0]  # One real peak
        dataE = [0, 0, 0]  # Errors are not considered in the algorithm
        nSpec = 3
        one_peak_ws_alg = run_algorithm(
            "CreateWorkspace", DataX=dataX, DataY=dataY, DataE=dataE,
            NSpec=nSpec, UnitX="Wavelength",
            VerticalAxisUnit="SpectraNumber", OutputWorkspace="one_peak_ws")
        self.__class__._one_peak_ws = \
            one_peak_ws_alg.getPropertyValue("OutputWorkspace")
        # Create test workspace with two peaks
        dataX = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
        # Real peaks with values 1, 2, false peak with value 0.2
        dataY = [0.1, 1, 0.1, 0.2, 0.1, 2, 0.1]
        dataE = [0, 0, 0, 0, 0, 0, 0]
        nSpec = 7
        two_peak_ws_alg = run_algorithm(
            "CreateWorkspace", DataX=dataX, DataY=dataY, DataE=dataE,
            NSpec=nSpec, UnitX="Wavelength",
            VerticalAxisUnit="SpectraNumber", OutputWorkspace="two_peak_ws")
        self.__class__._two_peak_ws = \
            two_peak_ws_alg.getPropertyValue("OutputWorkspace")
        # Create test workspace with three peaks
        dataX = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
        dataY = [0, 1, 0, 1, 0, 1, 0, 1, 0]  # 3 real peaks
        dataE = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        nSpec = 9
        three_peak_ws_alg = run_algorithm(
            "CreateWorkspace", DataX=dataX, DataY=dataY, DataE=dataE,
            NSpec=nSpec, UnitX="Wavelength",
            VerticalAxisUnit="SpectraNumber", OutputWorkspace="three_peak_ws")
        self.__class__._three_peak_ws = \
            three_peak_ws_alg.getPropertyValue("OutputWorkspace")
def test_ChildAlg_call_with_output_and_input_ws_the_same_succeeds(self):
    """In-place child algorithm call (input == output) must not raise."""
    data = [1.0]
    api.CreateWorkspace(DataX=data, DataY=data, NSpec=1, UnitX='Wavelength',
                        OutputWorkspace=self._ws_name)
    try:
        run_algorithm('PythonAlgorithmChildAlgCallTestAlg',
                      InputWorkspace=self._ws_name,
                      OutputWorkspace=self._ws_name)
    # Modernized from legacy 'except Exception,exc' — 'as' works on both
    # Python 2.6+ and Python 3.
    except Exception as exc:
        self.fail("Algorithm call failed: %s" % str(exc))
def test_save_one_histogram(self):
    """Test to Save one histogram"""
    datawsname = "TestOneHistogram"
    E, I, err = self._createOneHistogram(datawsname)
    out_path = "tempout_hist.json"
    # Execute
    alg_test = run_algorithm("SavePlot1DAsJson",
                             InputWorkspace=datawsname,
                             JsonFilename=out_path)
    # Executed?
    self.assertTrue(alg_test.isExecuted())
    # Verify: plots are numbered, so the first curve is '<name>0'.
    with open(out_path) as handle:
        d = json.load(handle)
    first_plot = d[datawsname + '0']
    np.testing.assert_array_equal(first_plot['x'], E)
    np.testing.assert_array_equal(first_plot['y'], I)
    np.testing.assert_array_equal(first_plot['e'], err)
    # test overwrite
    alg_test = run_algorithm("SavePlot1DAsJson",
                             InputWorkspace=datawsname,
                             JsonFilename=out_path)
    # Delete the output file
    os.remove(out_path)
    return
def test_updateDouble(self):
    """Updating a named double value in a parameter table must stick."""
    table_alg = run_algorithm("CreateEmptyTableWorkspace",
                              OutputWorkspace="TestTableWorkspace")
    self.assertTrue(table_alg.isExecuted())
    tablews = AnalysisDataService.retrieve("TestTableWorkspace")
    # Build a small Name/Value/FitOrTie table.
    tablews.addColumn("str", "Name")
    tablews.addColumn("double", "Value")
    tablews.addColumn("str", "FitOrTie")
    for name, value, fit_or_tie in (("A", 1.34, "Fit"),
                                    ("B", 2.34, "Tie"),
                                    ("S", 3.34, "Tie")):
        tablews.addRow([name, value, fit_or_tie])
    update_alg = run_algorithm(
        "UpdatePeakParameterTableValue",
        InputWorkspace=table_alg.getPropertyValue("OutputWorkspace"),
        Column="Value", ParameterNames=["A"], NewFloatValue=1.00)
    self.assertTrue(update_alg.isExecuted())
    # Row 0 ('A'), column 1 ('Value') must hold the new value.
    self.assertEqual(tablews.cell(0, 1), 1.00)
    return
def test_DNSMomentumTransfer(self):
    """Merged runs on a |Q| axis must have sorted Q values and unit Ys."""
    out_ws_name = "DNSMergeRunsTest_Test4"
    merger = run_algorithm("DNSMergeRuns", WorkspaceNames=self.workspaces,
                           OutputWorkspace=out_ws_name,
                           HorizontalAxis='|Q|')
    self.assertTrue(merger.isExecuted())
    ws = AnalysisDataService.retrieve(out_ws_name)
    # dimensions
    self.assertEqual(96, ws.blocksize())
    self.assertEqual(2, ws.getNumDims())
    self.assertEqual(1, ws.getNumberHistograms())
    # Reference: |Q| = 4*pi*sin(theta)/lambda with lambda = 4.2, sorted.
    ttheta = np.round(np.radians(self.angles), 4)
    qarr = np.sort(4.0 * np.pi * np.sin(0.5 * ttheta) / 4.2)
    dataX = ws.extractX()[0]
    for index in range(len(self.angles)):
        self.assertAlmostEqual(qarr[index], dataX[index])
    # The merge must not alter the intensities.
    dataY = ws.extractY()[0]
    for value in dataY:
        self.assertAlmostEqual(1.0, value)
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_LoadInValidValues(self):
    """Invalid log names or file names must make the algorithm fail."""
    outputWorkspaceName = "LoadLogPropertyTableTest_Test4"

    # invalid log name
    exceptionThrownOnBadLogName = False
    try:
        run_algorithm("LoadLogPropertyTable",
                      FirstFile="emu00006473.nxs",
                      LastFile="emu00006475.nxs",
                      LogNames="WrongTemp",
                      OutputWorkspace=outputWorkspaceName)
    except RuntimeError:
        exceptionThrownOnBadLogName = True
    self.assertEqual(True, exceptionThrownOnBadLogName)

    # invalid first file
    exceptionThrownOnBadFileParameter = False
    try:
        alg_test = run_algorithm("LoadLogPropertyTable",
                                 FirstFile="emu0000000.nxs",
                                 LastFile="emu00006475.nxs",
                                 LogNames="Temp_Sample",
                                 OutputWorkspace=outputWorkspaceName)
        self.assertFalse(alg_test.isExecuted())
    # Narrowed from a bare 'except:' which would also swallow
    # KeyboardInterrupt/SystemExit.
    except Exception:
        exceptionThrownOnBadFileParameter = True
    self.assertEqual(True, exceptionThrownOnBadFileParameter)

    # invalid last file
    exceptionThrownOnBadFileParameter = False
    try:
        alg_test = run_algorithm("LoadLogPropertyTable",
                                 FirstFile="emu00006473.nxs",
                                 LastFile="emu9999999.nxs",
                                 LogNames="Temp_Sample",
                                 OutputWorkspace=outputWorkspaceName)
        self.assertFalse(alg_test.isExecuted())
    except Exception:
        exceptionThrownOnBadFileParameter = True
    self.assertEqual(True, exceptionThrownOnBadFileParameter)
    return
def test_len_decreases_when_item_removed(self):
    """Deleting a workspace must shrink the ADS length by one."""
    wsname = 'ADSTest_test_len_decreases_when_item_removed'
    run_algorithm('Load', Filename='LOQ48127.raw', OutputWorkspace=wsname,
                  SpectrumMax=1)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(len(analysis_data_svc), 1)
    # Remove to clean the test up
    del analysis_data_svc[wsname]
    self.assertEqual(len(analysis_data_svc), 0)
def test_LoadValidFilesComments(self):
    """Loading the 'comment' log over a run range fills one row per run."""
    out_ws_name = "LoadLogPropertyTableTest_Test1"
    loader = run_algorithm("LoadLogPropertyTable",
                           FirstFile="MUSR00015189.nxs",
                           LastFile="MUSR00015193.nxs",
                           LogNames="comment",
                           OutputWorkspace=out_ws_name)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    tablews = AnalysisDataService.retrieve(out_ws_name)
    self.assertEqual(5, tablews.rowCount())
    self.assertEqual(2, tablews.columnCount())
    self.assertEqual("18.95MHz 100W", tablews.cell(0, 1))
    self.assertEqual(15189, tablews.cell(0, 0))
    self.assertEqual(15193, tablews.cell(4, 0))
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def testZeroMasking(self):
    """Spectra that are all zero must come out masked with zero integral."""
    ws = self._cloneTestWorkspace()
    zeroIndices = [5, 23]
    for i in zeroIndices:
        ws.dataY(i).fill(0.)
    eppWSName = 'eppWS'
    self._EPPTable(ws, eppWSName)
    outWSName = 'outWS'
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'EPPWorkspace': eppWSName,
        'rethrow': True
    }
    run_algorithm('DirectILLIntegrateVanadium', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    self.assertEqual(outWS.blocksize(), 1)
    spectrumInfo = outWS.spectrumInfo()
    for i in range(outWS.getNumberHistograms()):
        if i in zeroIndices:
            self.assertEqual(outWS.readY(i)[0], 0.)
            self.assertTrue(spectrumInfo.isMasked(i))
        else:
            self.assertGreater(outWS.readY(i)[0], 0.)
            self.assertFalse(spectrumInfo.isMasked(i))
def test_LoadPartiallyValidFilesMultipleLogValues(self):
    """Only the runs containing all requested logs must produce rows."""
    out_ws_name = "LoadLogPropertyTableTest_Test2"
    loader = run_algorithm("LoadLogPropertyTable",
                           FirstFile="emu00006473.nxs",
                           LastFile="emu00006475.nxs",
                           LogNames="Temp_Sample,dur",
                           OutputWorkspace=out_ws_name)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    tablews = AnalysisDataService.retrieve(out_ws_name)
    self.assertEqual(2, tablews.rowCount())
    self.assertEqual(3, tablews.columnCount())
    self.assertEqual(6473, tablews.cell(0, 0))
    self.assertAlmostEqual(200.078, tablews.cell(0, 1), 2)
    self.assertEqual("8697", tablews.cell(0, 2))
    self.assertEqual(6475, tablews.cell(1, 0))
    self.assertAlmostEqual(283.523, tablews.cell(1, 1), 2)
    self.assertEqual("5647", tablews.cell(1, 2))
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def testDetectorGroupingWithUserGivenAngleStep(self):
    """The theta-W output must contain one spectrum per occupied angle bin."""
    ws = illhelpers.create_poor_mans_in5_workspace(0.0,
                                                   _groupingTestDetectors)
    nhisto = ws.getNumberHistograms()
    spectrumInfo = ws.spectrumInfo()
    # Find the extremal scattering angles of the input detectors.
    minAngle = 180.
    maxAngle = 0.
    for index in range(nhisto):
        angle = numpy.rad2deg(spectrumInfo.twoTheta(index))
        if angle < minAngle:
            minAngle = angle
        if angle > maxAngle:
            maxAngle = angle
    mtd.addOrReplace('inWS', ws)
    outWSName = 'unused'
    outSThetaWName = 'SofThetaW'
    angleStep = 0.2
    algProperties = {
        'InputWorkspace': ws,
        'OutputWorkspace': outWSName,
        'GroupingAngleStep': angleStep,
        'OutputSofThetaEnergyWorkspace': outSThetaWName,
        'Transposing': 'Transposing OFF',
        'rethrow': True
    }
    run_algorithm('DirectILLReduction', **algProperties)
    self.assertTrue(outSThetaWName in mtd)
    SThetaWWS = mtd[outSThetaWName]
    spectrumInfo = SThetaWWS.spectrumInfo()
    # Expected group count = number of angleStep-wide bins spanned.
    firstAngleBin = int(minAngle / angleStep)
    lastAngleBin = int(maxAngle / angleStep) + 1
    expected = lastAngleBin - firstAngleBin
    self.assertEqual(spectrumInfo.size(), expected)
def test_profileCoeffsNotSavedWhenNotRefined(self):
    """The HDF5 output must not contain profile coefficients when they
    were not refined."""
    run_algorithm(self.ALG_NAME, **self.defaultAlgParams)
    with h5py.File(self.TEMP_FILE_NAME, "r") as output_file:
        fit_results_group = output_file["Bank 1"]["GSAS-II Fitting"]
        # assertNotIn reports the container on failure, unlike
        # assertFalse(... in ...).
        self.assertNotIn("Profile Coefficients", fit_results_group)
def test_exportFileAppend(self):
    """ Test to export logs without header file
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    # create new file
    alg_test = run_algorithm(
        "ExportExperimentLog",
        InputWorkspace="TestMatrixWS",
        OutputFilename="TestRecord.txt",
        SampleLogNames=["run_number", "duration", "proton_charge"],
        SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
        SampleLogOperation=[None, None, "sum"],
        FileMode="new")
    # append
    alg_test = run_algorithm(
        "ExportExperimentLog",
        InputWorkspace="TestMatrixWS",
        OutputFilename="TestRecord.txt",
        SampleLogNames=["run_number", "duration", "proton_charge"],
        SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
        SampleLogOperation=[None, None, "sum"],
        FileMode="fastappend")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        # Parenthesized print works on both Python 2 and 3.
        print("Output file is %s. " % (outfilename))
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError as err:
        print("Unable to open file %s. " % (outfilename))
        self.assertTrue(False)
        return

    # Last line cannot be empty, i.e., before EOF '\n' is not allowed
    lastline = lines[-1]
    self.assertTrue(len(lastline.strip()) > 0)

    # Number of lines: header + two data rows.
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(len(lines), 3)

    # Check line
    firstdataline = lines[1]
    terms = firstdataline.strip().split("\t")
    self.assertEqual(len(terms), 3)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")
    return
def testValidateInputWorkspace3(self):
    """An incompatible third input workspace must fail property validation."""
    self._args['InputWorkspace'] = self._ws_shift
    self._args['InputWorkspace3'] = self._ws_in_3
    self._args['OutputWorkspace'] = 'output'
    # assertRaises-as-context-manager requires Python >= 2.7.
    self.assertTrue(sys.version_info >= (2, 7))
    with self.assertRaises(RuntimeError) as raised:
        run_algorithm('MatchPeaks', **self._args)
    self.assertEqual('Some invalid Properties found',
                     str(raised.exception))
def test_calfile_from_masked_workspace_inverse(self):
    """Inverted cal file from a LOQ workspace with 701 masked spectra."""
    run_algorithm('Load', Filename='LOQ49886.nxs',
                  OutputWorkspace='LOQ49886', rethrow=True)
    run_algorithm('MaskDetectors', Workspace='LOQ49886',
                  WorkspaceIndexList='0-700',
                  MaskedWorkspace='LOQ49886', rethrow=True)
    masked_workspace = mtd['LOQ49886']
    # Inverted convention: masked -> 1, unmasked -> 0.
    self.do_test_cal_file(masked_workspace, True, 1, 0, 700)
def test_calfile_from_masked_workspace_inverse(self):
    """Inverted cal file from a sample workspace with 101 masked spectra."""
    run_algorithm('CreateSampleWorkspace',
                  OutputWorkspace='wsMaskWSToCalFileTest', rethrow=True)
    run_algorithm('MaskDetectors', Workspace='wsMaskWSToCalFileTest',
                  WorkspaceIndexList='0-100',
                  MaskedWorkspace='wsMaskWSToCalFileTest', rethrow=True)
    masked_workspace = mtd['wsMaskWSToCalFileTest']
    # Inverted convention: masked -> 1, unmasked -> 0.
    self.do_test_cal_file(masked_workspace, True, 1, 0, 100)
def test_complex_binary_ops_do_not_leave_temporary_workspaces_behind(self):
    """Chained arithmetic must only register the final named result."""
    run_algorithm('CreateWorkspace', OutputWorkspace='ca',
                  DataX=[1., 2., 3.], DataY=[2., 3.], DataE=[2., 3.],
                  UnitX='TOF')
    ads = AnalysisDataService
    # NOTE: the variable name 'w1' matters — Mantid registers the result
    # under the assignment target's name.
    w1 = (ads['ca'] * 0.0) + 1.0
    for expected_name in ('w1', 'ca'):
        self.assertTrue(expected_name in ads)
    self.assertTrue('__python_op_tmp0' not in ads)
def _add_natural_angle_step_parameter(ws, step=1.0):
    """Attach a numeric 'natural-angle-step' instrument parameter to ws."""
    run_algorithm('SetInstrumentParameter',
                  Workspace=ws,
                  ParameterName='natural-angle-step',
                  ParameterType='Number',
                  Value=str(step))
def test_default_goniometer(self):
    """Default goniometer is the identity matrix"""
    alg = run_algorithm('CreateWorkspace',
                        DataX=[1, 2, 3, 4, 5],
                        DataY=[1, 2, 3, 4, 5],
                        NSpec=1, child=True)
    ws = alg.getProperty("OutputWorkspace").value
    goniometer = ws.run().getGoniometer()
    self.assertTrue(isinstance(goniometer, Goniometer))
    # Rotation matrix must be exactly the 3x3 identity.
    self.assertTrue((goniometer.getR() == np.identity(3)).all())
def testNormalisationToTime(self):
    """Time normalisation must divide counts by the 'duration' log."""
    outWSName = 'outWS'
    duration = 3612.3
    mtd[self._TEST_WS_NAME].mutableRun().addProperty('duration', duration,
                                                     True)
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'FlatBkg': 'Flat Bkg OFF',
        'IncidentEnergyCalibration': 'Energy Calibration OFF',
        'Normalisation': 'Normalisation Time',
        'rethrow': True
    }
    run_algorithm('DirectILLCollectData', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    inWS = mtd[self._TEST_WS_NAME]
    # The last spectrum (monitor) is dropped; the rest are scaled by
    # 1 / duration.
    numpy.testing.assert_almost_equal(outWS.extractY(),
                                      inWS.extractY()[:-1, :] / duration)
    numpy.testing.assert_almost_equal(outWS.extractE(),
                                      inWS.extractE()[:-1, :] / duration)
def test_LoadValidFilesComments(self):
    """Loading the 'comment' log over a MUSR run range fills the table."""
    out_ws_name = "LoadLogPropertyTableTest_Test1"
    loader = run_algorithm("LoadLogPropertyTable",
                           FirstFile="MUSR00015189.nxs",
                           LastFile="MUSR00015193.nxs",
                           LogNames="comment",
                           OutputWorkspace=out_ws_name)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    tablews = AnalysisDataService.retrieve(out_ws_name)
    self.assertEqual(5, tablews.rowCount())
    self.assertEqual(2, tablews.columnCount())
    self.assertEqual("18.95MHz 100W", tablews.cell(0, 1))
    self.assertEqual(15189, tablews.cell(0, 0))
    self.assertEqual(15193, tablews.cell(4, 0))
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def setUp(self):
    """Build the shared IN5 test workspace plus a flat vanadium reference."""
    if not self._testIN5WS:
        self._testIN5WS = illhelpers.create_poor_mans_in5_workspace(
            self._BKG_LEVEL, illhelpers.default_test_detectors)
    inWSName = 'inputWS'
    mtd.addOrReplace(inWSName, self._testIN5WS)
    # Collect data and the elastic peak positions.
    collectArgs = {
        'InputWorkspace': self._testIN5WS,
        'OutputWorkspace': self._TEST_WS_NAME,
        'OutputEPPWorkspace': self._EPP_WS_NAME
    }
    run_algorithm('DirectILLCollectData', **collectArgs)
    # Integrate the vanadium using the EPP table.
    integrateArgs = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': self._VANADIUM_WS_NAME,
        'EPPWorkspace': self._EPP_WS_NAME
    }
    run_algorithm('DirectILLIntegrateVanadium', **integrateArgs)
    # Flatten the vanadium to unit response.
    vanadiumWS = mtd[self._VANADIUM_WS_NAME]
    for index in range(vanadiumWS.getNumberHistograms()):
        vanadiumWS.dataY(index).fill(1.0)
    mtd.remove(inWSName)
def _run_createws(self, wsname):
    """
    Run create workspace storing the output in the named workspace
    """
    values = [1.0, 2.0, 3.0]
    creator = run_algorithm('CreateWorkspace', DataX=values, DataY=values,
                            NSpec=1, UnitX='Wavelength', child=True)
    created = creator.getProperty("OutputWorkspace").value
    AnalysisDataService.addOrReplace(wsname, created)
def test_LoadPartiallyValidFilesMultipleLogValues(self):
    """Only runs that contain all requested logs produce table rows."""
    out_ws_name = "LoadLogPropertyTableTest_Test2"
    loader = run_algorithm("LoadLogPropertyTable",
                           FirstFile="emu00006473.nxs",
                           LastFile="emu00006475.nxs",
                           LogNames="Temp_Sample,dur",
                           OutputWorkspace=out_ws_name)
    self.assertTrue(loader.isExecuted())
    # Verify some values
    tablews = AnalysisDataService.retrieve(out_ws_name)
    self.assertEqual(2, tablews.rowCount())
    self.assertEqual(3, tablews.columnCount())
    self.assertEqual(6473, tablews.cell(0, 0))
    self.assertAlmostEqual(200.078, tablews.cell(0, 1), 2)
    self.assertEqual("8697", tablews.cell(0, 2))
    self.assertEqual(6475, tablews.cell(1, 0))
    self.assertAlmostEqual(283.523, tablews.cell(1, 1), 2)
    self.assertEqual("5647", tablews.cell(1, 2))
    run_algorithm("DeleteWorkspace", Workspace=out_ws_name)
    return
def test_sum(self):
    """Every spectrum of the calibration output must be 100 +/- 10."""
    out_ws_name = "output_ws"
    calibration = run_algorithm("ComputeCalibrationCoefVan",
                                VanadiumWorkspace=self._input_ws,
                                EPPTable=self._table,
                                OutputWorkspace=out_ws_name)
    self.assertTrue(calibration.isExecuted())
    wsoutput = AnalysisDataService.retrieve(out_ws_name)
    for index in range(wsoutput.getNumberHistograms()):
        self.assertEqual(100., wsoutput.readY(index)[0])
        self.assertEqual(10., wsoutput.readE(index)[0])
    DeleteWorkspace(wsoutput)
def test_exportFileAndHeader(self):
    """ Test to export logs without header file
    """
    import os
    import os.path
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20339.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorC"],
        WriteHeaderFile=True,
        Header="SensorA[K]\t SensorB[K]\t SensorC[K]")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate the header file next to the output file.
    outfilename = alg_test.getProperty("OutputFilename").value
    filepath = os.path.dirname(outfilename)
    basename = os.path.basename(outfilename)
    baseheadername = basename.split(".")[0] + "_header.txt"
    headerfilename = os.path.join(filepath, baseheadername)
    try:
        ifile = open(headerfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError as err:
        errmsg = "Unable to open header file %s. " % (headerfilename)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(errmsg, "")
        return

    # Count the non-empty lines in the header file.
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 3)

    # Clean
    os.remove(outfilename)
    os.remove(headerfilename)
    AnalysisDataService.remove("TestMatrixWS")
    return
def testBackgroundDiagnostics(self):
    """Spectra with artificially high or low backgrounds must be flagged
    (Y = 1) by the background diagnostics; all others stay 0 and nothing
    gets masked."""
    rawWS = mtd[self._RAW_WS_NAME]
    spectraCount = rawWS.getNumberHistograms()
    # Push three spectra well above the noisy-background high threshold.
    highBkgIndices = [0, int(spectraCount / 3), spectraCount - 1]
    for i in highBkgIndices:
        ys = rawWS.dataY(i)
        ys += 10.0 * self._BKG_LEVEL
    # Pull two spectra below the low threshold (background removed).
    lowBkgIndices = [int(spectraCount / 4), int(2 * spectraCount / 3)]
    for i in lowBkgIndices:
        ys = rawWS.dataY(i)
        ys -= self._BKG_LEVEL
    outWSName = 'diagnosticsWS'
    # Only background diagnostics enabled; thresholds bracket the
    # manipulated levels (0.01 .. 9.99 of the nominal background).
    kwargs = {
        'InputWorkspace': self._RAW_WS_NAME,
        'OutputWorkspace': outWSName,
        'ElasticPeakDiagnostics': 'Peak Diagnostics OFF',
        'EPPWorkspace': self._EPP_WS_NAME,
        'BkgDiagnostics': 'Bkg Diagnostics ON',
        'NoisyBkgLowThreshold': 0.01,
        'NoisyBkgHighThreshold': 9.99,
        'BeamStopDiagnostics': 'Beam Stop Diagnostics OFF',
        'DefaultMask': 'Default Mask OFF',
        'rethrow': True
    }
    run_algorithm('DirectILLDiagnostics', **kwargs)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), spectraCount)
    self.assertEqual(outWS.blocksize(), 1)
    spectrumInfo = outWS.spectrumInfo()
    for i in range(spectraCount):
        # Diagnostics only flags — it does not mask the spectra.
        self.assertFalse(spectrumInfo.isMasked(i))
        ys = outWS.readY(i)
        if i in highBkgIndices + lowBkgIndices:
            self.assertEqual(ys[0], 1)
        else:
            self.assertEqual(ys[0], 0)
def test1(self):
    """CreateCacheFilename: one prop
    """
    pm = PropertyManager()
    pm.declareProperty("a", 0)
    pm.setProperty("a", 3)
    mantid.PropertyManagerDataService.add("pm", pm)
    # Execute
    alg_test = run_algorithm(
        "CreateCacheFilename",
        PropertyManager="pm",
        Properties=[],
        OtherProperties=[],
        Prefix="",
        CacheDir="",
    )
    # executed?
    self.assertTrue(alg_test.isExecuted())
    # The cache file name is the SHA1 of the serialized property
    # ("a=3") under <user props dir>/cache.
    expected = os.path.join(
        ConfigService.getUserPropertiesDir(), "cache",
        "%s.nxs" % hashlib.sha1("a=3".encode('utf-8')).hexdigest())
    self.assertEqual(alg_test.getPropertyValue("OutputFilename"),
                     expected)
    # Another test. don't specify the default values — the result must
    # be identical to the fully-specified call above.
    alg_test = run_algorithm(
        "CreateCacheFilename",
        PropertyManager="pm",
    )
    # executed?
    self.assertTrue(alg_test.isExecuted())
    expected = os.path.join(
        ConfigService.getUserPropertiesDir(), "cache",
        "%s.nxs" % hashlib.sha1("a=3".encode('utf-8')).hexdigest())
    self.assertEqual(alg_test.getPropertyValue("OutputFilename"),
                     expected)
    return
def test_calfile_from_extracted_masking_workspace_inverse(self):
    """Cal file from an extracted mask workspace with inverted flags."""
    ws_name = 'wsMaskWSToCalFileTest'
    run_algorithm('CreateSampleWorkspace', OutputWorkspace=ws_name,
                  rethrow=True)
    # Mask the first 101 workspace indices.
    run_algorithm('MaskDetectors', Workspace=ws_name,
                  WorkspaceIndexList='0-100', MaskedWorkspace=ws_name,
                  rethrow=True)
    # Pull the mask out into a dedicated mask workspace.
    run_algorithm('ExtractMask', InputWorkspace=ws_name,
                  OutputWorkspace='ExtractedWorkspace', rethrow=True)
    extracted_workspace = mtd['ExtractedWorkspace']
    # With inversion, masked spectra map to 1 and unmasked spectra to 0.
    should_invert = True
    masking_identifier = 1
    not_masking_identifier = 0
    self.do_test_cal_file(extracted_workspace, should_invert,
                          masking_identifier, not_masking_identifier, 100)
def test_calfile_from_grouped_masked_workspace(self):
    """Cal file from a masked workspace whose spectra were grouped."""
    ws_name = 'wsMaskWSToCalFileTest'
    run_algorithm('CreateSampleWorkspace', OutputWorkspace=ws_name,
                  rethrow=True)
    # Mask the first 101 workspace indices.
    run_algorithm('MaskDetectors', Workspace=ws_name,
                  WorkspaceIndexList='0-100', MaskedWorkspace=ws_name,
                  rethrow=True)
    # Collapse those same indices into one group, keeping the rest.
    run_algorithm('GroupDetectors', InputWorkspace=ws_name,
                  OutputWorkspace=ws_name, WorkspaceIndexList='0-100',
                  KeepUngroupedSpectra=True, rethrow=True)
    masked_workspace = mtd[ws_name]
    # Without inversion, masked spectra map to 0 and unmasked spectra to 1.
    should_invert = False
    masking_identifier = 0
    not_masking_identifier = 1
    self.do_test_cal_file(masked_workspace, should_invert,
                          masking_identifier, not_masking_identifier, 0)
def test_exportFileMissingLog(self):
    """Export sample logs when one requested log does not exist.

    SensorX is intentionally absent from the workspace; the algorithm must
    still execute and write the remaining logs without a header file.
    """
    # Generate the matrix workspace with some logs.
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Run the export algorithm.
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20335.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorX", "SensorC"],
        WriteHeaderFile=False)

    # Validate execution.
    self.assertTrue(alg_test.isExecuted())

    # Locate and read the produced file.  Use a context manager so the file
    # handle is closed even if reading fails, and self.fail() instead of
    # the old print + assertTrue(False) pattern.
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        with open(outfilename) as ifile:
            lines = ifile.readlines()
    except IOError:
        self.fail("Unable to open file {0}.".format(outfilename))
        return

    # Count the non-empty lines in the file.
    goodlines = sum(1 for line in lines if line.strip())
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(goodlines, 25)

    # Check values on the first line.
    line0 = lines[0]
    terms = line0.split()
    self.assertEqual(len(terms), 6)
    value2 = float(terms[4])
    self.assertEqual(value2, 0.)

    # Clean up the file and the ADS entry.
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")
    return
def test_temperature_from_sample_log(self):
    """The DWF must be computed from the 'temperature' sample log (0 K here)."""
    self._input_ws.mutableRun().addProperty('temperature', 0.0, True)
    out_name = "output_ws"
    # Reposition the detectors so the scattering angles differ.
    EditInstrumentGeometry(self._input_ws, L2="4,8", Polar="0,15",
                           Azimuthal="0,0", DetectorIDs="1,2")
    alg = run_algorithm("ComputeCalibrationCoefVan",
                        VanadiumWorkspace=self._input_ws,
                        EPPTable=self._table,
                        OutputWorkspace=out_name)
    self.assertTrue(alg.isExecuted())
    result = AnalysisDataService.retrieve(out_name)
    self._checkDWF(result, 0.0)
    DeleteWorkspace(result)
def test_two_wing_multi(self):
    """Reduce multiple two-wing runs, twice, then once more with SumRuns."""
    args = {'Run': self._runs_two_wing_multi, 'OutputWorkspace': 'out'}
    # Running twice with identical arguments must give the same group layout.
    for _ in range(2):
        result = run_algorithm('IndirectILLReductionQENS', **args)
        self.assertTrue(result.isExecuted(),
                        "IndirectILLReductionQENS not executed")
        self._check_workspace_group(mtd['out_red'], 2, 18, 1024)
    # Summing the runs collapses the group to a single workspace.
    args['SumRuns'] = True
    result = run_algorithm('IndirectILLReductionQENS', **args)
    self.assertTrue(result.isExecuted(),
                    "IndirectILLReductionQENS not executed")
    self._check_workspace_group(mtd['out_red'], 1, 18, 1024)
def test_fit_cubic_spline_with_gauss_conv_produces_fit_with_same_range_as_binning_for_calc(
        self):
    """The fitted spectrum must span the range given by BinningForCalc.

    BinningForCalc stops at 3.0 while BinningForFit extends to 4.0; the
    output X axis must follow BinningForCalc (start 0.2, step 0.1, max 3.0).
    """
    binning_for_calc = "0.2,0.1,3.0"
    binning_for_fit = "0.2,0.1,4.0"
    alg_test = run_algorithm("FitIncidentSpectrum",
                             InputWorkspace=self.incident_wksp,
                             OutputWorkspace="fit_wksp",
                             BinningForCalc=binning_for_calc,
                             BinningForFit=binning_for_fit,
                             FitSpectrumWith="GaussConvCubicSpline")
    self.assertTrue(alg_test.isExecuted())
    fit_wksp = AnalysisDataService.retrieve("fit_wksp")
    xs = fit_wksp.readX(0)
    # BUGFIX: the old assertion compared `a.all() == b.all()`, i.e. two
    # booleans, which passes for any non-empty data (and the reference
    # arange used step 0.01, not the 0.1 of BinningForCalc).  Check the
    # actual axis: start, end and bin width.
    self.assertAlmostEqual(xs[0], 0.2)
    self.assertLessEqual(xs[-1], 3.0 + 1e-9)
    np.testing.assert_allclose(np.diff(xs), 0.1, rtol=1e-7)
def test_addOrReplace_replaces_workspace_with_existing_name(self):
    """addOrReplace on an existing name must replace, not grow the ADS."""
    data = [1.0, 2.0, 3.0]
    alg = run_algorithm('CreateWorkspace', DataX=data, DataY=data, NSpec=1,
                        UnitX='Wavelength', child=True)
    name = "testws"
    ws = alg.getProperty("OutputWorkspace").value
    AnalysisDataService.add(name, ws)
    len_before = len(AnalysisDataService)
    AnalysisDataService.addOrReplace(name, ws)
    len_after = len(AnalysisDataService)
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len_after, len_before)
def test_otherprops_only(self): """CreateCacheFilename: other_props only """ # Execute alg_test = run_algorithm( "CreateCacheFilename", OtherProperties=["a=1", "b=2"], ) # executed? self.assertTrue(alg_test.isExecuted()) # Verify .... expected = os.path.join(ConfigService.getUserPropertiesDir(), "cache", "%s.nxs" % hashlib.sha1("a=1,b=2").hexdigest()) self.assertEqual(alg_test.getPropertyValue("OutputFilename"), expected) return
def refl_rotate_detector(ws, angle):
    """Move and rotate the 'detector' component to the given angle (degrees).

    The detector is placed on a circle in the Y-Z plane whose radius is read
    from the 'det.value' sample log (millimetres, converted to metres), then
    rotated about the X axis by the same angle.
    """
    radius = ws.run().getProperty('det.value').value * 1e-3
    theta = numpy.deg2rad(angle)
    run_algorithm('MoveInstrumentComponent',
                  Workspace=ws,
                  ComponentName='detector',
                  X=0.,
                  Y=radius * numpy.sin(theta),
                  Z=radius * numpy.cos(theta),
                  RelativePosition=False)
    run_algorithm('RotateInstrumentComponent',
                  Workspace=ws,
                  ComponentName='detector',
                  X=1.,
                  Y=0.,
                  Z=0.,
                  Angle=numpy.rad2deg(theta),
                  RelativeRotation=False)
def test_DNSTwoTheta_Groups(self):
    """Merge a workspace group onto a common 2theta axis."""
    outputWorkspaceName = "DNSMergeRunsTest_Test3"
    # NOTE: simpleapi names the grouped output after the python variable,
    # so 'group' must match the WorkspaceNames argument below.
    group = api.GroupWorkspaces(self.workspaces)
    alg_test = run_algorithm("DNSMergeRuns", WorkspaceNames='group',
                             OutputWorkspace=outputWorkspaceName,
                             HorizontalAxis='2theta')
    self.assertTrue(alg_test.isExecuted())
    ws = AnalysisDataService.retrieve(outputWorkspaceName)
    # A single merged spectrum with one bin per scattering angle.
    self.assertEqual(96, ws.blocksize())
    self.assertEqual(2, ws.getNumDims())
    self.assertEqual(1, ws.getNumberHistograms())
    # The merged X axis must reproduce the expected angles...
    dataX = ws.extractX()[0]
    for i, angle in enumerate(self.angles):
        self.assertAlmostEqual(angle, dataX[i])
    # ...while the intensities stay untouched.
    for value in ws.extractY()[0]:
        self.assertAlmostEqual(1.0, value)
    run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName)
    return
def testEmptyContainerSubtractionWithScaling(self):
    """Scaled empty-container subtraction: out = in - scaling * ec."""
    ws = self._cloneTestWorkspace()
    ecWSName = 'testECWS_'
    ecWS = self._cloneTestWorkspace(ecWSName)
    # The container is a scaled copy of the sample data, so the subtraction
    # result can be predicted analytically below.
    ecFactor = 0.13
    ecWS *= ecFactor
    outWSName = 'outWS'
    ecScaling = 0.876
    algProperties = {
        'InputWorkspace': self._TEST_WS_NAME,
        'OutputWorkspace': outWSName,
        'EmptyContainerWorkspace': ecWSName,
        'EmptyContainerScaling': ecScaling,
        'rethrow': True
    }
    run_algorithm('DirectILLApplySelfShielding', **algProperties)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    ys = outWS.extractY()
    originalYs = ws.extractY()
    numpy.testing.assert_almost_equal(
        ys, (1.0 - ecScaling * ecFactor) * originalYs)
def test_DNSVanadiumCorrection_Masked(self):
    """Vanadium correction coefficients with detector 1 masked."""
    outputWorkspaceName = "DNSComputeDetCorrCoefsTest_Test3"
    vanalist = [self.sfvanaws.getName(), self.nsfvanaws.getName()]
    bglist = [self.sfbkgrws.getName(), self.nsfbkgrws.getName()]
    MaskDetectors(self.sfvanaws, DetectorList=[1])
    MaskDetectors(self.nsfvanaws, DetectorList=[1])
    alg_test = run_algorithm("DNSComputeDetEffCorrCoefs",
                             VanadiumWorkspaces=vanalist,
                             BackgroundWorkspaces=bglist,
                             OutputWorkspace=outputWorkspaceName)
    self.assertTrue(alg_test.isExecuted())
    # check whether the data are correct
    ws = AnalysisDataService.retrieve(outputWorkspaceName)
    # dimensions
    self.assertEqual(24, ws.getNumberHistograms())
    self.assertEqual(2, ws.getNumDims())
    # reference data
    refdata = np.linspace(1.0, 24, 24)/13.0
    refdata[0] = 0  # detector is masked
    # BUGFIX: readY(i) returns an array; compare its first element to the
    # scalar reference (matches the pattern used by the other DNS tests).
    for i in range(24):
        self.assertAlmostEqual(refdata[i], ws.readY(i)[0])
    run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName)
    return
def testIntegrationWithoutDebyeWallerCorrection(self):
    """Plain integration: each spectrum sums to its constant Y times numBins."""
    ws = self._cloneTestWorkspace()
    # Fill spectrum i with the constant value i + 1 (errors = sqrt of that).
    histogramCount = ws.getNumberHistograms()
    for index in range(histogramCount):
        ws.dataY(index).fill(float(index + 1))
        ws.dataE(index).fill(numpy.sqrt(float(index + 1)))
    binCount = ws.blocksize()
    eppWSName = 'eppWS'
    self._EPPTable(ws, eppWSName)
    outWSName = 'outWS'
    run_algorithm('DirectILLIntegrateVanadium',
                  InputWorkspace=self._TEST_WS_NAME,
                  OutputWorkspace=outWSName,
                  EPPWorkspace=eppWSName,
                  DebyeWallerCorrection='Correction OFF',
                  rethrow=True)
    self.assertTrue(mtd.doesExist(outWSName))
    outWS = mtd[outWSName]
    self.assertEqual(outWS.getNumberHistograms(), ws.getNumberHistograms())
    # Integration collapses every spectrum to a single bin.
    self.assertEqual(outWS.blocksize(), 1)
    for index in range(outWS.getNumberHistograms()):
        expected = float(index + 1) * binCount
        self.assertEqual(outWS.readY(index)[0], expected)
        self.assertAlmostEqual(outWS.readE(index)[0], numpy.sqrt(expected))
def test_basicrun(self):
    """TOF->DeltaE conversion sets the unit and keeps the workspace shape."""
    out_name = "outputws"
    alg = run_algorithm("TOFTOFConvertTofToDeltaE",
                        InputWorkspace=self._input_ws,
                        OutputWorkspace=out_name)
    wsoutput = AnalysisDataService.retrieve(out_name)
    # Execution of the algorithm.
    self.assertTrue(alg.isExecuted())
    # The X axis must carry energy transfer now.
    self.assertEqual(wsoutput.getAxis(0).getUnit().unitID(), "DeltaE")
    # Same number of spectra and bins as the input.
    self.assertEqual(wsoutput.getNumberHistograms(),
                     self._input_ws.getNumberHistograms())
    self.assertEqual(wsoutput.blocksize(), self._input_ws.blocksize())
    DeleteWorkspace(wsoutput)
def testDetectorGrouping(self):
    """Reduction groups detectors without losing or duplicating any IDs."""
    ws = illhelpers.create_poor_mans_in5_workspace(0.0, _groupingTestDetectors)
    # Record every detector ID before the reduction.
    initialIds = [ws.getDetector(index).getID()
                  for index in range(ws.getNumberHistograms())]
    _add_natural_angle_step_parameter(ws)
    mtd.addOrReplace('inWS', ws)
    outWSName = 'outWS'
    run_algorithm('DirectILLReduction',
                  InputWorkspace=ws,
                  OutputWorkspace=outWSName,
                  Cleanup='Cleanup OFF',
                  Transposing='Transposing OFF',
                  rethrow=True)
    # With cleanup off, the intermediate grouped workspace stays in the ADS.
    groupedWSName = outWSName + '_grouped_detectors_'
    self.assertTrue(groupedWSName in mtd)
    groupedWS = mtd[groupedWSName]
    self.assertEqual(groupedWS.getNumberHistograms(), 2)
    groupedIds = list(groupedWS.getDetector(0).getDetectorIDs())
    groupedIds += groupedWS.getDetector(1).getDetectorIDs()
    # The two groups together must contain exactly the original IDs.
    self.assertEqual(collections.Counter(initialIds),
                     collections.Counter(groupedIds))
def test_DNSFRSelfCorrection(self): outputWorkspaceName = "DNSFlippingRatioCorrTest_Test4" # consider normalization=1.0 as set in self._create_fake_workspace dataws_sf = self.__sf_nicrws - self.__sf_bkgrws dataws_nsf = self.__nsf_nicrws - self.__nsf_bkgrws alg_test = run_algorithm("DNSFlippingRatioCorr", SFDataWorkspace=dataws_sf, NSFDataWorkspace=dataws_nsf, SFNiCrWorkspace=self.__sf_nicrws.getName(), NSFNiCrWorkspace=self.__nsf_nicrws.getName(), SFBkgrWorkspace=self.__sf_bkgrws.getName(), NSFBkgrWorkspace=self.__nsf_bkgrws.getName(), SFOutputWorkspace=outputWorkspaceName + 'SF', NSFOutputWorkspace=outputWorkspaceName + 'NSF') self.assertTrue(alg_test.isExecuted()) # check whether the data are correct ws_sf = AnalysisDataService.retrieve(outputWorkspaceName + 'SF') ws_nsf = AnalysisDataService.retrieve(outputWorkspaceName + 'NSF') # dimensions self.assertEqual(24, ws_sf.getNumberHistograms()) self.assertEqual(24, ws_nsf.getNumberHistograms()) self.assertEqual(2, ws_sf.getNumDims()) self.assertEqual(2, ws_nsf.getNumDims()) # data array: spin-flip must be zero for i in range(24): self.assertAlmostEqual(0.0, ws_sf.readY(i)[0]) # data array: non spin-flip must be nsf - sf^2/nsf nsf = np.array(dataws_nsf.extractY()) sf = np.array(dataws_sf.extractY()) refdata = nsf + sf for i in range(24): self.assertAlmostEqual(refdata[i][0], ws_nsf.readY(i)[0]) run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName + 'SF') run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName + 'NSF') run_algorithm("DeleteWorkspace", Workspace=dataws_sf) run_algorithm("DeleteWorkspace", Workspace=dataws_nsf) return
def test_omega_scan(self):
    """FWS reduction with the sample rotation angle as the scan observable."""
    params = {
        'Run': self._observable_omega,
        'Observable': 'SamS_Rot.value',
        'OutputWorkspace': 'out'
    }
    result = run_algorithm('IndirectILLReductionFWS', **params)
    self.assertTrue(result.isExecuted(),
                    "IndirectILLReductionFWS not executed")
    self._check_workspace_group(mtd['out_red'], 1, 18, 1)
    # The single X point must equal the rotation angle of the run.
    self.assertEqual(mtd['out_red'].getItem(0).readX(0)[0], 90)
def test_saveFileWithSingleValueProperties(self):
    """Single-value sample logs are written as entries in 'Sample Logs'."""
    input_ws = self._create_sample_workspace()
    # One numeric and one string log.
    self._add_log_to_workspace(input_ws, "Test1", 1.0)
    self._add_log_to_workspace(input_ws, "Test2", "Test2")
    alg = run_algorithm(self.ALG_NAME,
                        InputWorkspace=input_ws,
                        Filename=self.TEMP_FILE_NAME)
    self.assertTrue(alg.isExecuted())
    with h5py.File(self.TEMP_FILE_NAME, "r") as output_file:
        self.assertTrue("Sample Logs" in output_file)
        logs = output_file["Sample Logs"]
        # Numeric logs come back as floats, string logs as byte strings.
        self.assertEqual(logs["Test1"].value, 1.0)
        self.assertEqual(logs["Test2"].value[0], b"Test2")
def test_operators_with_workspaces_in_ADS(self):
    """Exercise binary and in-place arithmetic on workspaces held in the ADS."""
    run_algorithm('CreateWorkspace', OutputWorkspace='a', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    ads = AnalysisDataService
    A = ads['a']
    run_algorithm('CreateWorkspace', OutputWorkspace='b', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    B = ads['b']
    # Equality
    self.assertTrue(A.equals(B, 1e-8))
    # Two workspaces: binary ops rebind C, in-place ops must keep it a
    # MatrixWorkspace.
    C = A + B
    C = A - B
    C = A * B
    C = A / B
    C -= B
    self.assertTrue(isinstance(C, MatrixWorkspace))
    C += B
    self.assertTrue(isinstance(C, MatrixWorkspace))
    C *= B
    self.assertTrue(isinstance(C, MatrixWorkspace))
    C /= B
    self.assertTrue(isinstance(C, MatrixWorkspace))
    # Workspace + double (B is a plain float from here on)
    B = 123.456
    C = A + B
    C = A - B
    C = A * B
    C = A / B
    ads.remove('C')
    self.assertTrue('C' not in ads)
    run_algorithm('CreateWorkspace', OutputWorkspace='ca', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    C = ads['ca']
    # In-place ops on a workspace fetched from the ADS must not create a
    # new 'C' entry; the result stays registered under 'ca'.
    C *= B
    self.assertTrue('C' not in ads)
    C -= B
    self.assertTrue('C' not in ads)
    C += B
    self.assertTrue('C' not in ads)
    C /= B
    self.assertTrue('C' not in ads)
    # Check correct in place ops have been used
    self.assertTrue('ca' in ads)
    ads.remove('ca')
    # Commutative: double + workspace
    C = B * A
    C = B + A
    # NOTE(review): lookups appear to rely on case-insensitive ADS names
    # ('A' removes 'a') — confirm against the ADS implementation.
    ads.remove('A')
    ads.remove('B')
    ads.remove('C')
def test_dwf_using_default_temperature(self):
    """Without a temperature log the default 293 K is used for the DWF."""
    out_name = "output_ws"
    # Change theta to make dwf != 1.
    EditInstrumentGeometry(self._input_ws, L2="4,8", Polar="0,15",
                           Azimuthal="0,0", DetectorIDs="1,2")
    alg = run_algorithm("ComputeCalibrationCoefVan",
                        VanadiumWorkspace=self._input_ws,
                        EPPTable=self._table,
                        OutputWorkspace=out_name)
    self.assertTrue(alg.isExecuted())
    result = AnalysisDataService.retrieve(out_name)
    self._checkDWF(result, 293.0)
    DeleteWorkspace(result)