def test_group_interface(self):
    """Exercise the WorkspaceGroup Python interface: size/length accessors,
    name access, and weak-reference invalidation after the ADS is cleared.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual`` throughout.
    """
    run_algorithm('CreateWorkspace', OutputWorkspace='First', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    run_algorithm('CreateWorkspace', OutputWorkspace='Second', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    run_algorithm('GroupWorkspaces', InputWorkspaces='First,Second', OutputWorkspace='grouped')
    grouped = mtd['grouped']
    self.assertEqual(type(grouped), WorkspaceGroup)
    self.assertEqual(2, grouped.size())
    self.assertEqual(2, grouped.getNumberOfEntries())
    # Matches operator
    self.assertEqual(len(grouped), grouped.getNumberOfEntries())
    # Matches length of name list
    names = grouped.getNames()
    self.assertEqual(str(names), "['First','Second']")
    self.assertEqual(len(grouped), len(names))
    expected = ['First', 'Second']
    for expected_name, actual_name in zip(expected, names):
        self.assertEqual(expected_name, actual_name)
    # Clearing the data should leave the handle unusable
    mtd.clear()
    try:
        grouped.getNames()
        self.fail("WorkspaceGroup handle is still usable after ADS has been cleared, it should be a weak reference and raise an error.")
    except RuntimeError as exc:
        self.assertEqual(str(exc), 'Variable invalidated, data has been deleted.')
def test_that_a_group_is_invalidated_if_ADS_is_cleared_and_RuntimeError_raised(self):
    """A WorkspaceGroup handle must raise RuntimeError once the ADS is cleared."""
    workspace_group = self.create_group_via_GroupWorkspace_algorithm()
    mtd.clear()
    with self.assertRaises(RuntimeError):
        workspace_group.getNames()
def testCleanupOFF(self):
    """With Cleanup OFF, ReflectometryILLPreprocess must leave all of its
    intermediate workspaces in the ADS.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual``, matching the sibling implementation of this test.
    """
    # test if intermediate workspaces exist:
    # normalise_to_slits, normalise_to_monitor, '_normalised_to_time_','transposed_flat_background'
    outWSName = 'outWS'
    ws = illhelpers.create_poor_mans_d17_workspace()
    ws = illhelpers.refl_add_line_position(ws, 3.0)
    self.assertEqual(ws.run().getProperty(common.SampleLogs.LINE_POSITION).value, 3.0)
    # Add a peak to the workspace.
    for i in range(33, 100):
        ys = ws.dataY(i)
        ys += 10.0
    args = {
        'InputWorkspace': ws,
        'OutputWorkspace': outWSName,
        'TwoTheta': 0.6,
        'Cleanup': 'Cleanup OFF',
        'WaterWorkspace': ws,
        'ForegroundHalfWidth': [1, 2],
        'FluxNormalisation': 'Normalisation OFF',
        'rethrow': True,
        'child': True
    }
    alg = create_algorithm('ReflectometryILLPreprocess', **args)
    assertRaisesNothing(self, alg.execute)
    wsInADS = mtd.getObjectNames()
    self.assertEqual(len(wsInADS), 13)
    self.assertEqual(wsInADS, [
        'outWS_cloned_for_flat_bkg_',
        'outWS_detectors_',
        'outWS_detectors_moved_',
        'outWS_flat_background_',
        'outWS_flat_background_subtracted_',
        'outWS_in_wavelength_',
        'outWS_monitors_',
        'outWS_peak_',
        'outWS_transposed_clone_',
        'outWS_transposed_flat_background_',
        'outWS_water_calibrated_',
        'outWS_water_detectors_',
        'outWS_water_rebinned_'
    ])
    mtd.clear()
def test_group_interface(self):
    """Exercise the WorkspaceGroup Python interface: size/length accessors,
    name access, and weak-reference invalidation after the ADS is cleared.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual`` throughout.
    """
    run_algorithm('CreateWorkspace', OutputWorkspace='First', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    run_algorithm('CreateWorkspace', OutputWorkspace='Second', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    run_algorithm('GroupWorkspaces', InputWorkspaces='First,Second', OutputWorkspace='grouped')
    grouped = mtd['grouped']
    self.assertEqual(type(grouped), WorkspaceGroup)
    self.assertEqual(2, grouped.size())
    self.assertEqual(2, grouped.getNumberOfEntries())
    # Matches operator
    self.assertEqual(len(grouped), grouped.getNumberOfEntries())
    # Matches length of name list
    names = grouped.getNames()
    self.assertEqual(str(names), "['First','Second']")
    self.assertEqual(len(grouped), len(names))
    expected = ['First', 'Second']
    for expected_name, actual_name in zip(expected, names):
        self.assertEqual(expected_name, actual_name)
    # Clearing the data should leave the handle unusable
    mtd.clear()
    try:
        grouped.getNames()
        self.fail(
            "WorkspaceGroup handle is still usable after ADS has been cleared, it should be a weak reference and raise an error."
        )
    except RuntimeError as exc:
        self.assertEqual(str(exc), 'Variable invalidated, data has been deleted.')
def test_extract_spectra(self):
    """extract_spectra must produce a workspace containing exactly the
    requested detector IDs and leave only the input and the result in the ADS.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual`` throughout.
    """
    mtd.clear()
    ws = CreateSampleWorkspace("Histogram", "Multiple Peaks")
    det_ids = [100, 102, 104]
    result = su.extract_spectra(ws, det_ids, "result")
    # Essentially, do we end up with our original workspace and the resulting
    # workspace in the ADS, and NOTHING else?
    self.assertTrue("result" in mtd)
    self.assertTrue("ws" in mtd)
    self.assertEqual(2, len(mtd))
    self.assertEqual(result.getNumberHistograms(), len(det_ids))
    self.assertEqual(result.getDetector(0).getID(), 100)
    self.assertEqual(result.getDetector(1).getID(), 102)
    self.assertEqual(result.getDetector(2).getID(), 104)
    ws = CreateSampleWorkspace("Histogram", "Multiple Peaks")
    det_ids = range(100, 299, 2)
    result = su.extract_spectra(ws, det_ids, "result")
def testCleanupOFF(self):
    """With Cleanup OFF every intermediate workspace must remain in the ADS."""
    # test if intermediate workspaces exist:
    # normalise_to_slits, normalise_to_monitor, '_normalised_to_time_','transposed_flat_background'
    output_name = 'outWS'
    workspace = illhelpers.create_poor_mans_d17_workspace()
    workspace = illhelpers.refl_add_line_position(workspace, 3.0)
    line_position = workspace.run().getProperty(common.SampleLogs.LINE_POSITION).value
    self.assertEqual(line_position, 3.0)
    # Add a peak to the workspace.
    for spectrum in range(33, 100):
        counts = workspace.dataY(spectrum)
        counts += 10.0
    parameters = {
        'InputWorkspace': workspace,
        'OutputWorkspace': output_name,
        'TwoTheta': 0.6,
        'Cleanup': 'Cleanup OFF',
        'WaterWorkspace': workspace,
        'ForegroundHalfWidth': [1, 2],
        'FluxNormalisation': 'Normalisation OFF',
        'rethrow': True,
        'child': True
    }
    algorithm = create_algorithm('ReflectometryILLPreprocess', **parameters)
    assertRaisesNothing(self, algorithm.execute)
    names_in_ads = mtd.getObjectNames()
    self.assertEqual(len(names_in_ads), 13)
    expected_names = [
        'outWS_cloned_for_flat_bkg_',
        'outWS_detectors_',
        'outWS_detectors_moved_',
        'outWS_flat_background_',
        'outWS_flat_background_subtracted_',
        'outWS_in_wavelength_',
        'outWS_monitors_',
        'outWS_peak_',
        'outWS_transposed_clone_',
        'outWS_transposed_flat_background_',
        'outWS_water_calibrated_',
        'outWS_water_detectors_',
        'outWS_water_rebinned_'
    ]
    self.assertEqual(names_in_ads, expected_names)
    mtd.clear()
def tearDown(self):
    """Empty the ADS so each test starts from a clean state."""
    mtd.clear()
def tearDown(self):
    """Restore any saved default facility/instrument, then empty the ADS."""
    for key, saved in (('default.facility', self._facility),
                       ('default.instrument', self._instrument)):
        if saved:
            config[key] = saved
    mtd.clear()
def tearDown(self):
    """Empty the ADS and put back the original default facility/instrument."""
    mtd.clear()
    config['default.facility'] = self._oldFacility
    config['default.instrument'] = self._oldInstrument
def tearDown(self): """Clear up after each test""" mtd.clear()
def tearDownClass(cls):
    """Empty the ADS and delete the temporary logbook file, if one was written.

    Fix: the logbook path was built twice via ``os.path.join``; hoist it into
    a local so the existence check and the removal use the same value.
    """
    mtd.clear()
    logbook_path = os.path.join(gettempdir(), 'logbook.csv')
    if os.path.exists(logbook_path):
        os.remove(logbook_path)
def tearDownClass(cls):
    """Wipe the ADS once the whole test case has finished."""
    mtd.clear()