def test_that_can_merge_2D_reduction_when_fitting_set_to_none(self):
    """SANSStitch in 'None' mode should merge 2D (multi-spectrum) reductions.

    Builds HAB/LAB counts and norm workspaces with constant signals and
    checks every merged bin equals (hab_counts + lab_counts) / (hab_norm + lab_norm).
    """
    # create an input workspace that has multiple spectra
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 2))
    create_alg.setProperty('NSpec', 2)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setProperty('VerticalAxisUnit', 'MomentumTransfer')
    create_alg.setProperty('VerticalAxisValues', range(0, 2))

    # hab counts
    create_alg.setProperty('DataY', [1, 1, 1, 1])
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    hab_counts = create_alg.getProperty('OutputWorkspace').value

    # hab norm
    create_alg.setProperty('DataY', [2, 2, 2, 2])
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    hab_norm = create_alg.getProperty('OutputWorkspace').value

    # lab counts
    create_alg.setProperty('DataY', [3, 3, 3, 3])
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    lab_counts = create_alg.getProperty('OutputWorkspace').value

    # lab norm
    create_alg.setProperty('DataY', [4, 4, 4, 4])
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    lab_norm = create_alg.getProperty('OutputWorkspace').value

    # Basic algorithm setup
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('HABCountsSample', hab_counts)
    alg.setProperty('LABCountsSample', lab_counts)
    alg.setProperty('HABNormSample', hab_norm)
    alg.setProperty('LABNormSample', lab_norm)
    alg.setProperty('ProcessCan', False)
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    alg.setProperty('Mode', 'None')
    alg.setProperty('OutputWorkspace', 'dummy_name')

    errors = alg.validateInputs()
    self.assertEqual(0, len(errors))

    alg.execute()
    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    # assertEqual reports the actual count on failure, unlike assertTrue(... == 2)
    self.assertEqual(2, out_ws.getNumberHistograms())

    expected_entries = (1. + 3.) / (2. + 4.)
    delta = 1e-5
    for index in range(0, 2):
        for element in out_ws.dataY(index):
            # assertAlmostEqual shows both values on failure, unlike a bare
            # assertTrue(abs(expected - actual) < delta)
            self.assertAlmostEqual(expected_entries, element, delta=delta)
def test_strip_special_values(self):
    """SANSStitch must tolerate NaN/inf entries when fitting in 'Both' mode."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    # Seven finite points followed by a NaN and an infinity.
    y_data = np.array([1] * 7)
    y_data = np.append(y_data, [np.nan])
    y_data = np.append(y_data, [np.inf])
    create_alg.setProperty('DataY', y_data)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    workspace = create_alg.getProperty('OutputWorkspace').value

    stitch = AlgorithmManager.create('SANSStitch')
    stitch.setChild(True)
    stitch.initialize()
    stitch.setProperty('Mode', 'Both')
    for prop_name in ('HABCountsSample', 'LABCountsSample',
                      'HABNormSample', 'LABNormSample'):
        stitch.setProperty(prop_name, workspace)
    stitch.setProperty('OutputWorkspace', 'dummy_name')
    # This would throw at the point of fitting in NaNs or infs where present
    stitch.execute()
def test_workspace_entries_must_be_q1d(self):
    """validateInputs rejects multi-spectrum workspaces when fitting is requested."""
    # create an input workspace that has multiple spectra
    maker = AlgorithmManager.create('CreateWorkspace')
    maker.setChild(True)
    maker.initialize()
    maker.setProperty('DataX', range(0, 1))
    maker.setProperty('DataY', [1, 2])
    maker.setProperty('NSpec', 2)  # Wrong number of spectra
    maker.setProperty('UnitX', 'MomentumTransfer')
    maker.setPropertyValue('OutputWorkspace', 'out_ws')
    maker.execute()
    multi_spectra_input = maker.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    sample_props = ('HABCountsSample', 'LABCountsSample',
                    'HABNormSample', 'LABNormSample')
    for prop_name in sample_props:
        alg.setProperty(prop_name, multi_spectra_input)

    errors = alg.validateInputs()
    # Every 2D sample input should be flagged as invalid.
    for prop_name in sample_props:
        self.assertTrue(prop_name in errors)
def test_scale_none(self):
    """'None' mode of SANSFitShiftScale must echo the input scale/shift factors."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    in_scale_factor = 1.0
    in_shift_factor = 1.0
    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABWorkspace', single_spectra_input)
    alg.setProperty('LABWorkspace', single_spectra_input)
    alg.setProperty('ShiftFactor', in_shift_factor)
    alg.setProperty('ScaleFactor', in_scale_factor)
    alg.execute()

    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, in_scale_factor)
    self.assertEqual(out_shift_factor, in_shift_factor)
def test_can_workspaces_required_if_process_can(self):
    """Enabling ProcessCan makes all four can workspaces mandatory."""
    # create an input workspace that has a single spectrum
    maker = AlgorithmManager.create('CreateWorkspace')
    maker.setChild(True)
    maker.initialize()
    maker.setProperty('DataX', range(0, 1))
    maker.setProperty('DataY', [1])
    maker.setProperty('NSpec', 1)
    maker.setProperty('UnitX', 'MomentumTransfer')
    maker.setPropertyValue('OutputWorkspace', 'out_ws')
    maker.execute()
    workspace = maker.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    for sample_prop in ('HABCountsSample', 'LABCountsSample',
                        'HABNormSample', 'LABNormSample'):
        alg.setProperty(sample_prop, workspace)
    alg.setProperty('ProcessCan', True)  # Now can workspaces should be provided

    errors = alg.validateInputs()
    for can_prop in ('HABCountsCan', 'LABCountsCan', 'HABNormCan', 'LABNormCan'):
        self.assertTrue(can_prop in errors)
def test_scale_only(self):
    """ScaleOnly fitting of y=x+5 against y=x with shift fixed at -5 gives scale 1."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', range(5, 14))
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'ScaleOnly')
    alg.setProperty('HABWorkspace', hab_workspace)
    alg.setProperty('LABWorkspace', lab_workspace)
    alg.setProperty('ShiftFactor', -5.0)
    alg.setProperty('ScaleFactor', 7.2)
    alg.execute()

    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)
def _remove_job(self, trans_id, job_id=None, is_running=False):
    """
    Abort job and/or stop transaction
    @param trans_id: remote transaction ID
    @param job_id: remote job ID
    @param is_running: True if the job is currently running
    """
    if is_running:
        try:
            # At this point we are authenticated so just purge
            alg = AlgorithmManager.create("AbortRemoteJob")
            alg.initialize()
            alg.setProperty("ComputeResource", str(self._settings.compute_resource))
            alg.setProperty("JobID", job_id)
            alg.execute()
        except Exception:
            # except Exception (not bare except) so KeyboardInterrupt/SystemExit
            # propagate; sys.exc_info()[1] replaces the Python-2-only sys.exc_value
            Logger("cluster_status").error("Problem aborting job: %s" % sys.exc_info()[1])
    try:
        alg = AlgorithmManager.create("StopRemoteTransaction")
        alg.initialize()
        alg.setProperty("ComputeResource", str(self._settings.compute_resource))
        alg.setProperty("TransactionID", trans_id)
        alg.execute()
    except Exception:
        # Message fixed: original said "Project stopping remote transaction"
        Logger("cluster_status").error("Problem stopping remote transaction: %s" % sys.exc_info()[1])
    self._update_content()
def test_newestInstanceOf_returns_correct_instance(self):
    """newestInstanceOf must return the most recently created instance."""
    alg = AlgorithmManager.create("ConvertUnits")
    alg2 = AlgorithmManager.create("ConvertUnits")
    alg3 = AlgorithmManager.newestInstanceOf("ConvertUnits")
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(alg2.getAlgorithmID(), alg3.getAlgorithmID())
    self.assertNotEqual(alg.getAlgorithmID(), alg3.getAlgorithmID())
def test_stitch_2d_restricted_to_none(self):
    """2D (multi-spectrum) inputs are only valid when no fitting is requested."""
    # create an input workspace that has multiple spectra
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 1))
    create_alg.setProperty('DataY', [1,1])
    create_alg.setProperty('NSpec', 2)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    double_spectra_input = create_alg.getProperty('OutputWorkspace').value

    # Basic algorithm setup
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('HABCountsSample', double_spectra_input)
    alg.setProperty('LABCountsSample', double_spectra_input)
    alg.setProperty('HABNormSample', double_spectra_input)
    alg.setProperty('LABNormSample', double_spectra_input)
    alg.setProperty('ProcessCan', False)
    alg.setProperty('ShiftFactor', 1.0)
    alg.setProperty('ScaleFactor', 0.0)

    # 2D inputs Should not be allowed for mode Both
    alg.setProperty('Mode', 'Both')
    errors = alg.validateInputs()
    self.assertTrue('HABCountsSample' in errors)
    self.assertTrue('LABCountsSample' in errors)
    self.assertTrue('HABNormSample' in errors)
    self.assertTrue('LABNormSample' in errors)

    # 2D inputs Should not be allowed for mode ScaleOnly
    alg.setProperty('Mode', 'ScaleOnly')
    errors = alg.validateInputs()
    self.assertTrue('HABCountsSample' in errors)
    self.assertTrue('LABCountsSample' in errors)
    self.assertTrue('HABNormSample' in errors)
    self.assertTrue('LABNormSample' in errors)

    # 2D inputs Should not be allowed for mode ShiftOnly
    alg.setProperty('Mode', 'ShiftOnly')
    errors = alg.validateInputs()
    self.assertTrue('HABCountsSample' in errors)
    self.assertTrue('LABCountsSample' in errors)
    self.assertTrue('HABNormSample' in errors)
    self.assertTrue('LABNormSample' in errors)

    # With no fitting 2D inputs are allowed
    alg.setProperty('Mode', 'None')
    errors = alg.validateInputs()
    self.assertEqual(0, len(errors))
def test_scale_both_without_can_with_q_fit_range(self):
    """Fitting restricted to [FitMin, FitMax] must ignore spike bins outside it."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 12))
    # HAB as linear function y=x+5, with huge outliers outside the fit range
    hab_range = list(range(5, 16))
    hab_range[0] = 15000
    hab_range[9] = 15000
    create_alg.setProperty('DataY', hab_range)
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 11))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 11)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    alg.setProperty('HABCountsSample', hab_workspace)
    alg.setProperty('LABCountsSample', lab_workspace)
    alg.setProperty('HABNormSample', flat_norm)
    alg.setProperty('LABNormSample', flat_norm)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('FitMin', 1)
    alg.setProperty('FitMax', 9)
    alg.execute()

    # The original fetched OutputWorkspace twice; once is enough.
    out_ws = alg.getProperty('OutputWorkspace').value
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)

    y_array = out_ws.readY(0)
    expected_y_array = [7497.5, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 7502, 10.0]
    # We scale and shift to the back (lab) detectors
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)),
                    msg='All data should be scaled and shifted to the LAB scale=1 shift=-5')
def test_that_zero_merge_range_has_discrete_transition(self):
    # This tests that if a merge_max or merge_min is specified greater than the overlap region of
    # the HAB and LAB the relevant value is set to the maximum value.
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value
    # HAB counts: constant 2
    create_alg.setProperty('DataY', [2] * 9)
    create_alg.execute()
    single_spectra_input_HAB = create_alg.getProperty('OutputWorkspace').value
    # Can counts: constant 0.5 (subtracted from the sample)
    create_alg.setProperty('DataY', [0.5] * 9)
    create_alg.execute()
    smaller_single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input_HAB)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('ProcessCan', True)
    alg.setProperty('HABCountsCan', smaller_single_spectra_input)
    alg.setProperty('LABCountsCan', smaller_single_spectra_input)
    alg.setProperty('HABNormCan', single_spectra_input)
    alg.setProperty('LABNormCan', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    # MergeMin == MergeMax == 5: expect a discrete LAB->HAB switch at bin 5
    alg.setProperty('MergeMask', True)
    alg.setProperty('MergeMin', 5)
    alg.setProperty('MergeMax', 5)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    y_array = out_ws.readY(0)
    # LAB-only (1 - 0.5) before the transition, HAB-only (2 - 0.5) after it
    expected_y_array = [0.5] * 5 + [1.5] * 4
    np.testing.assert_equal(y_array, expected_y_array)
def test_scale_none_with_can_and_q_merge_range(self):
    """Merged output uses LAB below MergeMin, blended between, HAB above MergeMax."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value
    # HAB counts: constant 2
    create_alg.setProperty('DataY', [2] * 9)
    create_alg.execute()
    single_spectra_input_HAB = create_alg.getProperty('OutputWorkspace').value
    # Can counts: constant 0.5 (subtracted from the sample)
    create_alg.setProperty('DataY', [0.5] * 9)
    create_alg.execute()
    smaller_single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input_HAB)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('ProcessCan', True)
    alg.setProperty('HABCountsCan', smaller_single_spectra_input)
    alg.setProperty('LABCountsCan', smaller_single_spectra_input)
    alg.setProperty('HABNormCan', single_spectra_input)
    alg.setProperty('LABNormCan', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    # Merge region spans bins 2..7
    alg.setProperty('MergeMask', True)
    alg.setProperty('MergeMin', 2)
    alg.setProperty('MergeMax', 7)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    y_array = out_ws.readY(0)
    # LAB-only 0.5, merged 1.0, HAB-only 1.5
    expected_y_array = [0.5] * 2 + [1.0] * 5 + [1.5] * 2
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)),
                    msg='can gets subtracted so expect 1 - 0.5 as output signal. Proves the can workspace gets used correctly.')
def test_generic_dialog(self):
    """The generic dialog builds one line-edit widget per algorithm property."""
    dialog = GenericDialog()
    mock_alg = AlgorithmManager.create('AlgorithmDialogMockAlgorithm')
    dialog.setAlgorithm(mock_alg)
    dialog.initializeLayout()
    self.assertEqual(len(dialog.findChildren(QLineEdit)), 3)
def pre_process(self):
    """
    Reduction steps that are meant to be executed only once per set
    of data files. After this is executed, all files will go through
    the list of reduction steps.
    """
    Logger("Reducer").information("Setting up reduction options")
    if self.setup_algorithm is not None:
        alg = AlgorithmManager.create(self.setup_algorithm)
        alg.initialize()
        props = [p.name for p in alg.getProperties()]
        for key in self.reduction_properties.keys():
            if key in props:
                try:
                    alg.setProperty(key, self.reduction_properties[key])
                except Exception:
                    # except Exception (not bare except) so KeyboardInterrupt
                    # propagates; sys.exc_info()[1] replaces the Python-2-only
                    # sys.exc_value attribute
                    msg = "Error setting %s=%s" % (key, str(self.reduction_properties[key]))
                    msg += "\n  %s" % sys.exc_info()[1]
                    Logger("Reducer").error(msg)
            else:
                Logger("Reducer").warning("Setup algorithm has no %s property" % key)
        if "ReductionProperties" in props:
            alg.setPropertyValue("ReductionProperties",
                                 self.get_reduction_table_name())
        alg.execute()
def test_fit_shift_requires_scale_factor(self):
    """ShiftOnly mode cannot validate without a ScaleFactor being supplied."""
    fit_alg = AlgorithmManager.create('SANSFitShiftScale')
    fit_alg.setChild(True)
    fit_alg.initialize()
    fit_alg.setProperty('Mode', 'ShiftOnly')
    self.assertTrue('ScaleFactor' in fit_alg.validateInputs())
def test_finish_handle(self):
    """An observer registered for 'finish' is notified on successful execution."""
    mock_alg = AlgorithmManager.create("MockAlgorithm", -1)
    watcher = MockObserver()
    watcher.observeFinish(mock_alg)
    mock_alg.execute()
    self.assertTrue(watcher.finish_handled)
    # No error callback should have fired for a clean run.
    self.assertTrue(watcher.error_message is None)
def test_scale_none_with_can_and_q_merge_range_equal(self):
    """Equal MergeMin/MergeMax gives a sharp LAB->HAB transition at that bin."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value
    # HAB counts: constant 2
    create_alg.setProperty('DataY', [2] * 9)
    create_alg.execute()
    single_spectra_input_HAB = create_alg.getProperty('OutputWorkspace').value
    # Can counts: constant 0.5 (subtracted from the sample)
    create_alg.setProperty('DataY', [0.5] * 9)
    create_alg.execute()
    smaller_single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input_HAB)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('ProcessCan', True)
    alg.setProperty('HABCountsCan', smaller_single_spectra_input)
    alg.setProperty('LABCountsCan', smaller_single_spectra_input)
    alg.setProperty('HABNormCan', single_spectra_input)
    alg.setProperty('LABNormCan', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    # Degenerate merge range: min == max == 5
    alg.setProperty('MergeMask', True)
    alg.setProperty('MergeMin', 5)
    alg.setProperty('MergeMax', 5)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    y_array = out_ws.readY(0)
    # LAB-only (1 - 0.5) before bin 5, HAB-only (2 - 0.5) from bin 5 on
    expected_y_array = [0.5] * 5 + [1.5] * 4
    np.testing.assert_equal(y_array, expected_y_array)
def test_none_mode_requires_scale_and_shift_factors(self):
    """'None' mode still needs explicit scale and shift factors to validate."""
    stitch = AlgorithmManager.create('SANSStitch')
    stitch.setChild(True)
    stitch.initialize()
    stitch.setProperty('Mode', 'None')
    errors = stitch.validateInputs()
    for factor_name in ('ScaleFactor', 'ShiftFactor'):
        self.assertTrue(factor_name in errors)
def test_scale_both_without_can(self):
    """'Both' mode fits scale and shift; output matches the LAB reference data."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', range(5, 14))
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    alg.setProperty('HABCountsSample', hab_workspace)
    alg.setProperty('LABCountsSample', lab_workspace)
    alg.setProperty('HABNormSample', flat_norm)
    alg.setProperty('LABNormSample', flat_norm)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)

    y_array = out_ws.readY(0)
    expected_y_array = lab_workspace.readY(0)
    # We scale and shift to the back (lab) detectors
    np.testing.assert_equal(y_array, expected_y_array)
def test_progress_handle(self):
    """Progress notifications, including the message text, reach the observer."""
    mock_alg = AlgorithmManager.create("MockAlgorithm", -1)
    watcher = MockObserver()
    watcher.observeProgress(mock_alg)
    mock_alg.execute()
    self.assertTrue(watcher.first_progress_reported)
    self.assertTrue(watcher.second_progress_reported)
    self.assertEqual(watcher.progress_message, 'Half way')
def test_error_handle(self):
    """A failing algorithm triggers both the finish and the error callbacks."""
    failing_alg = AlgorithmManager.create("MockAlgorithm", -1)
    failing_alg.setProperty("Error", True)
    watcher = MockObserver()
    watcher.observeFinish(failing_alg)
    watcher.observeError(failing_alg)
    failing_alg.execute()
    self.assertTrue(watcher.finish_handled)
    self.assertTrue(watcher.error_message.startswith('Error in algorithm'))
def test_scale_only_without_can(self):
    """ScaleOnly stitching with a fixed shift of -5 reproduces the LAB data."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', range(5, 14))
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'ScaleOnly')
    alg.setProperty('HABCountsSample', hab_workspace)
    alg.setProperty('LABCountsSample', lab_workspace)
    alg.setProperty('HABNormSample', flat_norm)
    alg.setProperty('LABNormSample', flat_norm)
    alg.setProperty('ShiftFactor', -5.0)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    y_array = out_ws.readY(0)
    expected_y_array = lab_workspace.readY(0)
    # We scale and shift to the back (lab) detectors
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)),
                    msg='All data should be scaled and shifted to the LAB scale=1 shift=-5')
def _run_algorithm(self, algorithm_name, child_algorithm=False, record_history=True, **kwargs):
    """ Create and run an algorithm not in the simpleapi

    @param algorithm_name: registered name of the algorithm to run
    @param child_algorithm: run as a child algorithm (no ADS interaction)
    @param record_history: whether history is recorded for the child
    @param kwargs: property name/value pairs forwarded to the algorithm
    @return: the executed algorithm, so callers can read its output properties
    """
    alg = AlgorithmManager.create(algorithm_name)
    alg.initialize()
    alg.setChild(child_algorithm)
    alg.enableHistoryRecordingForChild(record_history)
    # dict.items() works on both Python 2 and 3; no need for six.iteritems
    for key, value in kwargs.items():
        alg.setProperty(key, value)
    alg.execute()
    return alg
def test_bands_must_not_overlap(self):
    """Overlapping wavelength bands must make DetectorFloodWeighting fail."""
    weighting = AlgorithmManager.create("DetectorFloodWeighting")
    weighting.setChild(True)
    weighting.initialize()
    signal_value = 2
    input_ws = self._create_ws(units="Wavelength", signal_value=signal_value,
                               data_x=range(0, 10, 1))
    weighting.setProperty("InputWorkspace", input_ws)
    weighting.setProperty("Bands", [1, 3, 2, 4])  # Overlap!
    weighting.setPropertyValue("OutputWorkspace", "dummy")
    self.assertRaises(RuntimeError, weighting.execute)
def test_set_up_madatory(self):
    """Set every mandatory CreateMD property.

    NOTE(review): this test never calls execute() or validateInputs(), so it
    only verifies that setting these properties does not throw (setRethrows
    ensures property errors would surface) -- confirm that is the intent.
    """
    alg = AlgorithmManager.create("CreateMD")
    alg.setRethrows(True)
    alg.initialize()
    alg.setPropertyValue("OutputWorkspace", "mdworkspace")
    alg.setProperty("DataSources", ['a', 'b'])
    alg.setProperty("Emode", "Direct")
    alg.setProperty("Alatt", [1,1,1])
    alg.setProperty("Angdeg", [90,90,90])
    alg.setProperty("u", [0,0,1])
    alg.setProperty("v", [1,0,0])
def _create_ws(self, units="TOF", signal_value=2, data_x=None, n_spec=1):
    """Build a simple constant-signal workspace via CreateWorkspace.

    @param units: X-axis unit label
    @param signal_value: constant Y value for every bin
    @param data_x: bin boundaries; defaults to range(0, 10). The None
        sentinel avoids the mutable-default-argument pitfall of the
        original signature (data_x=range(0,10) is a shared list on Python 2).
    @param n_spec: number of spectra
    @return: the created workspace
    """
    if data_x is None:
        data_x = range(0, 10)
    data_y = [signal_value] * (len(data_x) - 1)
    alg = AlgorithmManager.create("CreateWorkspace")
    alg.setChild(True)
    alg.initialize()
    alg.setProperty("DataX", data_x)
    alg.setProperty("DataY", data_y)
    alg.setProperty("NSpec", n_spec)
    alg.setProperty("OutputWorkspace", "temp")
    alg.setProperty("UnitX", units)
    alg.execute()
    return alg.getProperty("OutputWorkspace").value
def do_test_scale_both(self, hab_range, min_x=0.0, max_x=1000.0):
    """Fit 'Both' over [min_x, max_x] and check scale=1, shift=-5 is recovered.

    @param hab_range: Y values for the HAB workspace (nominally y=x+5)
    @param min_x: lower bound of the fit range (FitMin)
    @param max_x: upper bound of the fit range (FitMax)
    """
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', hab_range)
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    alg.setProperty('HABWorkspace', hab_workspace)
    alg.setProperty('LABWorkspace', lab_workspace)
    alg.setProperty('ShiftFactor', -7.6)
    alg.setProperty('ScaleFactor', 2.4)
    alg.setProperty("FitMin", min_x)
    alg.setProperty("FitMax", max_x)
    alg.execute()

    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)
def test_scale_none(self):
    """SANSStitch in 'None' mode echoes the factors and averages the signals."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    in_scale_factor = 1.0
    in_shift_factor = 1.0
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', in_shift_factor)
    alg.setProperty('ScaleFactor', in_scale_factor)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(out_scale_factor, in_scale_factor)
    self.assertEqual(out_shift_factor, in_shift_factor)

    y_array = out_ws.readY(0)
    expected_y_array = [1.5] * 9
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)))
    # (Removed an unused trailing `x_array = out_ws.readX(0)` that asserted nothing.)
def _mask_to_roi(self, ws_mask):
    """Convert a mask workspace into a region-of-interest detector list.

    @param ws_mask: mask workspace whose *unmasked* detectors define the ROI
    @return: list of detector IDs forming the ROI
    """
    # invert mask and then extract "masked" detectors in order to get ROI
    # BUG in Mantid forces us to use AnalysisDataService
    alg = AlgorithmManager.create("InvertMask")
    alg.initialize()
    alg.setProperty("InputWorkspace", ws_mask)
    alg.setPropertyValue("OutputWorkspace", "_ws")
    alg.execute()
    # Fetch the inverted mask back through the ADS (see BUG note above).
    ws_tranmskinv = AnalysisDataService.retrieve("_ws")
    alg = self.createChildAlgorithm("ExtractMask")
    alg.setProperty("InputWorkspace", ws_tranmskinv)
    alg.execute()
    # Clean up the temporary ADS entry before returning.
    AnalysisDataService.remove("_ws")
    return alg.getProperty("DetectorList").value
def test_execute_multi_file(self):
    """CreateMD over two event files should yield a single IMDEventWorkspace."""
    alg = AlgorithmManager.create("CreateMD")
    alg.setRethrows(True)
    alg.initialize()
    alg.setPropertyValue("OutputWorkspace", "mdworkspace")
    # Same file twice: exercises the multi-file merge path.
    alg.setProperty("DataSources", ['CNCS_7860_event.nxs', 'CNCS_7860_event.nxs'])
    alg.setProperty("Alatt", [1,1,1])
    alg.setProperty("Angdeg", [90,90,90])
    # One fixed energy per input file.
    alg.setProperty("EFix", [12.0, 13.0])
    alg.setProperty("u", [0,0,1])
    alg.setProperty("v", [1,0,0])
    alg.execute()
    out_ws = AnalysisDataService.retrieve("mdworkspace")
    self.assertTrue(isinstance(out_ws, IMDEventWorkspace), "Expected an MDEventWorkspace back")
def test_managed_cppalg_isinstance_of_AlgorithmProxy(self):
    """Managed C++ algorithms come back wrapped in an AlgorithmProxy."""
    managed = AlgorithmManager.create("ConvertUnits")
    self.assertTrue(isinstance(managed, AlgorithmProxy))
def test_size_reports_number_of_managed_algorithms(self):
    """Creating one managed algorithm grows AlgorithmManager.size() by one."""
    size_before = AlgorithmManager.size()
    # Keep a reference so the new algorithm stays managed for the comparison.
    created = AlgorithmManager.create("ConvertUnits")
    self.assertEqual(AlgorithmManager.size(), size_before + 1)
def test_getAlgorithm_returns_correct_instance(self):
    """getAlgorithm must return the instance that matches a given algorithm ID."""
    returned_instance = AlgorithmManager.create("ConvertUnits")
    # Renamed from 'id' to avoid shadowing the builtin id()
    alg_id = returned_instance.getAlgorithmID()
    mgr_instance = AlgorithmManager.getAlgorithm(alg_id)
    self.assertEqual(alg_id, mgr_instance.getAlgorithmID())
def __init__(self):
    # Initialise the base Thread before attaching the algorithm this
    # thread will drive.
    threading.Thread.__init__(self)
    # Pre-create the Pause algorithm; presumably run() executes it later
    # (the rest of the class is not visible in this chunk -- confirm).
    self.algorithm = AlgorithmManager.create("Pause")
def CalculateEi(guess=None):
    """Create an InelasticCalcEi algorithm seeded with *guess* and register it
    as the reduction singleton's Ei calculator.

    @param guess: initial guess for the incident energy Ei.
        NOTE(review): the default None is passed straight to setProperty,
        which may reject it -- confirm callers always supply a guess.
    """
    alg = AlgorithmManager.create("InelasticCalcEi")
    alg.setProperty("EiGuess",guess)
    ReductionSingleton().set_ei_calculator(alg)
def test_initalize(self):
    """SANSStitch reports initialized once initialize() has been called."""
    stitch = AlgorithmManager.create('SANSStitch')
    stitch.setChild(True)
    stitch.initialize()
    self.assertTrue(stitch.isInitialized())
def test_created_alg_isinstance_of_IAlgorithm(self):
    """Anything produced by AlgorithmManager.create implements IAlgorithm."""
    created = AlgorithmManager.create("ConvertUnits")
    self.assertTrue(isinstance(created, IAlgorithm))
def _update_content(self):
    """
    Get the job status from the compute resource and update the
    job table content.

    Authenticates with the remote compute resource using the
    credentials in the dialog, queries all remote jobs, and fills the
    job table with one row per job (ID, title, status, start/end time,
    and an Abort/Remove button). Jobs older than the dialog's date/time
    cutoff are hidden. Returns early (marking both credential fields
    invalid) when either username or password is empty.
    """
    self._fill_in_defaults()

    # Read the credentials typed into the dialog.
    user = str(self._content.username_edit.text())
    pwd = str(self._content.password_edit.text())
    if len(user) == 0 or len(pwd) == 0:
        # Missing credentials: flag both fields and bail out.
        util.set_valid(self._content.username_edit, False)
        util.set_valid(self._content.password_edit, False)
        return
    else:
        # Persist the credentials and mark the fields valid.
        self._settings.cluster_user = user
        self._settings.cluster_pass = pwd
        util.set_valid(self._content.username_edit, True)
        util.set_valid(self._content.password_edit, True)

    # Authenticate against the configured compute resource.
    alg = AlgorithmManager.create("Authenticate")
    alg.initialize()
    alg.setProperty("ComputeResource", str(self._settings.compute_resource))
    alg.setProperty("UserName", str(self._settings.cluster_user))
    alg.setProperty("Password", str(self._settings.cluster_pass))
    alg.execute()

    # Query every remote job visible to this user.
    alg = AlgorithmManager.create("QueryAllRemoteJobs")
    alg.initialize()
    alg.setProperty("ComputeResource", str(self._settings.compute_resource))
    alg.execute()
    job_id = alg.getProperty("JobId").value
    job_status = alg.getProperty("JobStatusString").value
    job_name = alg.getProperty("JobName").value
    job_trans_id = alg.getProperty("TransID").value
    njobs = len(job_name)  # NOTE(review): unused below — kept as-is
    job_start = alg.getProperty("StartDate").value
    job_end = alg.getProperty("CompletionDate").value

    # Collate the parallel property arrays into per-job tuples:
    # (id, status, name, start, end, trans_id).
    job_list = zip(*(job_id, job_status, job_name, job_start,
                     job_end, job_trans_id))

    self._clear_table()
    # Disable sorting while rows are inserted so indices stay stable.
    self._content.job_table.setSortingEnabled(False)
    self._content.job_table.setRowCount(len(job_list))

    # Sentinel for "no completion date available".
    unavailable = DateAndTime(0)
    unavailable.setToMinimum()

    for i in range(len(job_list)):
        # Make sure that only recent jobs are displayed: anything that
        # finished before the dialog's cutoff date is hidden.
        oldest = DateAndTime(
            str(self._content.date_time_edit.dateTime().toString(
                QtCore.Qt.ISODate)))
        end_time = job_list[i][4]
        if end_time == '':
            job_end = unavailable
        else:
            job_end = DateAndTime(end_time)
        if job_end > unavailable and job_end < oldest:
            self._content.job_table.setRowHidden(i, True)
            continue
        self._content.job_table.setRowHidden(i, False)

        # Job ID (column 0, read-only)
        item = QtGui.QTableWidgetItem(str(job_list[i][0]))
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        self._content.job_table.setItem(i, 0, item)
        job_id = str(job_list[i][0])

        # Title (column 1)
        item = QtGui.QTableWidgetItem(str(job_list[i][2]))
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        self._content.job_table.setItem(i, 1, item)

        # Status (column 2)
        item = QtGui.QTableWidgetItem(str(job_list[i][1]))
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        self._content.job_table.setItem(i, 2, item)
        is_running = str(job_list[i][1]).lower() == 'running'

        # Start time (column 3); blank if the date is unavailable.
        time_displayed = str(job_list[i][3]).replace('T', ' ')
        if DateAndTime(job_list[i][3]) == unavailable:
            time_displayed = ''
        item = QtGui.QTableWidgetItem(time_displayed)
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        self._content.job_table.setItem(i, 3, item)

        # Completion time (column 4); blank if still unavailable.
        time_displayed = end_time.replace('T', ' ')
        if job_end == unavailable:
            time_displayed = ''
        item = QtGui.QTableWidgetItem(time_displayed)
        item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        self._content.job_table.setItem(i, 4, item)

        # Create a cell widget: Abort for running jobs, Remove otherwise.
        btn = QtGui.QPushButton(self._content.job_table)
        if is_running:
            btn.setText('Abort')
            btn.setToolTip('Cleanly abort this job')
        else:
            btn.setText('Remove')
            btn.setToolTip('Remove this job and its temporary files')
        # Bind the row's job data into the click handler now, since the
        # loop variables will have moved on by the time it fires.
        call_back = partial(self._remove_job, is_running=is_running,
                            job_id=job_id, trans_id=job_list[i][5])
        self.connect(btn, QtCore.SIGNAL("clicked()"), call_back)
        self._content.job_table.setCellWidget(i, 5, btn)

    # Re-enable sorting and order by start time, newest first.
    self._content.job_table.setSortingEnabled(True)
    self._content.job_table.sortItems(3, 1)
def group_spectra_of(workspace, masked_detectors, method,
                     group_file=None, group_ws=None, group_string=None):
    """
    Groups spectra in a given workspace according to the
    Workflow.GroupingMethod and Workflow.GroupingFile parameters and
    GroupingPolicy property.

    Returns the grouped workspace, or None when the method resolves to
    'Individual' (nothing to group). Raises RuntimeError for a missing
    grouping file or an unrecognised method.

    @param workspace Workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    @param group_string String for custom method - comma separated list or range
    """
    instrument = workspace.getInstrument()

    group_detectors = AlgorithmManager.create("GroupDetectors")
    group_detectors.setChild(True)
    group_detectors.setProperty("InputWorkspace", workspace)
    group_detectors.setProperty("Behaviour", 'Average')

    # If grouping as per the IPF is desired
    if method == 'IPF':
        # Get the grouping method from the parameter file; fall back to
        # 'Individual' when the IPF does not define one.
        try:
            grouping_method = instrument.getStringParameter(
                'Workflow.GroupingMethod')[0]
        except IndexError:
            grouping_method = 'Individual'
    else:
        # Otherwise use the value of GroupingPolicy
        grouping_method = method

    logger.information('Grouping method for workspace %s is %s' %
                       (workspace.name(), grouping_method))

    if grouping_method == 'Individual':
        # Nothing to do here
        return None

    elif grouping_method == 'All':
        # Get a list of all spectra minus those which are masked
        num_spec = workspace.getNumberHistograms()
        spectra_list = [spec for spec in range(0, num_spec)
                        if spec not in masked_detectors]

        # Apply the grouping
        group_detectors.setProperty("WorkspaceIndexList", spectra_list)

    elif grouping_method == 'File':
        # Get the filename for the grouping file: explicit argument wins,
        # otherwise fall back to the IPF parameter.
        if group_file is not None:
            grouping_file = group_file
        else:
            try:
                grouping_file = instrument.getStringParameter(
                    'Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError(
                    'Cannot get grouping file from properties or IPF.')

        # If the file is not found assume it is in the grouping files
        # directory
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(
                config.getString('groupingFiles.directory'), grouping_file)

        # If it is still not found just give up
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' %
                               grouping_file)

        # Mask detectors if required
        if len(masked_detectors) > 0:
            _mask_detectors(workspace, masked_detectors)

        # Apply the grouping
        group_detectors.setProperty("MapFile", grouping_file)

    elif grouping_method == 'Workspace':
        # Apply the grouping
        group_detectors.setProperty("CopyGroupingFromWorkspace", group_ws)

    elif grouping_method == 'Custom':
        # Mask detectors if required
        if len(masked_detectors) > 0:
            _mask_detectors(workspace, masked_detectors)
        # Custom grouping is delegated entirely to group_on_string.
        return group_on_string(group_detectors, group_string)

    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' %
                           (grouping_method, workspace.name()))

    group_detectors.setProperty("OutputWorkspace", "__temp")
    group_detectors.execute()
    return group_detectors.getProperty("OutputWorkspace").value
def FixEi(ei):
    """Register an InelasticFixEi algorithm pinning the incident energy to ``ei``."""
    fixer = AlgorithmManager.create("InelasticFixEi")
    fixer.setProperty("Ei", ei)
    ReductionSingleton().set_ei_calculator(fixer)