def test_runningInstancesOf_returns_python_list(self):
    """runningInstancesOf returns a Python list both when nothing is
    running and when an algorithm is genuinely in flight."""
    algs = AlgorithmManager.runningInstancesOf("ConvertUnits")
    self.assertTrue(isinstance(algs, list))

    import threading

    class AlgThread(threading.Thread):
        """Runs a Pause algorithm; Duration=-1 means run until cancelled."""
        def __init__(self):
            threading.Thread.__init__(self)
            self.algorithm = AlgorithmManager.create("Pause")

        def run(self):
            self.algorithm.initialize()
            self.algorithm.setProperty("Duration", -1.0)  # forever
            self.algorithm.execute()
    # end class

    pause_thread = AlgThread()
    try:
        pause_thread.start()
        # Spin until the algorithm reports that it is running
        while not pause_thread.algorithm.isRunning():
            pass
        # should now be running
        algs = AlgorithmManager.runningInstancesOf("Pause")
        self.assertTrue(isinstance(algs, list))
        # assertEquals is a deprecated alias, removed in Python 3.12
        self.assertEqual(1, len(algs))
    finally:
        # A single finally covers both the success and failure paths; the
        # previous bare-except block cancelled and joined twice on error.
        pause_thread.algorithm.cancel()
        pause_thread.join()
def setUp(self):
    """Lazily build and initialize the shared unmanaged algorithms,
    caching them on the class so they are created only once."""
    cls = self.__class__
    if self._integration is None:
        cls._integration = AlgorithmManager.createUnmanaged("Integration")
        cls._integration.initialize()
    if self._mask_dets is None:
        cls._mask_dets = AlgorithmManager.createUnmanaged("MaskDetectors")
        cls._mask_dets.initialize()
def test_newestInstanceOf_returns_correct_instance(self):
    """newestInstanceOf must return the most recently created instance."""
    alg = AlgorithmManager.create("ConvertUnits")
    alg2 = AlgorithmManager.create("ConvertUnits")
    alg3 = AlgorithmManager.newestInstanceOf("ConvertUnits")
    # assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(alg2.getAlgorithmID(), alg3.getAlgorithmID())
    self.assertNotEqual(alg.getAlgorithmID(), alg3.getAlgorithmID())
def test_AlgorithmID_compares_by_value(self):
    """AlgorithmID comparison is by value: an ID equals itself and
    differs from the ID of a second instance."""
    alg = AlgorithmManager.createUnmanaged('Load')
    alg_id = alg.getAlgorithmID()  # renamed: 'id' shadowed the builtin
    # assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(alg_id, alg_id)  # equals itself
    alg2 = AlgorithmManager.createUnmanaged('Load')
    alg2_id = alg2.getAlgorithmID()
    self.assertNotEqual(alg2_id, alg_id)
def test_workspace_entries_must_be_q1d(self):
    """validateInputs must flag every sample workspace that is not Q1D
    (here: a workspace with two spectra)."""
    # Build an input workspace with the wrong number of spectra.
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 1))
    create_alg.setProperty('DataY', [1, 2])
    create_alg.setProperty('NSpec', 2)  # wrong number of spectra
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    multi_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    sample_props = ('HABCountsSample', 'LABCountsSample',
                    'HABNormSample', 'LABNormSample')
    for prop in sample_props:
        alg.setProperty(prop, multi_spectra_input)

    errors = alg.validateInputs()
    # Every sample input should be reported as invalid.
    for prop in sample_props:
        self.assertTrue(prop in errors)
def test_can_workspaces_required_if_process_can(self):
    """Enabling ProcessCan makes all four can workspaces mandatory."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 1))
    create_alg.setProperty('DataY', [1])
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    for sample_prop in ('HABCountsSample', 'LABCountsSample',
                        'HABNormSample', 'LABNormSample'):
        alg.setProperty(sample_prop, single_spectra_input)
    # Now can workspaces should be provided
    alg.setProperty('ProcessCan', True)

    errors = alg.validateInputs()
    for can_prop in ('HABCountsCan', 'LABCountsCan',
                     'HABNormCan', 'LABNormCan'):
        self.assertTrue(can_prop in errors)
def test_strip_special_values(self):
    """Executing with NaN/inf entries in the counts must not throw —
    special values are stripped before fitting."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    # Seven finite points followed by one NaN and one inf.
    y_data = np.concatenate((np.array([1] * 7), [np.nan], [np.inf]))
    create_alg.setProperty('DataY', y_data)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    for prop in ('HABCountsSample', 'LABCountsSample',
                 'HABNormSample', 'LABNormSample'):
        alg.setProperty(prop, single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    # This would throw at the point of fitting if NaNs or infs were present
    alg.execute()
def test_that_can_merge_2D_reduction_when_fitting_set_to_none(self):
    """Mode='None' accepts 2D (multi-spectrum) inputs and merges them as
    (hab_counts + lab_counts) / (hab_norm + lab_norm)."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 2))
    create_alg.setProperty('NSpec', 2)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setProperty('VerticalAxisUnit', 'MomentumTransfer')
    create_alg.setProperty('VerticalAxisValues', range(0, 2))

    def build(counts):
        # Re-run the cached creation algorithm with new counts.
        create_alg.setProperty('DataY', counts)
        create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
        create_alg.execute()
        return create_alg.getProperty('OutputWorkspace').value

    hab_counts = build([1, 1, 1, 1])
    hab_norm = build([2, 2, 2, 2])
    lab_counts = build([3, 3, 3, 3])
    lab_norm = build([4, 4, 4, 4])

    # Basic algorithm setup
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('HABCountsSample', hab_counts)
    alg.setProperty('LABCountsSample', lab_counts)
    alg.setProperty('HABNormSample', hab_norm)
    alg.setProperty('LABNormSample', lab_norm)
    alg.setProperty('ProcessCan', False)
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    alg.setProperty('Mode', 'None')
    alg.setProperty('OutputWorkspace', 'dummy_name')

    errors = alg.validateInputs()
    self.assertEqual(0, len(errors))

    alg.execute()
    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    self.assertTrue(out_ws.getNumberHistograms() == 2)

    expected_entries = (1. + 3.) / (2. + 4.)
    delta = 1e-5
    for index in range(0, 2):
        for element in out_ws.dataY(index):
            self.assertTrue(abs(expected_entries - element) < delta)
def _remove_job(self, trans_id, job_id=None, is_running=False):
    """
    Abort job and/or stop transaction
    @param trans_id: remote transaction ID
    @param job_id: remote job ID
    @param is_running: True if the job is currently running
    """
    if is_running:
        try:
            # At this point we are authenticated so just purge
            alg = AlgorithmManager.create("AbortRemoteJob")
            alg.initialize()
            alg.setProperty("ComputeResource", str(self._settings.compute_resource))
            alg.setProperty("JobID", job_id)
            alg.execute()
        except Exception as exc:
            # sys.exc_value is Python 2 only; report the caught exception
            Logger("cluster_status").error("Problem aborting job: %s" % str(exc))
    try:
        alg = AlgorithmManager.create("StopRemoteTransaction")
        alg.initialize()
        alg.setProperty("ComputeResource", str(self._settings.compute_resource))
        alg.setProperty("TransactionID", trans_id)
        alg.execute()
    except Exception as exc:
        # Fixed log typo: "Project stopping" -> "Problem stopping"
        Logger("cluster_status").error("Problem stopping remote transaction: %s" % str(exc))
    self._update_content()
def test_scale_only(self):
    """ScaleOnly mode: the supplied shift is kept fixed and only the scale
    is fitted; two identical lines offset by the shift give scale 1."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', range(5, 14))
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'ScaleOnly')
    alg.setProperty('HABWorkspace', hab_workspace)
    alg.setProperty('LABWorkspace', lab_workspace)
    alg.setProperty('ShiftFactor', -5.0)
    alg.setProperty('ScaleFactor', 7.2)
    alg.execute()
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)
def test_scale_none(self):
    """Mode='None': shift and scale factors pass through unchanged."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    single_spectra_input = create_alg.getProperty('OutputWorkspace').value

    in_scale_factor = 1.0
    in_shift_factor = 1.0
    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABWorkspace', single_spectra_input)
    alg.setProperty('LABWorkspace', single_spectra_input)
    alg.setProperty('ShiftFactor', in_shift_factor)
    alg.setProperty('ScaleFactor', in_scale_factor)
    alg.execute()
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(out_scale_factor, in_scale_factor)
    self.assertEqual(out_shift_factor, in_shift_factor)
def validateInputs(self):
    """Verify that the requested loader name/version pair is registered.

    :return: dict of property name -> error message; empty when valid
    """
    issues = dict()
    loader = self.getPropertyValue('LoaderName')
    version = self.getProperty('LoaderVersion').value
    try:
        AlgorithmManager.createUnmanaged(loader, version)
    except RuntimeError:
        # Creation failing means the loader/version combination is unknown.
        message = '{0}-v{1} is not registered with Mantid.'.format(loader, str(version))
        issues['LoaderName'] = message
        issues['LoaderVersion'] = message
    return issues
def test_scale_both_without_can_with_q_fit_range(self):
    """'Both' mode with FitMin/FitMax: outlier bins outside the fit range
    must not disturb the fitted scale (1) and shift (-5)."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 12))
    # HAB as linear function y=x+5, with outliers outside the fit range
    hab_range = list(range(5, 16))
    hab_range[0] = 15000
    hab_range[9] = 15000
    create_alg.setProperty('DataY', hab_range)
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 11))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 11)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    alg.setProperty('HABCountsSample', hab_workspace)
    alg.setProperty('LABCountsSample', lab_workspace)
    alg.setProperty('HABNormSample', flat_norm)
    alg.setProperty('LABNormSample', flat_norm)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('FitMin', 1)
    alg.setProperty('FitMax', 9)
    alg.execute()
    out_ws = alg.getProperty('OutputWorkspace').value
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias, removed in Python 3.12
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)

    # Removed redundant second fetch of OutputWorkspace (already held above)
    y_array = out_ws.readY(0)
    expected_y_array = [7497.5, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 7502, 10.0]
    # We scale and shift to the back (lab) detectors
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)),
                    msg='All data should be scaled and shifted to the LAB scale=1 shift=-5')
def test_stitch_2d_restricted_to_none(self):
    """2D (multi-spectrum) inputs are only permitted when Mode='None';
    every fitting mode must reject them."""
    # Build an input workspace with two spectra.
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 1))
    create_alg.setProperty('DataY', [1, 1])
    create_alg.setProperty('NSpec', 2)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.execute()
    double_spectra_input = create_alg.getProperty('OutputWorkspace').value

    # Basic algorithm setup
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    sample_props = ('HABCountsSample', 'LABCountsSample',
                    'HABNormSample', 'LABNormSample')
    for prop in sample_props:
        alg.setProperty(prop, double_spectra_input)
    alg.setProperty('ProcessCan', False)
    alg.setProperty('ShiftFactor', 1.0)
    alg.setProperty('ScaleFactor', 0.0)

    # 2D inputs should not be allowed for any fitting mode
    for mode in ('Both', 'ScaleOnly', 'ShiftOnly'):
        alg.setProperty('Mode', mode)
        errors = alg.validateInputs()
        for prop in sample_props:
            self.assertTrue(prop in errors)

    # With no fitting 2D inputs are allowed
    alg.setProperty('Mode', 'None')
    errors = alg.validateInputs()
    self.assertEqual(0, len(errors))
def test_that_zero_merge_range_has_discrete_transition(self):
    # This tests that if a merge_max or merge_min is specified greater than
    # the overlap region of the HAB and LAB the relevant value is set to the
    # maximum value, producing a discrete HAB->LAB transition at that bin.
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')

    def build(counts):
        # Re-execute the cached creation algorithm with fresh counts.
        create_alg.setProperty('DataY', counts)
        create_alg.execute()
        return create_alg.getProperty('OutputWorkspace').value

    single_spectra_input = build([1] * 9)
    single_spectra_input_HAB = build([2] * 9)
    smaller_single_spectra_input = build([0.5] * 9)

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input_HAB)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('ProcessCan', True)
    alg.setProperty('HABCountsCan', smaller_single_spectra_input)
    alg.setProperty('LABCountsCan', smaller_single_spectra_input)
    alg.setProperty('HABNormCan', single_spectra_input)
    alg.setProperty('LABNormCan', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    alg.setProperty('MergeMask', True)
    # Identical min and max -> a zero-width merge region
    alg.setProperty('MergeMin', 5)
    alg.setProperty('MergeMax', 5)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    y_array = out_ws.readY(0)
    expected_y_array = [0.5] * 5 + [1.5] * 4
    np.testing.assert_equal(y_array, expected_y_array)
def test_scale_none_with_can_and_q_merge_range(self):
    """With ProcessCan and a MergeMin/MergeMax window, the can is
    subtracted and the merge blends HAB into LAB across the window."""
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('DataX', range(0, 10))
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')

    def build(counts):
        # Re-execute the cached creation algorithm with fresh counts.
        create_alg.setProperty('DataY', counts)
        create_alg.execute()
        return create_alg.getProperty('OutputWorkspace').value

    single_spectra_input = build([1] * 9)
    single_spectra_input_HAB = build([2] * 9)
    smaller_single_spectra_input = build([0.5] * 9)

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    alg.setProperty('HABCountsSample', single_spectra_input_HAB)
    alg.setProperty('LABCountsSample', single_spectra_input)
    alg.setProperty('HABNormSample', single_spectra_input)
    alg.setProperty('LABNormSample', single_spectra_input)
    alg.setProperty('ProcessCan', True)
    alg.setProperty('HABCountsCan', smaller_single_spectra_input)
    alg.setProperty('LABCountsCan', smaller_single_spectra_input)
    alg.setProperty('HABNormCan', single_spectra_input)
    alg.setProperty('LABNormCan', single_spectra_input)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.setProperty('ShiftFactor', 0.0)
    alg.setProperty('ScaleFactor', 1.0)
    alg.setProperty('MergeMask', True)
    alg.setProperty('MergeMin', 2)
    alg.setProperty('MergeMax', 7)
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))
    y_array = out_ws.readY(0)
    expected_y_array = [0.5] * 2 + [1.0] * 5 + [1.5] * 2
    self.assertTrue(all(map(lambda element: element in y_array, expected_y_array)),
                    msg='can gets subtracted so expect 1 - 0.5 as output signal. Proves the can workspace gets used correctly.')
def test_with_workspace_types(self):
    """createChildAlgorithm accepts workspace-typed keyword args and the
    resulting Fit produces a Workspace output."""
    ws = CreateSampleWorkspace(
        Function="User Defined",
        UserDefinedFunction="name=LinearBackground, A0=0.3;name=Gaussian, "
                            "PeakCentre=5, Height=10, Sigma=0.7",
        NumBanks=1, BankPixelWidth=1, XMin=0, XMax=10, BinWidth=0.1)
    # Setup the model, here a Gaussian, to fit to data
    tryCentre = '4'  # A start guess on peak centre
    sigma = '1'      # A start guess on peak width
    height = '8'     # A start guess on peak height
    myFunc = 'name=Gaussian, Height={0}, PeakCentre={1}, Sigma={2}'.format(
        height, tryCentre, sigma)
    args = {"Function": myFunc, "InputWorkspace": ws, "Output": 'fit'}
    parent_alg = AlgorithmManager.createUnmanaged('Load')
    child_alg = parent_alg.createChildAlgorithm('Fit', 0, 0, True, version=1, **args)
    child_alg.execute()
    out_ws = child_alg.getProperty("OutputWorkspace").value
    self.assertIsInstance(out_ws, Workspace)
def _provide_sample_workspace():
    """Build and return the default CreateSampleWorkspace output via an
    unmanaged child algorithm (nothing is registered in the ADS)."""
    sample_alg = AlgorithmManager.createUnmanaged("CreateSampleWorkspace")
    sample_alg.setChild(True)
    sample_alg.initialize()
    sample_alg.setProperty("OutputWorkspace", "dummy")
    sample_alg.execute()
    return sample_alg.getProperty("OutputWorkspace").value
def test_generic_dialog(self):
    """A GenericDialog built for the mock algorithm exposes one QLineEdit
    per algorithm property (three in total)."""
    dialog = GenericDialog()
    mock_alg = AlgorithmManager.create('AlgorithmDialogMockAlgorithm')
    dialog.setAlgorithm(mock_alg)
    dialog.initializeLayout()
    line_edits = dialog.findChildren(QLineEdit)
    self.assertEqual(len(line_edits), 3)
def _run_load(state, publish_to_cache, use_cached, move_workspace=False, beam_coordinates=None, component=None, output_workspace_names=None):
    """Configure and execute a child SANSLoad algorithm for *state* and
    return the executed algorithm so callers can read its outputs."""
    load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
    load_alg.setChild(True)
    load_alg.setRethrows(True)
    load_alg.initialize()
    load_alg.setProperty("SANSState", state.property_manager)
    load_alg.setProperty("PublishToCache", publish_to_cache)
    load_alg.setProperty("UseCached", use_cached)
    if move_workspace:
        load_alg.setProperty("Component", component)
        load_alg.setProperty("BeamCoordinates", beam_coordinates)
    # Forward any requested output workspace names.
    for name, value in (output_workspace_names or {}).items():
        load_alg.setProperty(name, value)
    # Act
    load_alg.execute()
    return load_alg
def _run_move(self, state, workspace, move_type, beam_coordinates=None, component=None):
    """Configure and execute a child SANSMove algorithm, assert success,
    and return the executed algorithm."""
    move_alg = AlgorithmManager.createUnmanaged("SANSMove")
    move_alg.setChild(True)
    move_alg.initialize()
    move_alg.setProperty("SANSState", state.property_manager)
    move_alg.setProperty("Workspace", workspace)
    move_alg.setProperty("MoveType", move_type)
    # Optional properties are only set when explicitly supplied.
    for prop, value in (("BeamCoordinates", beam_coordinates),
                        ("Component", component)):
        if value is not None:
            move_alg.setProperty(prop, value)
    # Act
    move_alg.execute()
    self.assertTrue(move_alg.isExecuted())
    return move_alg
def get_geometry_information_raw(file_name):
    """
    Gets the geometry information from the table workspace with the spb information
    :param file_name: the full file name to an existing raw file.
    :return: height, width, thickness and shape
    """
    raw_info = AlgorithmManager.createUnmanaged("RawFileInfo")
    raw_info.initialize()
    raw_info.setChild(True)
    raw_info.setProperty("Filename", file_name)
    raw_info.setProperty("GetRunParameters", False)
    raw_info.setProperty("GetSampleParameters", True)
    raw_info.execute()

    sample_parameters = raw_info.getProperty("SampleParameterTable").value
    columns = sample_parameters.getColumnNames()

    def first_entry(column_id):
        # Each geometry column holds a single value for the run.
        return sample_parameters.column(columns.index(column_id))[0]

    height = first_entry(E_HEIGHT)
    width = first_entry(E_WIDTH)
    thickness = first_entry(E_THICK)
    shape = convert_to_shape(first_entry(E_GEOM))
    return height, width, thickness, shape
def _provide_workspace_with_x_errors(
        self, workspace_name, use_xerror=True, nspec=1,
        x_in=None, y_in=None, e_in=None, x_error=None):
    """Create a workspace via CreateWorkspace, optionally attaching Dx errors.

    :param workspace_name: name for the OutputWorkspace property
    :param use_xerror: when True, set *x_error* as Dx on every spectrum
    :param nspec: number of spectra; the input lists are tiled nspec times
    :param x_in/y_in/e_in: per-spectrum X/Y/E data (see defaults below)
    :param x_error: per-spectrum Dx values
    :return: the created workspace
    """
    # None-sentinel defaults: mutable (list) default arguments are shared
    # across calls — a classic Python pitfall.
    if x_in is None:
        x_in = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    if y_in is None:
        y_in = [2, 2, 2, 2, 2, 2, 2, 2, 2]
    if e_in is None:
        e_in = [1, 1, 1, 1, 1, 1, 1, 1, 1]
    if x_error is None:
        x_error = [1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9]
    x = []
    y = []
    e = []
    for item in range(0, nspec):
        x = x + x_in
        y = y + y_in
        e = e + e_in
    ws_alg = AlgorithmManager.createUnmanaged("CreateWorkspace")
    ws_alg.initialize()
    ws_alg.setChild(True)
    ws_alg.setProperty("DataX", x)
    ws_alg.setProperty("DataY", y)
    ws_alg.setProperty("DataE", e)
    ws_alg.setProperty("NSpec", nspec)
    ws_alg.setProperty("UnitX", "MomentumTransfer")
    ws_alg.setProperty("OutputWorkspace", workspace_name)
    ws_alg.execute()
    ws = ws_alg.getProperty("OutputWorkspace").value
    if use_xerror:
        for hists in range(0, nspec):
            x_error_array = np.asarray(x_error)
            ws.setDx(hists, x_error_array)
    return ws
def test_fit_shift_requires_scale_factor(self):
    """ShiftOnly mode cannot validate without a ScaleFactor supplied."""
    alg = AlgorithmManager.create('SANSFitShiftScale')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'ShiftOnly')
    validation_errors = alg.validateInputs()
    self.assertTrue('ScaleFactor' in validation_errors)
def setUp(self):
    """Start the framework, register the test algorithm, and cache a
    shared unmanaged Load instance on the class."""
    FrameworkManagerImpl.Instance()
    factory = AlgorithmFactory.Instance()
    self._alg_factory = factory
    factory.subscribe(_ParamTester)
    if self._load is None:
        load_alg = AlgorithmManager.createUnmanaged('Load')
        load_alg.initialize()
        self.__class__._load = load_alg
def evaluate_function(self, ws_name, fun, out_ws_name):
    """
    Evaluates the guess workspace for the input workspace and function
    :param ws_name: Name of the workspace in the ADS
    :param fun: Function to be evaluated
    :param out_ws_name: Output workspace name
    :return: Output guess workspace
    """
    browser = self.fit_browser
    workspace = AnalysisDataService.retrieve(ws_name)
    alg = AlgorithmManager.createUnmanaged('EvaluateFunction')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Function', fun)
    alg.setProperty('InputWorkspace', ws_name)
    if isinstance(workspace, ITableWorkspace):
        # Table input: evaluate over the selected X/Y (and optional error) columns.
        alg.setProperty('XColumn', browser.getXColumnName())
        alg.setProperty('YColumn', browser.getYColumnName())
        if browser.getErrColumnName():
            alg.setProperty('ErrColumn', browser.getErrColumnName())
    else:
        # Matrix input: evaluate over the chosen spectrum and X range.
        alg.setProperty('WorkspaceIndex', browser.workspaceIndex())
        alg.setProperty('StartX', browser.startX())
        alg.setProperty('EndX', browser.endX())
    alg.setProperty('IgnoreInvalidData', browser.ignoreInvalidData())
    alg.setProperty('OutputWorkspace', out_ws_name)
    alg.execute()
    return alg.getProperty('OutputWorkspace').value
def plot_guess(self):
    """
    Plot the guess curve.
    """
    from mantidqt.plotting.functions import plot
    fun = self.getFittingFunction()
    ws_name = self.workspaceName()
    # Nothing sensible to plot without both a function and a workspace.
    if fun == '' or ws_name == '':
        return
    ws_index = self.workspaceIndex()
    out_ws_name = '{}_guess'.format(ws_name)
    alg = AlgorithmManager.createUnmanaged('EvaluateFunction')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Function', fun)
    alg.setProperty('InputWorkspace', ws_name)
    alg.setProperty('WorkspaceIndex', ws_index)
    alg.setProperty('OutputWorkspace', out_ws_name)
    alg.execute()
    evaluated = alg.getProperty('OutputWorkspace').value
    plot([evaluated], wksp_indices=[1], fig=self.canvas.figure,
         overplot=True, plot_kwargs={'label': out_ws_name})
    # Remember the newly added line so 'Remove Guess' can find it later.
    for line in self.get_lines():
        if line.get_label().startswith(out_ws_name):
            self.guess_line = line
    self.setTextPlotGuess('Remove Guess')
    self.canvas.draw()
def _load_workspace(self, state):
    """Run SANSLoad as a child algorithm for *state* and return the loaded
    scatter, monitor, transmission and direct workspaces; the last two are
    None when the state does not define them."""
    load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
    load_alg.setChild(True)
    load_alg.initialize()
    load_alg.setProperty("SANSState", state.property_manager)
    load_alg.setProperty("PublishToCache", False)
    load_alg.setProperty("UseCached", False)
    load_alg.setProperty("MoveWorkspace", False)
    load_alg.setProperty("SampleScatterWorkspace", EMPTY_NAME)
    load_alg.setProperty("SampleScatterMonitorWorkspace", EMPTY_NAME)
    has_transmission = bool(state.data.sample_transmission)
    has_direct = bool(state.data.sample_direct)
    if has_transmission:
        load_alg.setProperty("SampleTransmissionWorkspace", EMPTY_NAME)
    if has_direct:
        load_alg.setProperty("SampleDirectWorkspace", EMPTY_NAME)
    # Act
    load_alg.execute()
    self.assertTrue(load_alg.isExecuted())
    sample_scatter = load_alg.getProperty("SampleScatterWorkspace").value
    sample_scatter_monitor_workspace = load_alg.getProperty("SampleScatterMonitorWorkspace").value
    transmission_workspace = (load_alg.getProperty("SampleTransmissionWorkspace").value
                              if has_transmission else None)
    direct_workspace = (load_alg.getProperty("SampleDirectWorkspace").value
                        if has_direct else None)
    return sample_scatter, sample_scatter_monitor_workspace, transmission_workspace, direct_workspace
def _run_reduction_core(self, state, workspace, monitor, transmission=None, direct=None, detector_type=DetectorType.LAB, component=DataType.Sample):
    """Configure and execute a child SANSReductionCore algorithm, assert it
    executed, and return it so callers can read the outputs."""
    core_alg = AlgorithmManager.createUnmanaged("SANSReductionCore")
    core_alg.setChild(True)
    core_alg.initialize()
    core_alg.setProperty("SANSState", state.property_manager)
    core_alg.setProperty("ScatterWorkspace", workspace)
    core_alg.setProperty("ScatterMonitorWorkspace", monitor)
    if transmission:
        core_alg.setProperty("TransmissionWorkspace", transmission)
    if direct:
        core_alg.setProperty("DirectWorkspace", direct)
    core_alg.setProperty("Component", DetectorType.to_string(detector_type))
    core_alg.setProperty("DataType", DataType.to_string(component))
    # All outputs use the placeholder name for child execution.
    for output_prop in ("OutputWorkspace", "CalculatedTransmissionWorkspace",
                        "UnfittedTransmissionWorkspace"):
        core_alg.setProperty(output_prop, EMPTY_NAME)
    # Act
    core_alg.execute()
    self.assertTrue(core_alg.isExecuted())
    return core_alg
def _provide_workspace_with_x_errors(self, workspace_name, use_xerror=None, nspec=1,
                                     x_in=None, y_in=None, e_in=None, x_error=None):
    """Create a workspace via CreateWorkspace, optionally attaching Dx errors.

    :param workspace_name: name for the OutputWorkspace property
    :param use_xerror: when True (default), set *x_error* as Dx on every spectrum
    :param nspec: number of spectra; the input lists are tiled nspec times
    :param x_in/y_in/e_in: per-spectrum X/Y/E data (see defaults below)
    :param x_error: per-spectrum Dx values
    :return: the created workspace
    """
    # None-sentinel defaults: mutable (list) default arguments are shared
    # across calls — a classic Python pitfall.
    if use_xerror is None:
        use_xerror = True
    if x_in is None:
        x_in = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    if y_in is None:
        y_in = [2, 2, 2, 2, 2, 2, 2, 2, 2]
    if e_in is None:
        e_in = [1, 1, 1, 1, 1, 1, 1, 1, 1]
    if x_error is None:
        x_error = [1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.1]
    x = []
    y = []
    e = []
    for item in range(0, nspec):
        x = x + x_in
        y = y + y_in
        e = e + e_in
    ws_alg = AlgorithmManager.createUnmanaged("CreateWorkspace")
    ws_alg.initialize()
    ws_alg.setChild(True)
    ws_alg.setProperty("DataX", x)
    ws_alg.setProperty("DataY", y)
    ws_alg.setProperty("DataE", e)
    ws_alg.setProperty("NSpec", nspec)
    ws_alg.setProperty("UnitX", "MomentumTransfer")
    ws_alg.setProperty("OutputWorkspace", workspace_name)
    ws_alg.execute()
    ws = ws_alg.getProperty("OutputWorkspace").value
    if use_xerror:
        for hists in range(0, nspec):
            x_error_array = np.asarray(x_error)
            ws.setDx(hists, x_error_array)
    return ws
def _run_beam_centre_core(self, state, workspace, monitor, transmission=None, direct=None, detector_type=DetectorType.LAB, component=DataType.Sample, centre_1=0.1, centre_2=-0.1, r_min=0.06, r_max=0.26):
    """Configure and execute a child SANSBeamCentreFinderCore algorithm,
    assert it executed, and return it."""
    centre_alg = AlgorithmManager.createUnmanaged("SANSBeamCentreFinderCore")
    centre_alg.setChild(True)
    centre_alg.initialize()
    centre_alg.setProperty("SANSState", state.property_manager)
    centre_alg.setProperty("ScatterWorkspace", workspace)
    centre_alg.setProperty("ScatterMonitorWorkspace", monitor)
    if transmission:
        centre_alg.setProperty("TransmissionWorkspace", transmission)
    if direct:
        centre_alg.setProperty("DirectWorkspace", direct)
    centre_alg.setProperty("Component", DetectorType.to_string(detector_type))
    centre_alg.setProperty("DataType", DataType.to_string(component))
    centre_alg.setProperty("Centre1", centre_1)
    centre_alg.setProperty("Centre2", centre_2)
    centre_alg.setProperty("RMax", r_max)
    centre_alg.setProperty("RMin", r_min)
    # One quadrant output per side, all using the placeholder name.
    for side in ("Left", "Right", "Top", "Bottom"):
        centre_alg.setProperty("OutputWorkspace" + side, EMPTY_NAME)
    # Act
    centre_alg.execute()
    self.assertTrue(centre_alg.isExecuted())
    return centre_alg
def _save_workspaces(self, directory):
    """
    Save all workspaces present in the ADS to the given directory
    :param directory: String; Path to where to save the workspaces
    """
    workspace_names = ADS.getObjectNames()
    if not workspace_names:
        return
    start_time = UsageService.getStartTime().toISO8601String()
    alg = AlgorithmManager.createUnmanaged("GeneratePythonScript", 1)
    alg.setChild(True)
    alg.setLogging(False)
    for index, ws in enumerate(workspace_names):
        # Empty group workspaces cannot be scripted; skip them.
        if self._empty_group_workspace(ws):
            continue
        filename = os.path.join(directory, str(index) + ".py")
        alg.initialize()
        alg.setProperty("AppendTimestamp", True)
        alg.setProperty("AppendExecCount", True)
        alg.setProperty("InputWorkspace", ws)
        alg.setPropertyValue("Filename", filename)
        alg.setPropertyValue("StartTimestamp", start_time)
        alg.setProperty("IgnoreTheseAlgs", ALGS_TO_IGNORE)
        alg.setProperty("IgnoreTheseAlgProperties", ALG_PROPERTIES_TO_IGNORE)
        alg.execute()
def test_finish_handle(self):
    """An observer registered for finish is notified when the algorithm
    completes, and no error message is recorded."""
    algorithm = AlgorithmManager.create("MockAlgorithm", -1)
    watcher = MockObserver()
    watcher.observeFinish(algorithm)
    algorithm.execute()
    self.assertTrue(watcher.finish_handled)
    self.assertTrue(watcher.error_message is None)
def pre_process(self):
    """
    Reduction steps that are meant to be executed only once per set
    of data files. After this is executed, all files will go through
    the list of reduction steps.
    """
    Logger("Reducer").information("Setting up reduction options")
    if self.setup_algorithm is not None:
        alg = AlgorithmManager.create(self.setup_algorithm)
        alg.initialize()
        props = [p.name for p in alg.getProperties()]
        for key in self.reduction_properties.keys():
            if key in props:
                try:
                    alg.setProperty(key, self.reduction_properties[key])
                except Exception as exc:
                    msg = "Error setting %s=%s" % (key, str(self.reduction_properties[key]))
                    # sys.exc_value is Python 2 only; use the caught exception
                    msg += "\n %s" % str(exc)
                    Logger("Reducer").error(msg)
            else:
                Logger("Reducer").warning("Setup algorithm has no %s property" % key)
        if "ReductionProperties" in props:
            alg.setPropertyValue("ReductionProperties", self.get_reduction_table_name())
        alg.execute()
def test_scale_none_with_can_and_q_merge_range(self):
    """With MergeMask on: LAB-only below MergeMin, HAB-only above MergeMax, summed in between."""
    creator = AlgorithmManager.create('CreateWorkspace')
    creator.setChild(True)
    creator.initialize()
    creator.setProperty('DataX', range(0, 10))
    creator.setProperty('DataY', [1] * 9)
    creator.setProperty('NSpec', 1)
    creator.setProperty('UnitX', 'MomentumTransfer')
    creator.setPropertyValue('OutputWorkspace', 'out_ws')
    creator.execute()
    lab_sample = creator.getProperty('OutputWorkspace').value

    creator.setProperty('DataY', [2] * 9)
    creator.execute()
    hab_sample = creator.getProperty('OutputWorkspace').value

    creator.setProperty('DataY', [0.5] * 9)
    creator.execute()
    can_counts = creator.getProperty('OutputWorkspace').value

    stitcher = AlgorithmManager.create('SANSStitch')
    stitcher.setChild(True)
    stitcher.initialize()
    # Set all the stitch inputs in one table-driven pass.
    for prop_name, prop_value in (('Mode', 'None'),
                                  ('HABCountsSample', hab_sample),
                                  ('LABCountsSample', lab_sample),
                                  ('HABNormSample', lab_sample),
                                  ('LABNormSample', lab_sample),
                                  ('ProcessCan', True),
                                  ('HABCountsCan', can_counts),
                                  ('LABCountsCan', can_counts),
                                  ('HABNormCan', lab_sample),
                                  ('LABNormCan', lab_sample),
                                  ('OutputWorkspace', 'dummy_name'),
                                  ('ShiftFactor', 0.0),
                                  ('ScaleFactor', 1.0),
                                  ('MergeMask', True),
                                  ('MergeMin', 2),
                                  ('MergeMax', 7)):
        stitcher.setProperty(prop_name, prop_value)
    stitcher.execute()

    out_ws = stitcher.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))

    y_array = out_ws.readY(0)
    expected_y_array = [0.5] * 2 + [1.0] * 5 + [1.5] * 2
    np.testing.assert_equal(y_array, expected_y_array)
def test_scale_both_without_can(self):
    """
    Fitting both scale and shift without a can: HAB is y=x+5 and LAB is y=x,
    so the fit should find scale=1.0 and shift=-5.0, and the stitched output
    should reproduce the LAB signal.
    """
    create_alg = AlgorithmManager.create('CreateWorkspace')
    create_alg.setChild(True)
    create_alg.initialize()
    create_alg.setProperty('NSpec', 1)
    create_alg.setProperty('UnitX', 'MomentumTransfer')
    create_alg.setPropertyValue('OutputWorkspace', 'out_ws')
    create_alg.setProperty('DataX', range(0, 10))
    # HAB as linear function y=x+5
    create_alg.setProperty('DataY', range(5, 14))
    create_alg.execute()
    hab_workspace = create_alg.getProperty('OutputWorkspace').value
    # LAB as linear function y=x+0
    create_alg.setProperty('DataY', range(0, 9))
    create_alg.execute()
    lab_workspace = create_alg.getProperty('OutputWorkspace').value
    # FLAT NORM
    create_alg.setProperty('DataY', [1] * 9)
    create_alg.execute()
    flat_norm = create_alg.getProperty('OutputWorkspace').value

    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'Both')
    alg.setProperty('HABCountsSample', hab_workspace)
    alg.setProperty('LABCountsSample', lab_workspace)
    alg.setProperty('HABNormSample', flat_norm)
    alg.setProperty('LABNormSample', flat_norm)
    alg.setProperty('OutputWorkspace', 'dummy_name')
    alg.execute()

    out_ws = alg.getProperty('OutputWorkspace').value
    out_shift_factor = alg.getProperty('OutShiftFactor').value
    out_scale_factor = alg.getProperty('OutScaleFactor').value
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual
    self.assertEqual(out_scale_factor, 1.0)
    self.assertEqual(out_shift_factor, -5.0)

    y_array = out_ws.readY(0)
    # We scale and shift to the back (lab) detectors
    expected_y_array = lab_workspace.readY(0)
    np.testing.assert_equal(y_array, expected_y_array)
def test_scale_none_with_can(self):
    """With fixed scale/shift, the can signal (0.5) is subtracted from the sample (1.0)."""
    creator = AlgorithmManager.create('CreateWorkspace')
    creator.setChild(True)
    creator.initialize()
    creator.setProperty('DataX', range(0, 10))
    creator.setProperty('DataY', [1] * 9)
    creator.setProperty('NSpec', 1)
    creator.setProperty('UnitX', 'MomentumTransfer')
    creator.setPropertyValue('OutputWorkspace', 'out_ws')
    creator.execute()
    sample_ws = creator.getProperty('OutputWorkspace').value

    creator.setProperty('DataY', [0.5] * 9)
    creator.execute()
    can_counts_ws = creator.getProperty('OutputWorkspace').value

    stitcher = AlgorithmManager.create('SANSStitch')
    stitcher.setChild(True)
    stitcher.initialize()
    stitcher.setProperty('Mode', 'None')
    stitcher.setProperty('HABCountsSample', sample_ws)
    stitcher.setProperty('LABCountsSample', sample_ws)
    stitcher.setProperty('HABNormSample', sample_ws)
    stitcher.setProperty('LABNormSample', sample_ws)
    stitcher.setProperty('ProcessCan', True)
    stitcher.setProperty('HABCountsCan', can_counts_ws)
    stitcher.setProperty('LABCountsCan', can_counts_ws)
    stitcher.setProperty('HABNormCan', sample_ws)
    stitcher.setProperty('LABNormCan', sample_ws)
    stitcher.setProperty('OutputWorkspace', 'dummy_name')
    stitcher.setProperty('ShiftFactor', 0.0)
    stitcher.setProperty('ScaleFactor', 1.0)
    stitcher.execute()

    out_ws = stitcher.getProperty('OutputWorkspace').value
    self.assertTrue(isinstance(out_ws, MatrixWorkspace))

    y_values = out_ws.readY(0)
    self.assertTrue(
        all(expected in y_values for expected in [0.5] * 9),
        msg='can gets subtracted so expect 1 - 0.5 as output signal. Proves the can workspace gets used correctly.')
def find_input_workspace_property(self, algorithm):
    """Return the name of the first input or in-out workspace property of the
    (name, version) algorithm given, or None if it has no such property."""
    manager = AlgorithmManager.Instance()
    instance = manager.createUnmanaged(algorithm[0], algorithm[1])
    instance.initialize()
    input_directions = (Direction.Input, Direction.InOut)
    return next((prop.name for prop in instance.getProperties()
                 if isinstance(prop, IWorkspaceProperty)
                 and prop.direction in input_directions),
                None)
def test_progress_handle(self):
    """Progress notifications emitted during execution reach the observer."""
    alg = AlgorithmManager.create("MockAlgorithm", -1)
    watcher = MockObserver()
    watcher.observeProgress(alg)
    alg.execute()
    self.assertTrue(watcher.first_progress_reported)
    self.assertTrue(watcher.second_progress_reported)
    self.assertEqual(watcher.progress_message, 'Half way')
def load_workspace(file_name):
    """Run the Load algorithm on *file_name* and return the loaded workspace."""
    loader = AlgorithmManager.createUnmanaged("Load")
    loader.initialize()
    # Child mode keeps the result out of the ADS; retrieved via the property.
    loader.setChild(True)
    loader.setProperty("Filename", file_name)
    loader.setProperty("OutputWorkspace", "dummy")
    loader.execute()
    return loader.getProperty("OutputWorkspace").value
def test_alg_with_overridden_attrs(self):
    """Overridden name/version/category/helpURL are exposed through the algorithm interface."""
    testhelpers.assertRaisesNothing(self, AlgorithmManager.createUnmanaged, "TestPyAlgOverriddenAttrs")
    alg = AlgorithmManager.createUnmanaged("TestPyAlgOverriddenAttrs")
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual
    self.assertEqual(alg.name(), "TestPyAlgOverriddenAttrs")
    self.assertEqual(alg.version(), 2)
    self.assertEqual(alg.category(), "BestAlgorithms")
    self.assertEqual(alg.helpURL(), "Optional documentation URL")
def test_isRunning_returning_non_bool_raises_error(self):
    """Calling the base-class isRunning on an override returning a non-bool raises RuntimeError."""
    alg = AlgorithmManager.createUnmanaged("TestPyAlgIsRunningReturnsNonBool")
    # boost.python automatically downcasts to the most available type,
    # meaning that type(alg)=TestPyAlgIsRunningReturnsNonBool and not the
    # interface, so method lookup doesn't go through the base class
    # automatically. Here we simulate how it would be called on the
    # C++ framework side.
    base_class_is_running = getattr(IAlgorithm, "isRunning")
    self.assertRaises(RuntimeError, base_class_is_running, alg)
def calculate_background(self, fit_function: IFunction) -> None:
    """Calculates the background in the counts workspace."""
    fit_parameters = self._get_parameters_for_background_fit(fit_function, create_output=False)
    fit_algorithm = AlgorithmManager.create("Fit")
    function, fit_status, chi_squared = run_Fit(fit_parameters, fit_algorithm)
    self._handle_background_fit_output(function, fit_status, chi_squared)
def _create_algorithm(self, **kwargs):
    """Create an initialized, unmanaged child VesuvioPreFit algorithm with the
    given keyword arguments applied as algorithm properties."""
    alg = AlgorithmManager.createUnmanaged("VesuvioPreFit")
    alg.initialize()
    alg.setChild(True)
    alg.setProperty("OutputWorkspace", "__unused")
    # dict.iteritems() was removed in Python 3; items() works on both.
    for key, value in kwargs.items():
        alg.setProperty(key, value)
    return alg
def test_createChildAlgorithm_without_name(self):
    """Omitting the algorithm name must raise a ValueError mentioning it."""
    parent = AlgorithmManager.createUnmanaged('Load')
    extra_props = {"XUnit": "Wavelength"}
    with self.assertRaisesRegex(ValueError, "algorithm name"):
        parent.createChildAlgorithm(startProgress=0.0,
                                    endProgress=1.0,
                                    enableLogging=False,
                                    version=1,
                                    **extra_props)
def test_none_mode_requires_scale_and_shift_factors(self):
    """Mode='None' must report validation errors for missing scale and shift factors."""
    alg = AlgorithmManager.create('SANSStitch')
    alg.setChild(True)
    alg.initialize()
    alg.setProperty('Mode', 'None')
    errors = alg.validateInputs()
    # assertIn gives a clearer failure message than assertTrue(x in y)
    self.assertIn('ScaleFactor', errors)
    self.assertIn('ShiftFactor', errors)
def test_createChildAlgorithm_respects_keyword_arguments(self):
    """createChildAlgorithm accepts its documented keywords and rejects unknown ones."""
    parent_alg = AlgorithmManager.createUnmanaged('Load')
    try:
        # Return value intentionally ignored; only absence of an exception matters.
        parent_alg.createChildAlgorithm(name='Rebin', version=1, startProgress=0.5,
                                        endProgress=0.9, enableLogging=True)
    except Exception as exc:
        self.fail("Expected createChildAlgorithm not to throw but it did: %s" % (str(exc)))
    # Unknown keyword
    self.assertRaises(Exception, parent_alg.createChildAlgorithm, name='Rebin',
                      version=1, startProgress=0.5, endProgress=0.9,
                      enableLogging=True, unknownKW=1)
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    # Every requested format is written independently for each workspace.
    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            # Changed to version 2 to enable re-loading of files into mantid
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 2)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            # Rebin to a fixed binning before writing the aCLIMAX ASCII file;
            # the binning depends on the energy transfer units in use.
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  # cm-1
            else:
                bins = '3, -0.005, 500'  # meV
            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            # DAVE group format is written with the spectrum axis in elastic Q.
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name + '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
def test_error_handle(self):
    """A failing algorithm triggers both the finish and the error observers."""
    failing_alg = AlgorithmManager.create("MockAlgorithm", -1)
    failing_alg.setProperty("Error", True)
    watcher = MockObserver()
    watcher.observeFinish(failing_alg)
    watcher.observeError(failing_alg)
    failing_alg.execute()
    self.assertTrue(watcher.finish_handled)
    self.assertTrue(watcher.error_message.startswith('Error in algorithm'))
# NOTE(review): the parameter name 'worksspace_names' is misspelled, but it is
# kept unchanged here for backward compatibility with keyword-argument callers.
def save_reduction(worksspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param worksspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units of the saved output
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    # Every requested format is written independently for each workspace.
    for workspace_name in worksspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            # Version 1 of SaveAscii produces output that works better with excel/origin
            # For some reason this has to be done with an algorithm object, using the function
            # wrapper with Version did not change the version that was run
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 1)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            # Rebin to a fixed binning before writing the aCLIMAX ASCII file;
            # the binning depends on the energy transfer units in use.
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  # cm-1
            else:
                bins = '3, -0.005, 500'  # meV
            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            # DAVE group format is written with the spectrum axis in elastic Q.
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name + '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
def create_background_output_workspaces(self, fit_function: IFunction) -> tuple:
    """Creates the output workspaces for the currently stored background data."""
    fit_parameters = self._get_parameters_for_background_fit(
        fit_function, create_output=True, max_iterations=0)
    fit_output = run_Fit(fit_parameters, AlgorithmManager.create("Fit"))
    # Of the six outputs, only the parameter table (index 1) and the
    # covariance matrix name (index 5) are returned.
    return fit_output[1], fit_output[5]
def test_createChildAlgorithm_with_incorrect_types(self):
    """A non-numeric startProgress argument must be rejected with a TypeError."""
    parent = AlgorithmManager.createUnmanaged('Load')
    extra_props = {"XUnit": "Wavelength"}
    with self.assertRaises(TypeError):
        parent.createChildAlgorithm("CreateSampleWorkspace",
                                    startProgress="0.0",
                                    endProgress=1.0,
                                    enableLogging=False,
                                    version=1,
                                    **extra_props)
def test_set_property_succeeds_with_python_float_lists(self):
    """A plain Python list of floats can be assigned to a float-array property."""
    rebin = AlgorithmManager.createUnmanaged("Rebin")
    rebin.initialize()
    # renamed from 'input', which shadows the builtin of the same name
    expected = [0.5, 1.0, 5.5]
    rebin.setProperty('Params', expected)
    params = rebin.getProperty('Params').value
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual
    self.assertEqual(len(params), 3)
    for i in range(3):
        self.assertEqual(params[i], expected[i])
def test_createChildAlgorithm_with_list(self):
    """List-valued keyword arguments are accepted when creating a child algorithm."""
    parent = AlgorithmManager.createUnmanaged('Load')
    list_properties = {
        'FloatInput': [2.3, 4.5],
        'IntInput': [1, 2, 3],
        'StringInput': ['test1', 'test2'],
    }
    child = parent.createChildAlgorithm("_ParamTester", **list_properties)
    self.assertIsNotNone(child)