def _createDataWorkspace(self, datawsname):
    """Create a single-spectrum TOF workspace with logarithmically growing
    bin boundaries and register it in the AnalysisDataService.

    :param datawsname: name to publish the workspace under
    :return: the created workspace
    """
    import math
    start_tof = 4900.
    log_step = 0.001
    num_points = 200

    x_values = []
    y_values = []
    e_values = []

    current_tof = start_tof
    for _ in range(num_points):
        x_values.append(current_tof)
        # NOTE(review): sin(start_tof) is the same constant for every point;
        # possibly sin(current_tof) was intended -- confirm before changing.
        y_values.append(math.sin(start_tof))
        e_values.append(1.)
        current_tof *= 1 + log_step
    # one extra boundary so X has one more entry than Y (histogram-like data)
    x_values.append(current_tof)

    dataws = api.CreateWorkspace(DataX=x_values, DataY=y_values, DataE=e_values,
                                 NSpec=1, UnitX="TOF")

    # Add to data service
    AnalysisDataService.addOrReplace(datawsname, dataws)

    return dataws
def test_exportFileNew(self):
    """Test exporting logs to a brand new record file (FileMode='new').

    The file must contain a title line plus one data line, and the summed,
    minimum and averaged proton charge columns must match numpy results.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm("ExportExperimentLog",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="TestRecord001.txt",
                             SampleLogNames=["run_number", "duration", "proton_charge", "proton_charge", "proton_charge"],
                             SampleLogTitles=["RUN", "Duration", "ProtonCharge", "MinPCharge", "MeanPCharge"],
                             SampleLogOperation=[None, None, "sum", "min", "average"],
                             FileMode="new")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate the output file and read it back
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        # self.fail() carries the message; clearer than print + assertTrue(False)
        self.fail("Unable to open file {0}.".format(outfilename))
        return

    # Last line cannot be empty, i.e., before EOF '\n' is not allowed
    lastline = lines[-1]
    self.assertTrue(len(lastline.strip()) > 0)

    # Number of lines: title line + one data line
    # (assertEquals is a deprecated alias; use assertEqual)
    self.assertEqual(len(lines), 2)

    # The data line must have one tab-separated value per requested log title
    firstdataline = lines[1]
    terms = firstdataline.strip().split("\t")
    self.assertEqual(len(terms), 5)

    # Get property and compare against the exported values
    pchargelog = ws.getRun().getProperty("proton_charge").value
    sumpcharge = numpy.sum(pchargelog)
    minpcharge = numpy.min(pchargelog)
    avgpcharge = numpy.average(pchargelog)

    v2 = float(terms[2])
    self.assertAlmostEqual(sumpcharge, v2)
    v3 = float(terms[3])
    self.assertAlmostEqual(minpcharge, v3)
    v4 = float(terms[4])
    self.assertAlmostEqual(avgpcharge, v4)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def setUp(self):
    """Create a temporary working directory, register one sample workspace in
    the ADS and save it as a project for the tests to load back."""
    self.working_directory = tempfile.mkdtemp()
    self.ws1_name = "ws1"
    self.project_ext = ".mtdproj"
    sample_ws = CreateSampleWorkspace(OutputWorkspace=self.ws1_name)
    ADS.addOrReplace(self.ws1_name, sample_ws)
    saver = projectsaver.ProjectSaver(self.project_ext)
    saver.save_project(workspace_to_save=[self.ws1_name],
                       directory=self.working_directory)
def test_add_raises_error_if_name_exists(self):
    """add() must raise RuntimeError when the name is already in the ADS."""
    xy = [1.0, 2.0, 3.0]
    creator = run_algorithm('CreateWorkspace', DataX=xy, DataY=xy, NSpec=1,
                            UnitX='Wavelength', child=True)
    name = "testws"
    ws = creator.getProperty("OutputWorkspace").value
    AnalysisDataService.addOrReplace(name, ws)
    self.assertRaises(RuntimeError, AnalysisDataService.add, name, ws)
def _run_createws(self, wsname):
    """Run CreateWorkspace and publish its output to the ADS as *wsname*."""
    xy = [1.0, 2.0, 3.0]
    creator = run_algorithm('CreateWorkspace', DataX=xy, DataY=xy, NSpec=1,
                            UnitX='Wavelength', child=True)
    AnalysisDataService.addOrReplace(wsname,
                                     creator.getProperty("OutputWorkspace").value)
def _createTwoCurves(self, datawsname):
    """Create a two-spectrum Workspace2D (a Gaussian and a sine curve),
    register it in the ADS and return the raw arrays.

    :param datawsname: name to publish the workspace under
    :return: (E, I, err, I2, err2)
    """
    E = np.arange(-50, 50, 1.0)
    # curve 1: Gaussian peak
    I = 1000 * np.exp(-E**2/10**2)
    err = I ** .5
    # curve 2: sine on a constant offset
    I2 = 1000 * (1+np.sin(E/5*np.pi))
    # NOTE(review): err2 is computed from curve 1 (I), not from I2 --
    # possibly intentional, confirm before changing.
    err2 = I ** .5
    # workspace with one vector per curve
    ws = WorkspaceFactory.create("Workspace2D", NVectors=2,
                                 XLength=E.size, YLength=I.size)
    for index, (y_data, e_data) in enumerate(((I, err), (I2, err2))):
        ws.dataX(index)[:] = E
        ws.dataY(index)[:] = y_data
        ws.dataE(index)[:] = e_data
    # Add to data service
    AnalysisDataService.addOrReplace(datawsname, ws)
    return E, I, err, I2, err2
def test_exportFileAppend(self):
    """Test exporting logs twice: first FileMode='new', then 'fastappend',
    so the record file ends up with a title line plus two data lines.

    Fixes the Python 2 print statements (syntax errors on Python 3) and the
    deprecated assertEquals alias.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm: create new file
    alg_test = run_algorithm("ExportExperimentLog",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="TestRecord.txt",
                             SampleLogNames=["run_number", "duration", "proton_charge"],
                             SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
                             SampleLogOperation=[None, None, "sum"],
                             FileMode="new")
    # append
    alg_test = run_algorithm("ExportExperimentLog",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="TestRecord.txt",
                             SampleLogNames=["run_number", "duration", "proton_charge"],
                             SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
                             SampleLogOperation=[None, None, "sum"],
                             FileMode="fastappend")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        print("Output file is %s. " % (outfilename))
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file %s. " % (outfilename))
        self.fail()
        return

    # Last line cannot be empty, i.e., before EOF '\n' is not allowed
    lastline = lines[-1]
    self.assertTrue(len(lastline.strip()) > 0)

    # Number of lines: title line + two data lines
    self.assertEqual(len(lines), 3)

    # Check the data line has one value per log title
    firstdataline = lines[1]
    terms = firstdataline.strip().split("\t")
    self.assertEqual(len(terms), 3)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def add_to_ads(calibration_workspace, full_file_path):
    """
    Publish a calibration workspace to the ADS.

    The name under which it is published is derived from the file name.
    :param calibration_workspace: the calibration workspace to publish.
    :param full_file_path: path of the calibration file; determines the name.
    """
    name_in_ads = get_expected_calibration_workspace_name(full_file_path)
    AnalysisDataService.addOrReplace(name_in_ads, calibration_workspace)
def test_addOrReplace_replaces_workspace_with_existing_name(self):
    """addOrReplace with an existing name must replace in place, leaving the
    number of entries in the ADS unchanged."""
    data = [1.0, 2.0, 3.0]
    alg = run_algorithm('CreateWorkspace', DataX=data, DataY=data, NSpec=1,
                        UnitX='Wavelength', child=True)
    name = "testws"
    ws = alg.getProperty("OutputWorkspace").value
    AnalysisDataService.add(name, ws)
    len_before = len(AnalysisDataService)
    AnalysisDataService.addOrReplace(name, ws)
    len_after = len(AnalysisDataService)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(len_after, len_before)
def test_saving_single_workspace(self):
    """A single ADS workspace should be saved as exactly one .nxs file."""
    saver = workspacesaver.WorkspaceSaver(self.working_directory)
    ws_name = "ws1"
    ADS.addOrReplace(ws_name, CreateSampleWorkspace())
    saver.save_workspaces([ws_name])

    saved_files = listdir(self.working_directory)
    self.assertEqual(len(saved_files), 1)
    self.assertTrue(ws_name + ".nxs" in saved_files)
def test_add_raises_error_if_name_exists(self):
    """Adding a workspace under an already-registered name must raise."""
    data = [1.0, 2.0, 3.0]
    created = run_algorithm('CreateWorkspace', DataX=data, DataY=data,
                            NSpec=1, UnitX='Wavelength', child=True)
    ws = created.getProperty("OutputWorkspace").value
    ws_name = "testws"
    AnalysisDataService.addOrReplace(ws_name, ws)
    self.assertRaises(RuntimeError, AnalysisDataService.add, ws_name, ws)
def test_exportUTC(self):
    """Test exporting sample logs with the UTC time zone and a header file.

    Verifies the generated header file contains exactly 3 non-empty lines.
    """
    import os
    import os.path
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20339utc.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorC"],
        WriteHeaderFile=True,
        TimeZone="UTC",
        Header="SensorA[K]\t SensorB[K]\t SensorC[K]",
    )

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate the companion header file next to the output file
    outfilename = alg_test.getProperty("OutputFilename").value
    filepath = os.path.dirname(outfilename)
    basename = os.path.basename(outfilename)
    baseheadername = basename.split(".")[0] + "_header.txt"
    headerfilename = os.path.join(filepath, baseheadername)
    try:
        ifile = open(headerfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        errmsg = "Unable to open header file %s. " % (headerfilename)
        # assertEquals is a deprecated alias; use assertEqual
        self.assertEqual(errmsg, "")
        return

    # Count non-empty lines in the header file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 3)

    # Clean
    os.remove(outfilename)
    os.remove(headerfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def test_that_x_limits_of_current_dataset_will_return_the_x_limits_of_the_workspace(self):
    """x_limits_of_workspace should return the workspace x range padded by X_OFFSET."""
    self.model.dataset_names = self.dataset_names
    AnalysisDataService.addOrReplace("EMU20884; Group; fwd; Asymmetry",
                                     CreateSampleWorkspace())

    lower, upper = self.model.x_limits_of_workspace(self.model.current_dataset_name)

    self.assertEqual(lower, 0.0 - X_OFFSET)
    self.assertEqual(upper, 20000.0 + X_OFFSET)
def test_exportFileNewCSV(self):
    """Test exporting logs in csv format when the output name does not end
    with .csv: the algorithm should write a .csv file instead.

    Fixes the Python 2 print statements (syntax errors on Python 3) and the
    deprecated assertEquals alias.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportExperimentLog",
        InputWorkspace="TestMatrixWS",
        OutputFilename="TestRecord.txt",
        SampleLogNames=["run_number", "duration", "proton_charge"],
        SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
        SampleLogOperation=[None, None, "sum"],
        FileMode="new",
        FileFormat="comma (csv)")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # The algorithm swaps the .txt extension for .csv
    outfilename = alg_test.getProperty("OutputFilename").value.split(
        ".txt")[0] + ".csv"
    try:
        print("Output file is %s. " % (outfilename))
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file %s. " % (outfilename))
        self.fail()
        return

    # Last line cannot be empty, i.e., before EOF '\n' is not allowed
    lastline = lines[-1]
    self.assertTrue(len(lastline.strip()) > 0)

    # Number of lines: title line + one data line
    self.assertEqual(len(lines), 2)

    # The data line must be comma separated with one value per log
    firstdataline = lines[1]
    terms = firstdataline.strip().split(",")
    self.assertEqual(len(terms), 3)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def _createOneQCurve(self, datawsname):
    """Create a single Gaussian I(Q) workspace, publish it to the ADS and
    return the underlying arrays.

    :param datawsname: name to publish the workspace under
    :return: (Q, I, err)
    """
    Q = np.arange(0, 13, 1.0)
    intensity = 1000 * np.exp(-Q**2/10**2)
    error = intensity ** .5
    # create workspace and register it
    dataws = api.CreateWorkspace(DataX=Q, DataY=intensity, DataE=error,
                                 NSpec=1, UnitX="Momentum")
    AnalysisDataService.addOrReplace(datawsname, dataws)
    return Q, intensity, error
def _createOneCurve(self, datawsname):
    """Create a single Gaussian I(E) workspace, publish it to the ADS and
    return the underlying arrays.

    :param datawsname: name to publish the workspace under
    :return: (E, I, err)
    """
    E = np.arange(-50, 50, 10.0)
    intensity = 1000 * np.exp(-E**2/10**2)
    error = intensity ** .5
    # create workspace and register it
    dataws = api.CreateWorkspace(DataX=E, DataY=intensity, DataE=error,
                                 NSpec=1, UnitX="Energy")
    AnalysisDataService.addOrReplace(datawsname, dataws)
    return E, intensity, error
def _createOneCurve(self, datawsname):
    """Create a Gaussian single-spectrum energy workspace in the ADS.

    :param datawsname: name to publish the workspace under
    :return: (E, I, err)
    """
    energies = np.arange(-50, 50, 10.0)
    counts = 1000 * np.exp(-energies**2/10**2)
    errors = counts ** .5
    workspace = api.CreateWorkspace(DataX=energies, DataY=counts,
                                    DataE=errors, NSpec=1, UnitX="Energy")
    # Add to data service
    AnalysisDataService.addOrReplace(datawsname, workspace)
    return energies, counts, errors
def __init__(self):
    """Start the application and wire up the Fit Script Generator MVP
    components around a single four-spectrum test workspace."""
    super(FitScriptGeneratorStartupTest, self).__init__()
    self._app = get_application()

    self.ws_name = "WorkspaceName"
    x_values = list(range(1, 17))
    y_values = list(range(1, 13))
    test_workspace = CreateWorkspace(DataX=x_values, DataY=y_values,
                                     NSpec=4, UnitX="Wavelength")
    AnalysisDataService.addOrReplace(self.ws_name, test_workspace)

    self.fsg_model = FitScriptGeneratorModel()
    self.fsg_view = FitScriptGeneratorView(None, FittingMode.SIMULTANEOUS,
                                           {"Minimizer": "Levenberg-Marquardt"})
    self.fsg_presenter = FitScriptGeneratorPresenter(self.fsg_view, self.fsg_model,
                                                     [self.ws_name], 1.0, 3.0)
def test_exportUTC(self):
    """Test exporting sample logs with the UTC time zone and a header file.

    Verifies the generated header file contains exactly 3 non-empty lines.
    """
    import os
    import os.path
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20339utc.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorC"],
        WriteHeaderFile=True,
        TimeZone='UTC',
        Header="SensorA[K]\t SensorB[K]\t SensorC[K]")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate the companion header file next to the output file
    outfilename = alg_test.getProperty("OutputFilename").value
    filepath = os.path.dirname(outfilename)
    basename = os.path.basename(outfilename)
    baseheadername = basename.split(".")[0] + "_header.txt"
    headerfilename = os.path.join(filepath, baseheadername)
    try:
        ifile = open(headerfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        errmsg = "Unable to open header file %s. " % (headerfilename)
        # assertEquals is a deprecated alias; use assertEqual
        self.assertEqual(errmsg, "")
        return

    # Count non-empty lines in the header file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 3)

    # Clean
    os.remove(outfilename)
    os.remove(headerfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def _run_createws(self, wsname):
    """Run CreateWorkspace as a child algorithm and publish the result to the
    ADS under *wsname*."""
    values = [1.0, 2.0, 3.0]
    creator = run_algorithm('CreateWorkspace', DataX=values, DataY=values,
                            NSpec=1, UnitX='Wavelength', child=True)
    output = creator.getProperty("OutputWorkspace").value
    AnalysisDataService.addOrReplace(wsname, output)
def _create_data_workspace(data_ws_name, num_spec, tof0=None, delta=None, num_pts=None):
    """
    Create a multiple spectra data workspace and register it in the ADS.

    :param data_ws_name: name to publish the workspace under
    :param num_spec: number of spectra
    :param tof0: first TOF value (default 10000.)
    :param delta: logarithmic bin step (default 0.001)
    :param num_pts: number of points per spectrum (default 200)
    :return: the created workspace
    """
    # Bug fix: these parameters used to be unconditionally overwritten by the
    # hard-coded values, making the signature a lie. The hard-coded values are
    # now only the defaults, so passing nothing keeps the old behaviour.
    if tof0 is None:
        tof0 = 10000.
    if delta is None:
        delta = 0.001
    if num_pts is None:
        num_pts = 200

    # get base data sets for the workspace as Histograms
    list_x = list()
    list_y = list()
    list_e = list()
    tof = tof0
    for n in range(num_pts):
        list_x.append(tof)
        # NOTE(review): sin(tof0) is constant for every point; possibly
        # sin(tof) was intended -- confirm before changing.
        list_y.append(math.sin(tof0))
        list_e.append(1.)
        tof *= 1 + delta
    # END-FOR
    list_x.append(tof)

    vec_x = numpy.array(list_x)
    vec_y = numpy.array(list_y)
    vec_e = numpy.array(list_e)

    # expand to multiple spectra
    if num_spec > 1:
        # NOTE(review): slicing a numpy array yields a *view*, so the in-place
        # "*=" below mutates vec_y_orig too and the scaling compounds across
        # spectra -- confirm this is the intended test data before changing.
        vec_x_orig = vec_x[:]
        vec_y_orig = vec_y[:]
        vec_e_orig = vec_e[:]
        for spec_index in range(1, num_spec):
            vec_x = numpy.append(vec_x, vec_x_orig)
            vec_i = vec_y_orig[:]
            vec_i *= 2 * (spec_index + 1)
            vec_y = numpy.append(vec_y, vec_i)
            vec_e = numpy.append(vec_e, vec_e_orig)
    # END-FOR

    data_ws = api.CreateWorkspace(DataX=vec_x, DataY=vec_y, DataE=vec_e,
                                  NSpec=num_spec, UnitX="TOF")

    # Add to data service
    AnalysisDataService.addOrReplace(data_ws_name, data_ws)

    return data_ws
def test_when_MDWorkspace_is_in_ADS(self):
    """Saving an MDHistoWorkspace from the ADS should produce one .nxs file
    that can be loaded back and validated."""
    saver = workspacesaver.WorkspaceSaver(self.working_directory)
    md_ws = CreateMDHistoWorkspace(SignalInput='1,2,3,4,5,6,7,8,9',
                                   ErrorInput='1,1,1,1,1,1,1,1,1',
                                   Dimensionality='2', Extents='-1,1,-1,1',
                                   NumberOfBins='3,3', Names='A,B', Units='U,T')
    md_ws_name = "ws1"
    ADS.addOrReplace(md_ws_name, md_ws)

    saver.save_workspaces([md_ws_name])

    files_in_dir = listdir(self.working_directory)
    self.assertEqual(len(files_in_dir), 1)
    self.assertTrue(md_ws_name + ".nxs" in files_in_dir)
    self._load_MDWorkspace_and_test_it(md_ws_name)
def _create_data_workspace(data_ws_name, num_spec, tof0=None, delta=None, num_pts=None):
    """
    Create a multiple spectra data workspace and register it in the ADS.

    :param data_ws_name: name to publish the workspace under
    :param num_spec: number of spectra
    :param tof0: first TOF value (default 10000.)
    :param delta: logarithmic bin step (default 0.001)
    :param num_pts: number of points per spectrum (default 200)
    :return: the created workspace
    """
    # Bug fix: these parameters used to be unconditionally overwritten by the
    # hard-coded values, making the signature a lie. The hard-coded values are
    # now only the defaults, so passing nothing keeps the old behaviour.
    if tof0 is None:
        tof0 = 10000.
    if delta is None:
        delta = 0.001
    if num_pts is None:
        num_pts = 200

    # get base data sets for the workspace as Histograms
    list_x = list()
    list_y = list()
    list_e = list()
    tof = tof0
    for n in range(num_pts):
        list_x.append(tof)
        # NOTE(review): sin(tof0) is constant for every point; possibly
        # sin(tof) was intended -- confirm before changing.
        list_y.append(math.sin(tof0))
        list_e.append(1.)
        tof *= 1 + delta
    # END-FOR
    list_x.append(tof)

    vec_x = numpy.array(list_x)
    vec_y = numpy.array(list_y)
    vec_e = numpy.array(list_e)

    # expand to multiple spectra
    if num_spec > 1:
        # NOTE(review): slicing a numpy array yields a *view*, so the in-place
        # "*=" below mutates vec_y_orig too and the scaling compounds across
        # spectra -- confirm this is the intended test data before changing.
        vec_x_orig = vec_x[:]
        vec_y_orig = vec_y[:]
        vec_e_orig = vec_e[:]
        for spec_index in range(1, num_spec):
            vec_x = numpy.append(vec_x, vec_x_orig)
            vec_i = vec_y_orig[:]
            vec_i *= 2 * (spec_index + 1)
            vec_y = numpy.append(vec_y, vec_i)
            vec_e = numpy.append(vec_e, vec_e_orig)
    # END-FOR

    data_ws = api.CreateWorkspace(DataX=vec_x, DataY=vec_y, DataE=vec_e,
                                  NSpec=num_spec, UnitX="TOF")

    # Add to data service
    AnalysisDataService.addOrReplace(data_ws_name, data_ws)

    return data_ws
def test_when_MDWorkspace_is_in_ADS(self):
    """An MDHistoWorkspace in the ADS should be saved to a single .nxs file
    and be loadable afterwards."""
    saver = workspacesaver.WorkspaceSaver(self.working_directory)
    histo_ws = CreateMDHistoWorkspace(SignalInput='1,2,3,4,5,6,7,8,9',
                                      ErrorInput='1,1,1,1,1,1,1,1,1',
                                      Dimensionality='2', Extents='-1,1,-1,1',
                                      NumberOfBins='3,3', Names='A,B', Units='U,T')
    name = "ws1"
    ADS.addOrReplace(name, histo_ws)
    saver.save_workspaces([name])

    directory_contents = listdir(self.working_directory)
    self.assertEqual(len(directory_contents), 1)
    self.assertTrue(name + ".nxs" in directory_contents)
    self._load_MDWorkspace_and_test_it(name)
def _createOneHistogram(self, datawsname):
    """Create a single-spectrum histogram workspace (Gaussian evaluated at
    bin centres), publish it to the ADS and return the raw arrays.

    :param datawsname: name to publish the workspace under
    :return: (E, I, err)
    """
    E = np.arange(-50.5, 50, 1.0)          # bin boundaries
    Ecenters = (E[:-1] + E[1:]) / 2        # bin centres
    intensity = 1000 * np.exp(-Ecenters**2/10**2)
    error = intensity ** .5
    # create workspace and register it
    dataws = api.CreateWorkspace(DataX=E, DataY=intensity, DataE=error,
                                 NSpec=1, UnitX="Energy(meV)")
    AnalysisDataService.addOrReplace(datawsname, dataws)
    return E, intensity, error
def test_exportFileMissingLog(self):
    """Test exporting logs where one requested log (SensorX) does not exist:
    the missing column should be written as zeros.

    Fixes the Python 2 print statement (syntax error on Python 3) and the
    deprecated assertEquals alias.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20335.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorX", "SensorC"],
        WriteHeaderFile=False,
    )

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file %s. " % (outfilename))
        self.fail()
        return

    # Count non-empty lines in the file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 25)

    # Check values: the missing log's column must be zero
    line0 = lines[0]
    terms = line0.split()
    self.assertEqual(len(terms), 6)
    value2 = float(terms[4])
    self.assertEqual(value2, 0.0)

    # Clean
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def setUp(self):
    """Create (lazily, once per class) two small test workspaces, register
    them in the ADS and patch get_spectra_selection for the tests."""
    if self._test_ws is None:
        self.__class__._test_ws = WorkspaceFactory.Instance().create(
            "Workspace2D", NVectors=2, YLength=5, XLength=5)
    if self._test_ws_2 is None:
        self.__class__._test_ws_2 = WorkspaceFactory.Instance().create(
            "Workspace2D", NVectors=2, YLength=5, XLength=5)

    AnalysisDataService.addOrReplace('test_ws', self._test_ws)
    AnalysisDataService.addOrReplace('test_ws_2', self._test_ws_2)

    patcher = mock.patch('mantidqt.plotting.functions.get_spectra_selection')
    self.get_spectra_selection_patcher = patcher
    self.addCleanup(patcher.stop)
    self.get_spectra_selection_mock = patcher.start()
def _createOneHistogram(self, datawsname):
    """Build a Gaussian histogram workspace over energy bins and add it to
    the data service.

    :param datawsname: name to publish the workspace under
    :return: (E, I, err)
    """
    boundaries = np.arange(-50.5, 50, 1.0)
    centres = (boundaries[:-1] + boundaries[1:]) / 2
    counts = 1000 * np.exp(-centres**2/10**2)
    errors = counts ** .5
    workspace = api.CreateWorkspace(DataX=boundaries, DataY=counts,
                                    DataE=errors, NSpec=1,
                                    UnitX="Energy(meV)")
    # Add to data service
    AnalysisDataService.addOrReplace(datawsname, workspace)
    return boundaries, counts, errors
def test_exportFileMissingLog(self):
    """Test exporting logs where one requested log (SensorX) is missing:
    its column should be written as zeros.

    Replaces the deprecated assertEquals alias and the print +
    assertTrue(False) pattern.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS",
        OutputFilename="furnace20335.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorX", "SensorC"],
        WriteHeaderFile=False)

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file {0}.".format(outfilename))
        self.fail()
        return

    # Count non-empty lines in the file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 25)

    # Check values: the missing log's column must be zero
    line0 = lines[0]
    terms = line0.split()
    self.assertEqual(len(terms), 6)
    value2 = float(terms[4])
    self.assertEqual(value2, 0.)

    # Clean
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def test_addOrReplace_replaces_workspace_with_existing_name(self):
    """addOrReplace with an existing name must replace in place, so the
    number of ADS entries does not grow."""
    data = [1.0, 2.0, 3.0]
    alg = run_algorithm('CreateWorkspace', DataX=data, DataY=data,
                        NSpec=1, UnitX='Wavelength', child=True)
    name = "testws"
    ws = alg.getProperty("OutputWorkspace").value
    AnalysisDataService.add(name, ws)
    len_before = len(AnalysisDataService)
    AnalysisDataService.addOrReplace(name, ws)
    len_after = len(AnalysisDataService)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(len_after, len_before)
def test_saving_multiple_workspaces(self):
    """Two ADS workspaces should be saved as two separate .nxs files."""
    saver = workspacesaver.WorkspaceSaver(self.working_directory)
    names = ("ws1", "ws2")
    for name in names:
        ADS.addOrReplace(name, CreateSampleWorkspace())

    saver.save_workspaces(list(names))

    files_in_dir = listdir(self.working_directory)
    self.assertEqual(len(files_in_dir), 2)
    self.assertTrue(names[1] + ".nxs" in files_in_dir)
    self.assertTrue(names[0] + ".nxs" in files_in_dir)
def calculate_resolution(input_data, mass, index=0):
    """
    Run the VesuvioResolution function to produce a workspace with the value
    of the Vesuvio resolution.

    @param input_data The original TOF data
    @param mass The mass defining the recoil peak in AMU
    @param index An optional index to specify the spectrum to use
    """
    from mantid.api import AlgorithmManager, AnalysisDataService
    from mantid.kernel.funcreturns import lhs_info

    # Grab the name of the variable that this function call is assigned to
    try:
        output_name = lhs_info("names")[0]
    except IndexError:
        # No variable specified
        name_stem = str(input_data)
        output_name = name_stem + "_res" + str(index)

    function = "name=VesuvioResolution, Mass=%f" % mass

    # execute the resolution function using fit.
    # functions can't currently be executed as stand alone objects,
    # so for now we will run fit with zero iterations to achieve the same result.
    # Consistency fix: use the locally imported AlgorithmManager rather than the
    # fully-qualified mantid.api.AlgorithmManager (the import was otherwise unused).
    fit = AlgorithmManager.createUnmanaged('Fit')
    fit.initialize()
    fit.setChild(True)
    fit.setLogging(False)
    mantid.simpleapi._set_properties(fit, function, input_data, MaxIterations=0,
                                     CreateOutput=True, Output=output_name,
                                     WorkspaceIndex=index)
    fit.execute()
    values_ws = fit.getProperty("OutputWorkspace").value

    # extract just the function values (spectrum 1 of the fit output)
    extract = AlgorithmManager.createUnmanaged('ExtractSingleSpectrum')
    extract.initialize()
    extract.setChild(True)
    extract.setLogging(False)
    extract.setProperty("InputWorkspace", values_ws)
    extract.setProperty("OutputWorkspace", "__unused_for_child")
    extract.setProperty("WorkspaceIndex", 1)
    extract.execute()

    calculated = extract.getProperty("OutputWorkspace").value
    AnalysisDataService.addOrReplace(output_name, calculated)

    return calculated
def _determine_factors(self, q_high_angle, q_low_angle, mode, scale, shift):
    """
    Fit the rear (low-angle) data against the front (high-angle) data to
    determine the shift and scale factors used for stitching.

    :param q_high_angle: front-detector reduced data (used as the model)
    :param q_low_angle: rear-detector reduced data
    :param mode: fitting mode (Mode.BothFit, Mode.ShiftOnly or Mode.ScaleOnly)
    :param scale: fixed scale value when only the shift is fitted
    :param shift: fixed shift value when only the scale is fitted
    :return: (shift, scale) tuple
    """
    # Determine the StartQ and EndQ values
    q_min, q_max = self._get_start_q_and_end_q_values(rear_data=q_low_angle, front_data=q_high_angle)

    # We need to transfer the errors from the front data to the rear data, as we are using the front data
    # as a model, but we want to take into account the errors of both workspaces.
    error_correction = ErrorTransferFromModelToData()
    front_data_corrected, rear_data_corrected = error_correction.get_error_corrected(
        rear_data=q_low_angle, front_data=q_high_angle, q_min=q_min, q_max=q_max)

    fit = self.createChildAlgorithm('Fit')

    # We currently have to put the front_data into the ADS so that the TabulatedFunction has access to it
    front_data_corrected = AnalysisDataService.addOrReplace('front_data_corrected', front_data_corrected)
    front_in_ads = AnalysisDataService.retrieve('front_data_corrected')

    function = 'name=TabulatedFunction, Workspace="' + str(
        front_in_ads.name()) + '"' + ";name=FlatBackground"
    fit.setProperty('Function', function)
    fit.setProperty('InputWorkspace', rear_data_corrected)

    # We need to make sure that the fitting only occurs in the y direction.
    # (An earlier duplicate assignment of this variable with a leading comma
    # was dead code and has been removed.)
    constant_x_shift_and_scale = 'f0.Shift=0.0, f0.XScaling=1.0'
    if mode == Mode.BothFit:
        fit.setProperty('Ties', constant_x_shift_and_scale)
    elif mode == Mode.ShiftOnly:
        fit.setProperty('Ties', 'f0.Scaling=' + str(scale) + ',' + constant_x_shift_and_scale)
    elif mode == Mode.ScaleOnly:
        fit.setProperty('Ties', 'f1.A0=' + str(shift) + '*f0.Scaling,' + constant_x_shift_and_scale)
    else:
        raise RuntimeError('Unknown fitting mode requested.')

    fit.setProperty('StartX', q_min)
    fit.setProperty('EndX', q_max)
    fit.setProperty('CreateOutput', True)
    fit.execute()
    param = fit.getProperty('OutputParameters').value
    AnalysisDataService.remove(front_in_ads.name())

    # The output parameters are:
    # 1. Scaling in y direction
    # 2. Shift in x direction
    # 3. Scaling in x direction
    # 4. Shift in y direction
    scale = param.row(0)['Value']

    if scale == 0.0:
        raise RuntimeError('Fit scaling as part of stitching evaluated to zero')

    # In order to determine the shift, we need to remove the scale factor
    shift = param.row(3)['Value'] / scale

    return (shift, scale)
def test_exportFile2(self):
    """Export a partial set of real load-frame log values mapped onto
    different sensor logs and check the number of well-formed entries."""
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace2()
    AnalysisDataService.addOrReplace("TestMatrixWS2", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS2",
        OutputFilename="furnace20334.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorC", "SensorD"],
        WriteHeaderFile=False,
        TimeTolerance=1.0)

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file {0}.".format(outfilename))
        self.fail()
        return

    # Count well-formed data lines (6 columns, starting with run 76130)
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0 and len(
                line.split()) == 6 and line.startswith('76130'):
            goodlines += 1
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(goodlines, 64)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS2")

    return
def test_exportFile2(self):
    """Export a partial set of real load-frame log values mapped onto
    different sensor logs and check the number of exported lines.

    Fixes the Python 2 print statement (syntax error on Python 3) and the
    deprecated assertEquals alias.
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace2()
    AnalysisDataService.addOrReplace("TestMatrixWS2", ws)

    # Test algorithm
    alg_test = run_algorithm(
        "ExportSampleLogsToCSVFile",
        InputWorkspace="TestMatrixWS2",
        OutputFilename="furnace20334.txt",
        SampleLogNames=["SensorA", "SensorB", "SensorC", "SensorD"],
        WriteHeaderFile=False,
        TimeTolerance=1.0,
    )

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file %s. " % (outfilename))
        self.fail()
        return

    # Count non-empty lines in the file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 64)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS2")

    return
def add_workspaces_to_analysis_data_service(workspaces, workspace_names, is_monitor):
    """
    Adds a list of workspaces to the ADS.

    :param workspaces: list of workspaces
    :param workspace_names: the names under which they are to be published
    :param is_monitor: if the workspace is a monitor or not
    """
    if is_monitor:
        workspace_names = [name + MONITOR_SUFFIX for name in workspace_names]
    if len(workspaces) != len(workspace_names):
        raise RuntimeError("SANSLoad: There is a mismatch between the generated names and the length of"
                           " the WorkspaceGroup. The workspace has {0} entries and there are {1} "
                           "workspace names".format(len(workspaces), len(workspace_names)))

    # Only publish names that are not already present in the ADS
    for workspace, name in zip(workspaces, workspace_names):
        if not AnalysisDataService.doesExist(name):
            AnalysisDataService.addOrReplace(name, workspace)
def add_workspaces_to_analysis_data_service(workspaces, workspace_names, is_monitor):
    """
    Adds a list of workspaces to the ADS.

    :param workspaces: list of workspaces
    :param workspace_names: the names under which they are to be published
    :param is_monitor: if the workspace is a monitor or not
    """
    if is_monitor:
        workspace_names = [name + MONITOR_SUFFIX for name in workspace_names]
    if len(workspaces) != len(workspace_names):
        raise RuntimeError("SANSLoad: There is a mismatch between the generated names and the length of"
                           " the WorkspaceGroup. The workspace has {0} entries and there are {1} "
                           "workspace names".format(len(workspaces), len(workspace_names)))

    # Only publish names that are not already present in the ADS
    for index, name in enumerate(workspace_names):
        if not AnalysisDataService.doesExist(name):
            AnalysisDataService.addOrReplace(name, workspaces[index])
def test_exportFileOnly(self):
    """Exporting sample logs without a header file should write exactly 25
    non-empty lines."""
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Run the export algorithm
    alg_test = run_algorithm("ExportSampleLogsToCSVFile",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="furnace20333.txt",
                             SampleLogNames=["SensorA", "SensorB", "SensorC"],
                             WriteHeaderFile=False)
    self.assertTrue(alg_test.isExecuted())

    # Read the generated file back
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        with open(outfilename) as ifile:
            lines = ifile.readlines()
    except IOError:
        print("Unable to open file {0}.".format(outfilename))
        self.fail()
        return

    # Count non-empty lines
    goodlines = sum(1 for line in lines if len(line.strip()) > 0)
    self.assertEqual(goodlines, 25)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")
    return
def test_exportFileOnly(self):
    """Test exporting sample logs without writing a header file.

    Replaces the deprecated assertEquals alias and the assertTrue(False)
    failure pattern (matching the sibling implementation that uses fail()).
    """
    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # Test algorithm
    alg_test = run_algorithm("ExportSampleLogsToCSVFile",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="furnace20333.txt",
                             SampleLogNames=["SensorA", "SensorB", "SensorC"],
                             WriteHeaderFile=False)

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        ifile = open(outfilename)
        lines = ifile.readlines()
        ifile.close()
    except IOError:
        print("Unable to open file {0}.".format(outfilename))
        self.fail()
        return

    # Count non-empty lines in the file
    goodlines = 0
    for line in lines:
        line = line.strip()
        if len(line) > 0:
            goodlines += 1
    self.assertEqual(goodlines, 25)

    # Remove generated files
    os.remove(outfilename)
    AnalysisDataService.remove("TestMatrixWS")

    return
def set_output_workspaces(self, reduction_mode_vs_output_workspaces, reduction_mode_vs_workspace_names):
    """
    Sets the output workspaces which can be HAB, LAB or Merged.

    At this step we also provide a workspace name to the sample logs which can be used
    later on for saving.
    :param reduction_mode_vs_output_workspaces: map from reduction mode to output workspace
    :param reduction_mode_vs_workspace_names: map from reduction mode to output workspace name
    :raises RuntimeError: if a reduction mode other than Merged/LAB/HAB is encountered
    """
    workspace_group_merged = WorkspaceGroup()
    workspace_group_lab = WorkspaceGroup()
    workspace_group_hab = WorkspaceGroup()
    # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
    # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
    # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit
    for reduction_mode, output_workspaces in list(reduction_mode_vs_output_workspaces.items()):
        workspace_names = reduction_mode_vs_workspace_names[reduction_mode]
        for output_workspace, output_name in zip(output_workspaces, workspace_names):
            # In an MPI reduction output_workspace is produced on the master rank, skip others.
            if output_workspace is None:
                continue
            AnalysisDataService.addOrReplace(output_name, output_workspace)
            if reduction_mode is ReductionMode.Merged:
                workspace_group_merged.addWorkspace(output_workspace)
            elif reduction_mode is ISISReductionMode.LAB:
                workspace_group_lab.addWorkspace(output_workspace)
            elif reduction_mode is ISISReductionMode.HAB:
                workspace_group_hab.addWorkspace(output_workspace)
            else:
                # Repaired: this message was a string literal broken across lines
                # (unterminated) in the previous revision.
                raise RuntimeError("SANSSingleReduction: Cannot set the output workspace. "
                                   "The selected reduction mode {0} is unknown.".format(reduction_mode))
    # Only set an output property when its group actually received workspaces
    if workspace_group_merged.size() > 0:
        self.setProperty("OutputWorkspaceMerged", workspace_group_merged)
    if workspace_group_lab.size() > 0:
        self.setProperty("OutputWorkspaceLAB", workspace_group_lab)
    if workspace_group_hab.size() > 0:
        self.setProperty("OutputWorkspaceHAB", workspace_group_hab)
def set_reduced_can_workspace_on_output(self, output_bundles):
    """
    Sets the reduced can group workspaces on the output properties.

    The reduced can workspaces can be: LAB Can or HAB Can
    :param output_bundles: a list of output bundles
    """
    lab_can_group = WorkspaceGroup()
    hab_can_group = WorkspaceGroup()

    # Pick out the can entries (if any exist) and publish them on the ADS
    for component_bundle in output_bundles:
        for bundle in component_bundle:
            if bundle.data_type is not DataType.Can:
                continue
            reduction_mode = bundle.reduction_mode
            can_workspace = bundle.output_workspace
            # The workspace can be None if there has never been a can set for the
            # reduction; an already-published can is not added twice.
            if can_workspace is None or does_can_workspace_exist_on_ads(can_workspace):
                continue
            name = self._get_output_workspace_name(bundle.state, bundle.reduction_mode, can=True)
            AnalysisDataService.addOrReplace(name, can_workspace)
            if reduction_mode is ISISReductionMode.LAB:
                lab_can_group.addWorkspace(can_workspace)
            elif reduction_mode is ISISReductionMode.HAB:
                hab_can_group.addWorkspace(can_workspace)
            else:
                raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
                                   " be set with a can.".format(reduction_mode))

    if lab_can_group.size() > 0:
        # LAB group workspace is non-empty, so we want to set it as output
        self.setProperty("OutputWorkspaceLABCan", lab_can_group)
    if hab_can_group.size() > 0:
        self.setProperty("OutputWorkspaceHABCan", hab_can_group)
def test_that_get_workspace_names_to_display_from_context_will_attempt_to_get_runs_and_groups_for_single_fit_mode(self):
    """In single-fit mode the model asks the context once for names by run and group/pair."""
    expected_names = ["Name"]
    AnalysisDataService.addOrReplace("Name", CreateSampleWorkspace())
    runs = "All"
    group_or_pair = ["long"]

    # Arrange: single-fit mode with mocked context lookups
    self.model.simultaneous_fitting_mode = False
    self.model._get_selected_groups_and_pairs = mock.MagicMock(return_value=(group_or_pair))
    self.model.context.get_workspace_names_for = mock.MagicMock(return_value=expected_names)
    self.model.context.get_workspace_names_of_fit_data_with_run = mock.MagicMock()

    # Act + assert on the returned names
    self.assertEqual(self.model.get_workspace_names_to_display_from_context(), expected_names)

    # Each collaborator is consulted exactly once with the expected arguments
    self.model._get_selected_groups_and_pairs.assert_called_with()
    self.model.context.get_workspace_names_for.assert_called_with(runs, group_or_pair, True)
    self.assertEqual(1, self.model._get_selected_groups_and_pairs.call_count)
    self.assertEqual(1, self.model.context.get_workspace_names_for.call_count)
def test_only_one_workspace_saving(self):
    """Saving one workspace writes the project file plus a single .nxs file."""
    ws1_name = "ws1"
    ADS.addOrReplace(ws1_name, CreateSampleWorkspace(OutputWorkspace=ws1_name))
    project_saver = projectsaver.ProjectSaver(project_file_ext)
    file_name = working_directory + "/" + os.path.basename(working_directory) + project_file_ext
    workspaces_string = "\"workspaces\": [\"ws1\"]"

    project_saver.save_project(workspace_to_save=[ws1_name], directory=working_directory)

    # Check project file is saved correctly; the previous revision leaked the
    # file handle (open() without a matching close()), so use a context manager.
    with open(file_name, "r") as f:
        file_string = f.read()
    self.assertTrue(workspaces_string in file_string)

    # Check workspace is saved: exactly the project file and one .nxs file
    list_of_files = os.listdir(working_directory)
    self.assertEqual(len(list_of_files), 2)
    self.assertTrue(os.path.basename(working_directory) + project_file_ext in list_of_files)
    self.assertTrue(ws1_name + ".nxs" in list_of_files)
def _fill_s_2d_workspace(self, s_points=None, workspace=None, protons_number=None, nucleons_number=None):
    """Fill a 2D workspace with S data and register it on the ADS.

    :param s_points: 2D array of S values (q values x frequency bins)
    :param workspace: name under which the workspace is published
    :param protons_number: number of protons in the given type of atom
    :param nucleons_number: number of nucleons in the given type of atom
    """
    from mantid.api import NumericAxis
    from abins.constants import MILLI_EV_TO_WAVENUMBER

    # Scale by the atom's cross-section when an atom type was supplied
    if protons_number is not None:
        s_points = s_points * self.get_cross_section(scattering=self._scale_by_cross_section,
                                                     protons_number=protons_number,
                                                     nucleons_number=nucleons_number)

    n_q_values, n_freq_bins = s_points.shape
    n_q_bins = self._q_bins.size
    assert n_q_values + 1 == n_q_bins

    # Convert energy bin edges to the requested unit
    energy_bins = (self._bins / MILLI_EV_TO_WAVENUMBER) if self._energy_units == 'meV' else self._bins

    wrk = WorkspaceFactory.create("Workspace2D", NVectors=n_freq_bins,
                                  XLength=n_q_bins, YLength=n_q_values)

    # One spectrum per frequency bin; the numeric axis carries bin centres
    freq_axis = NumericAxis.create(n_freq_bins)
    freq_offset = (energy_bins[1] - energy_bins[0]) / 2
    for spectrum_index, freq in enumerate(energy_bins[1:]):
        wrk.setX(spectrum_index, self._q_bins)
        wrk.setY(spectrum_index, s_points[:, spectrum_index].T)
        freq_axis.setValue(spectrum_index, freq + freq_offset)
    wrk.replaceAxis(1, freq_axis)

    AnalysisDataService.addOrReplace(workspace, wrk)
    self.set_workspace_units(workspace, layout="2D", energy_units=self._energy_units)
def show(self, name=''):
    """
    Show the workspace in the ADS inside the WorkspaceGroup structure specified in name
    name = dirs/../dirs/workspace_name

    :param name: fully qualified name under which the workspace appears in the ADS
    :raises ValueError: if no usable name is available
    """
    # Reject the call when neither the argument nor the stored name is set
    if not name and not self.name:
        raise ValueError("Cannot store workspace in ADS with name : ", str(name))

    # NOTE(review): this unconditionally overwrites self.name, even when ``name``
    # is empty but self.name was previously set (the guard above only rejects
    # when *both* are empty) — confirm that clobbering self.name is intended.
    self.name = str(name)

    if len(self.name) > 0:
        # add workspace to ADS; only hidden workspaces are transferred, after
        # which the local reference is dropped and the ADS owns the object
        if self.is_hidden:
            AnalysisDataService.addOrReplace(self._workspace_name, self.workspace)
            self._workspace = None
            self._is_in_ads = True
    else:
        raise ValueError("Cannot store workspace in ADS with name : ", str(name))
def _prepare_workspaces(number_of_workspaces, tagged_workspace_names=None, state=None, reduction_mode=None):
    """Create sample workspaces on the ADS; optionally tag extra ones as reduced cans."""
    create_name = "CreateSampleWorkspace"
    create_options = {"OutputWorkspace": "test", "NumBanks": 1, "BankPixelWidth": 2,
                      "XMin": 1, "XMax": 10, "BinWidth": 2}
    create_alg = create_unmanaged_algorithm(create_name, **create_options)

    # Plain numbered sample workspaces: test_0, test_1, ...
    for index in range(number_of_workspaces):
        create_alg.execute()
        created = create_alg.getProperty("OutputWorkspace").value
        AnalysisDataService.addOrReplace("test" + "_" + str(index), created)

    # Tagged workspaces additionally carry a reduced-can hash in their logs
    if tagged_workspace_names is not None:
        for tag, tagged_name in list(tagged_workspace_names.items()):
            create_alg.execute()
            tagged_workspace = create_alg.getProperty("OutputWorkspace").value
            AnalysisDataService.addOrReplace(tagged_name, tagged_workspace)
            write_hash_into_reduced_can_workspace(state, tagged_workspace, reduction_mode, tag)
def test_only_one_workspace_saving(self):
    """Saving one workspace records it (and no plots) in the project file and writes one .nxs."""
    ws1_name = "ws1"
    ADS.addOrReplace(ws1_name, CreateSampleWorkspace(OutputWorkspace=ws1_name))
    project_saver = projectsaver.ProjectSaver(project_file_ext)
    file_name = working_directory + "/" + os.path.basename(working_directory) + project_file_ext
    workspaces_string = "\"workspaces\": [\"ws1\"]"
    plots_string = "\"plots\": []"

    project_saver.save_project(workspace_to_save=[ws1_name], directory=working_directory)

    # Check project file is saved correctly; the previous revision leaked the
    # file handle (open() without a matching close()), so use a context manager.
    with open(file_name, "r") as f:
        file_string = f.read()
    self.assertTrue(workspaces_string in file_string)
    self.assertTrue(plots_string in file_string)

    # Check workspace is saved: exactly the project file and one .nxs file
    list_of_files = os.listdir(working_directory)
    self.assertEqual(len(list_of_files), 2)
    self.assertTrue(os.path.basename(working_directory) + project_file_ext in list_of_files)
    self.assertTrue(ws1_name + ".nxs" in list_of_files)
def _prepare_workspaces(number_of_workspaces, tagged_workspace_names=None, state=None):
    """Create sample workspaces on the ADS; optionally tag extra ones with a reduced-can hash."""
    create_name = "CreateSampleWorkspace"
    create_options = {"OutputWorkspace": "test", "NumBanks": 1, "BankPixelWidth": 2,
                      "XMin": 1, "XMax": 10, "BinWidth": 2}
    create_alg = create_unmanaged_algorithm(create_name, **create_options)

    # Plain numbered sample workspaces: test_0, test_1, ...
    for index in range(number_of_workspaces):
        create_alg.execute()
        created = create_alg.getProperty("OutputWorkspace").value
        AnalysisDataService.addOrReplace("test" + "_" + str(index), created)

    # Tagged workspaces additionally carry a reduced-can hash in their logs
    if tagged_workspace_names is not None:
        for tag, tagged_name in tagged_workspace_names.items():
            create_alg.execute()
            tagged_workspace = create_alg.getProperty("OutputWorkspace").value
            AnalysisDataService.addOrReplace(tagged_name, tagged_workspace)
            write_hash_into_reduced_can_workspace(state, tagged_workspace, tag)
def _loadRun(self, run, isTrans):
    """Load a run as an event workspace if slicing is requested, or a histogram
    workspace otherwise. Transmission runs are always loaded as histogram workspaces.

    :param run: the run to load
    :param isTrans: True when the run is a transmission run
    :return: the name under which the loaded workspace was published on the ADS
    """
    # Event mode only applies to non-transmission runs with slicing enabled
    event_mode = not isTrans and self._slicingEnabled()
    alg = self.createChildAlgorithm('ReflectometryISISPreprocess',
                                    **{'InputRunList': [run], 'EventMode': event_mode})
    alg.setRethrows(True)
    alg.execute()

    ws = alg.getProperty('OutputWorkspace').value
    monitor_ws = alg.getProperty('MonitorWorkspace').value
    workspace_name = self._prefixedName(_getRunNumberAsString(ws), isTrans)
    AnalysisDataService.addOrReplace(workspace_name, ws)
    # Monitors, when produced, are published under a derived name
    if monitor_ws:
        AnalysisDataService.addOrReplace(_monitorWorkspace(workspace_name), monitor_ws)

    if event_mode:
        _throwIfNotValidReflectometryEventWorkspace(workspace_name)
        self.log().information('Loaded event workspace ' + workspace_name)
    else:
        self.log().information('Loaded workspace ' + workspace_name)
    return workspace_name
def _fill_s_1d_workspace(self, s_points=None, workspace=None, protons_number=None, nucleons_number=None):
    """
    Puts 1D S into workspace.
    :param protons_number: number of protons in the given type fo atom
    :param nucleons_number: number of nucleons in the given type of atom
    :param s_points: dynamical factor for the given atom
    :param workspace: workspace to be filled with S
    """
    # Scale by the atom's cross-section when an atom type was supplied
    if protons_number is not None:
        cross_section = self._get_cross_section(protons_number=protons_number,
                                                nucleons_number=nucleons_number)
        s_points = s_points * self._scale * cross_section

    # Single-spectrum histogram workspace: bins are edges, so X has one extra point
    n_points = s_points.size
    wrk = WorkspaceFactory.create("Workspace2D", NVectors=1,
                                  XLength=n_points + 1, YLength=n_points)
    wrk.getSpectrum(0).setDetectorID(1)
    wrk.setX(0, self._bins)
    wrk.setY(0, s_points)
    AnalysisDataService.addOrReplace(workspace, wrk)

    # Set correct units on workspace
    self._set_workspace_units(wrk=workspace)
def _fill_s_1d_workspace(self, s_points=None, workspace=None, protons_number=None, nucleons_number=None):
    """
    Puts 1D S into workspace.
    :param protons_number: number of protons in the given type fo atom
    :param nucleons_number: number of nucleons in the given type of atom
    :param s_points: dynamical factor for the given atom
    :param workspace: workspace to be filled with S
    """
    if protons_number is not None:
        # Weight the S values by the relevant scattering cross-section
        s_points = (s_points * self._scale
                    * self._get_cross_section(protons_number=protons_number,
                                              nucleons_number=nucleons_number))

    num_spectra = 1
    num_points = s_points.size
    # Histogram layout: X carries bin edges, hence one more value than Y
    wrk = WorkspaceFactory.create("Workspace2D", NVectors=num_spectra,
                                  XLength=num_points + 1, YLength=num_points)
    for spectrum in range(num_spectra):
        wrk.getSpectrum(spectrum).setDetectorID(spectrum + 1)
    wrk.setX(0, self._bins)
    wrk.setY(0, s_points)
    AnalysisDataService.addOrReplace(workspace, wrk)

    # Set correct units on workspace
    self._set_workspace_units(wrk=workspace)
def _group_workspaces(self, workspaces, output_ws_name):
    """
    Groups all the given workspaces into a group with the given name.
    If the group already exists it will add them to that group.

    :param workspaces: the workspaces to group
    :param output_ws_name: the name of the group workspace
    :return: the group workspace, or None when TOF grouping is disabled
    """
    # Grouping is optional and controlled by a property
    if not self.getProperty(Prop.GROUP_TOF).value:
        return

    if AnalysisDataService.doesExist(output_ws_name):
        # Extend the existing group, guarding against a same-named non-group workspace
        ws_group = AnalysisDataService.retrieve(output_ws_name)
        if not isinstance(ws_group, WorkspaceGroup):
            raise RuntimeError('Cannot group TOF workspaces, a workspace called TOF already exists')
        for ws in workspaces:
            if ws not in ws_group:
                ws_group.add(ws)
    else:
        # No group yet: create one and publish it on the ADS
        group_alg = self.createChildAlgorithm("GroupWorkspaces")
        group_alg.setProperty("InputWorkspaces", workspaces)
        group_alg.setProperty("OutputWorkspace", output_ws_name)
        group_alg.execute()
        ws_group = group_alg.getProperty("OutputWorkspace").value
        AnalysisDataService.addOrReplace(output_ws_name, ws_group)
    return ws_group
def test_exportFileAppend2(self):
    """ Test to export file in appending mode
    In this case, the original file will be renamed and a new file will be created
    """
    import time

    # Generate the matrix workspace with some logs
    ws = self.createTestWorkspace()
    AnalysisDataService.addOrReplace("TestMatrixWS", ws)

    # create new file
    alg_test = run_algorithm("ExportExperimentLog",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="TestRecord.txt",
                             SampleLogNames=["run_number", "duration", "proton_charge"],
                             SampleLogTitles=["RUN", "Duration", "ProtonCharge"],
                             SampleLogOperation=[None, None, "sum"],
                             FileMode="new")
    # append with a different log list, which forces the old file to be renamed
    alg_test = run_algorithm("ExportExperimentLog",
                             InputWorkspace="TestMatrixWS",
                             OutputFilename="TestRecord.txt",
                             SampleLogNames=["run_number", "duration", "proton_charge", "SensorA"],
                             SampleLogTitles=["RUN", "Duration", "ProtonCharge", "SensorA"],
                             SampleLogOperation=[None, None, "sum", "0"],
                             FileMode="append")

    # Validate
    self.assertTrue(alg_test.isExecuted())

    # Locate file; print statements converted to Python 3 print() calls
    outfilename = alg_test.getProperty("OutputFilename").value
    try:
        print("Output file is %s. " % outfilename)
        with open(outfilename) as ifile:
            lines = ifile.readlines()
    except IOError:
        print("Unable to open file %s. " % outfilename)
        self.fail()
        return

    # Last line cannot be empty, i.e., before EOF '\n' is not allowed
    lastline = lines[-1]
    self.assertTrue(len(lastline.strip()) > 0)

    # Number of lines (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(len(lines), 2)

    # Check line
    firstdataline = lines[1]
    terms = firstdataline.strip().split("\t")
    self.assertEqual(len(terms), 4)

    # Locate the previous file: the old one is renamed with a timestamp suffix
    fileName, fileExtension = os.path.splitext(outfilename)
    nowstr = time.strftime("%Y_%B_%d_%H_%M")
    oldfilename = fileName + "_" + nowstr + fileExtension
    print("Saved old file is %s. " % oldfilename)
    self.assertTrue(os.path.exists(oldfilename))

    # Remove generated files
    os.remove(outfilename)
    os.remove(oldfilename)
    AnalysisDataService.remove("TestMatrixWS")
    return
def PyExec(self):
    """Load an EXED raw file into a Workspace2D, attach instrument and sample
    logs, and split the last two spectra off into a monitor workspace."""
    fn = self.getPropertyValue("Filename")
    wsn = self.getPropertyValue("OutputWorkspace")
    monitor_workspace_name = self.getPropertyValue("OutputMonitorWorkspace")
    # Default the monitor workspace name when the caller did not provide one
    if monitor_workspace_name == "":
        self.setPropertyValue("OutputMonitorWorkspace", wsn+'_Monitors')
    self.override_angle = self.getPropertyValue("AngleOverride")
    self.fxml = self.getPropertyValue("InstrumentXML")

    # load data
    parms_dict, det_udet, det_count, det_tbc, data = self.read_file(fn)
    nrows = int(parms_dict['NDET'])
    xdata = np.array(det_tbc)
    # Monitors get an evenly spaced TOF axis spanning the same range
    xdata_mon = np.linspace(xdata[0], xdata[-1], len(xdata))
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float produces the identical float64 dtype.
    ydata = data.astype(float)
    ydata = ydata.reshape(nrows, -1)
    # Poisson counting errors
    edata = np.sqrt(ydata)

    nr, nc = ydata.shape
    ws = WorkspaceFactory.create("Workspace2D", NVectors=nr, XLength=nc+1, YLength=nc)
    for i in range(nrows):
        ws.setX(i, xdata)
        ws.setY(i, ydata[i])
        ws.setE(i, edata[i])
    ws.getAxis(0).setUnit('tof')
    AnalysisDataService.addOrReplace(wsn, ws)

    # fix the x values for the monitors (the last two spectra)
    for i in range(nrows-2, nrows):
        ws.setX(i, xdata_mon)

    self.log().information("set detector IDs")
    # set detector IDs
    for i in range(nrows):
        ws.getSpectrum(i).setDetectorID(det_udet[i])

    # Sample_logs: the header values are written into the sample logs
    log_names = [str(sl.encode('ascii', 'ignore').decode()) for sl in parms_dict.keys()]
    log_values = [str(sl.encode('ascii', 'ignore').decode()) if isinstance(sl, UnicodeType) else str(sl)
                  for sl in parms_dict.values()]
    # NaN log values cannot be stored; replace with a sentinel
    for i in range(len(log_values)):
        if ('nan' in log_values[i]) or ('NaN' in log_values[i]):
            log_values[i] = '-1.0'
    AddSampleLogMultiple(Workspace=wsn, LogNames=log_names, LogValues=log_values)
    SetGoniometer(Workspace=wsn, Goniometers='Universal')

    # Use the packaged Exed definition unless a custom XML file was supplied
    if self.fxml == "":
        LoadInstrument(Workspace=wsn, InstrumentName="Exed", RewriteSpectraMap=True)
    else:
        LoadInstrument(Workspace=wsn, Filename=self.fxml, RewriteSpectraMap=True)
    try:
        RotateInstrumentComponent(Workspace=wsn,
                                  ComponentName='Tank',
                                  Y=1,
                                  Angle=-float(parms_dict['phi'].encode('ascii', 'ignore')),
                                  RelativeRotation=False)
    except Exception:  # was a bare except:, which would also swallow KeyboardInterrupt/SystemExit
        self.log().warning("The instrument does not contain a 'Tank' component. "
                           "This means that you are using a custom XML instrument definition. "
                           "OMEGA_MAG will be ignored.")
        self.log().warning("Please make sure that the detector positions in the instrument definition are correct.")

    # Separate monitors into a separate workspace
    __temp_monitors = ExtractSpectra(InputWorkspace=wsn,
                                     WorkspaceIndexList=','.join([str(s) for s in range(nrows-2, nrows)]),
                                     OutputWorkspace=self.getPropertyValue("OutputMonitorWorkspace"))

    # Mask and drop the monitor spectra from the data workspace
    MaskDetectors(Workspace=wsn,
                  WorkspaceIndexList=','.join([str(s) for s in range(nrows-2, nrows)]))
    RemoveMaskedSpectra(InputWorkspace=wsn, OutputWorkspace=wsn)
    self.setProperty("OutputWorkspace", wsn)
    self.setProperty("OutputMonitorWorkspace", __temp_monitors)
def create_absorption_input(  # noqa: C901
    filename,
    props=None,
    num_wl_bins=1000,
    material={},
    geometry={},
    environment={},
    opt_wl_min=0,
    opt_wl_max=Property.EMPTY_DBL,
    metaws=None,
):
    """
    Create an input workspace for carpenter or other absorption corrections

    :param filename: Input file to retrieve properties from the sample log
    :param props: PropertyManager of run characterizations, obtained from PDDetermineCharacterizations
    :param num_wl_bins: The number of wavelength bins used for absorption correction
    :param material: Optional material to use in SetSample
    :param geometry: Optional geometry to use in SetSample
    :param environment: Optional environment to use in SetSample
    :param opt_wl_min: Optional minimum wavelength. If specified, this is used instead of from the props
    :param opt_wl_max: Optional maximum wavelength. If specified, this is used instead of from the props
    :param metaws: Optional workspace name with metadata to use for donor workspace instead of reading from filename
    :return: Name of the donor workspace created
    :raises RuntimeError: if no valid wavelength range can be determined
    """
    # NOTE(review): the mutable default arguments (material/geometry/environment)
    # are kept for interface compatibility; they are rebound below before use.

    def confirmProps(props):
        '''Raise ValueError if *props* is missing or is not a PropertyManager.
        It should only be called if the value is needed.'''
        if props is None:
            raise ValueError("props is required to create donor workspace, props is None")
        if not isinstance(props, PropertyManager):
            raise ValueError("props must be a PropertyManager object")

    log = Logger('CreateAbsorptionInput')

    # Load from file if no workspace with metadata has been given, otherwise avoid a duplicate load with the metaws
    absName = metaws
    if metaws is None:
        absName = '__{}_abs'.format(_getBasename(filename))
        # only these logs are needed by the donor workspace
        allowed_log = ",".join([
            'SampleFormula', 'SampleDensity', "BL11A:CS:ITEMS:HeightInContainerUnits",
            "SampleContainer", "SampleMass"
        ])
        Load(Filename=filename, OutputWorkspace=absName, MetaDataOnly=True, AllowList=allowed_log)

    # attempt to get the wavelength from the function parameters
    if opt_wl_min > 0.:
        wl_min = opt_wl_min
    else:
        # or get it from the PropertyManager
        confirmProps(props)
        wl_min = props['wavelength_min'].value
    if opt_wl_max != Property.EMPTY_DBL:
        wl_max = opt_wl_max
    else:
        # or get it from the PropertyManager
        confirmProps(props)
        wl_max = props['wavelength_max'].value  # unset value is 0.

    # if it isn't found by this point, guess it from the time-of-flight range
    if wl_min == 0. or wl_max == 0.:
        confirmProps(props)
        tof_min = props['tof_min'].value
        tof_max = props['tof_max'].value
        if tof_min >= 0. and tof_max > tof_min:
            log.information('TOF range is {} to {} microseconds'.format(tof_min, tof_max))

            # determine L1 (source-to-sample distance)
            instr = mtd[absName].getInstrument()
            L1 = instr.getSource().getDistance(instr.getSample())
            # determine L2 range from the per-detector distances
            PreprocessDetectorsToMD(InputWorkspace=absName,
                                    OutputWorkspace=absName + '_dets',
                                    GetMaskState=False)
            L2 = mtd[absName + '_dets'].column('L2')
            Lmin = np.min(L2) + L1
            Lmax = np.max(L2) + L1
            DeleteWorkspace(Workspace=absName + '_dets')

            log.information('Distance range is {} to {} meters'.format(Lmin, Lmax))

            # wavelength is h*TOF / (m_n * L); values copied from Kernel/PhysicalConstants.h
            usec_to_sec = 1.e-6
            meter_to_angstrom = 1.e10
            h_m_n = meter_to_angstrom * usec_to_sec * 6.62606896e-34 / 1.674927211e-27
            # longest flight path gives the smallest wavelength for a given TOF
            if wl_min == 0.:
                wl_min = h_m_n * tof_min / Lmax
            if wl_max == 0.:
                wl_max = h_m_n * tof_max / Lmin

    # there isn't a good way to guess it so error out
    if wl_max <= wl_min:
        DeleteWorkspace(Workspace=absName)  # no longer needed
        raise RuntimeError('Invalid wavelength range min={}A max={}A'.format(wl_min, wl_max))
    log.information('Using wavelength range min={}A max={}A'.format(wl_min, wl_max))

    # donor workspace: same instrument/spectra as the metadata workspace, but
    # re-binned onto a uniform wavelength axis
    absorptionWS = WorkspaceFactory.create(
        mtd[absName],
        NVectors=mtd[absName].getNumberHistograms(),
        XLength=num_wl_bins + 1,
        YLength=num_wl_bins)
    xaxis = np.arange(0., float(num_wl_bins + 1)) * (wl_max - wl_min) / (num_wl_bins) + wl_min
    for i in range(absorptionWS.getNumberHistograms()):
        absorptionWS.setX(i, xaxis)
    absorptionWS.getAxis(0).setUnit('Wavelength')
    # this effectively deletes the metadata only workspace
    AnalysisDataService.addOrReplace(absName, absorptionWS)

    # cleanup inputs before delegating work
    if not material:
        material = {}
    if not geometry:
        geometry = {}
    if not environment:
        environment = {}

    # Make sure one is set before calling SetSample
    if material or geometry or environment:
        mantid.simpleapi.SetSampleFromLogs(InputWorkspace=absName,
                                           Material=material,
                                           Geometry=geometry,
                                           Environment=environment)

    return absName
def store_in_ads_as_hidden(workspace_name, workspace):
    """Publish *workspace* on the AnalysisDataService under *workspace_name*.

    NOTE(review): this performs a plain add-or-replace; the "hidden" behaviour
    presumably relies on the caller supplying a name the ADS treats as hidden
    (e.g. one carrying the configured hidden prefix) — confirm against callers.
    """
    AnalysisDataService.addOrReplace(workspace_name, workspace)
def store_in_ads_as_hidden(workspace_name, workspace):
    """Add *workspace* to the AnalysisDataService as *workspace_name*, replacing
    any existing entry.

    NOTE(review): nothing here hides the workspace itself; hiding is presumably
    a property of the supplied name — verify with the calling code.
    """
    AnalysisDataService.addOrReplace(workspace_name, workspace)
def test_confirm_all_workspaces_loaded(self):
    """_confirm_all_workspaces_loaded is True when every named workspace is on the ADS."""
    workspace_name = "ws1"
    ADS.addOrReplace(workspace_name, CreateSampleWorkspace(OutputWorkspace=workspace_name))

    self.assertTrue(projectloader._confirm_all_workspaces_loaded(workspaces_to_confirm=[workspace_name]))
def setUp(self):
    """Publish a sample workspace on the ADS and save a project containing it."""
    workspace_name = "ws1"
    ADS.addOrReplace(workspace_name, CreateSampleWorkspace(OutputWorkspace=workspace_name))

    saver = projectsaver.ProjectSaver(project_file_ext)
    saver.save_project(workspace_to_save=[workspace_name], file_name=working_project_file)