def _create_peaks_workspace(self):
    """Create a dummy PeaksWorkspace on the MINITOPAZ test instrument.

    Peaks are generated on a small Q-grid chosen so that every peak lands
    on the detector bank, then given dummy intensity/sigma/HKL values.
    """
    path = FileFinder.getFullPath("IDFs_for_UNIT_TESTING/MINITOPAZ_Definition.xml")
    inst = LoadEmptyInstrument(Filename=path)
    ws = CreatePeaksWorkspace(inst, 0)
    # The instrument workspace is only needed to seed the peaks workspace
    DeleteWorkspace(inst)
    SetUB(ws, 1, 1, 1, 90, 90, 90)

    # Add a bunch of random peaks that happen to fall on the
    # detector bank defined in the IDF
    center_q = np.array([-5.1302,2.5651,3.71809])
    qs = []
    for i in np.arange(0, 1, 0.1):
        for j in np.arange(-0.5, 0, 0.1):
            q = center_q.copy()
            q[1] += j
            q[2] += i
            qs.append(q)

    # Add the peaks to the PeaksWorkspace with dummy values for intensity,
    # Sigma, and HKL
    for q in qs:
        peak = ws.createPeak(q)
        peak.setIntensity(100)
        peak.setSigmaIntensity(10)
        peak.setHKL(1, 1, 1)
        ws.addPeak(peak)

    return ws
def _run(self):
    '''Defines the workflow for the test'''
    self.tolerance = 1e-3
    # Strip the '.nxs' extension so the sample names double as workspace names
    self.samples = [sample[:-4] for sample in self.samples]

    # Load files into Mantid
    for sample in self.samples:
        LoadNexus(sample, OutputWorkspace=sample)
    LoadNexus(FileFinder.getFullPath(self.resolution), OutputWorkspace=self.resolution)

    # TransformToIqt returns (parameter table, I(Q,t) workspace); only the
    # workspace is needed here
    _, iqt_ws = TransformToIqt(SampleWorkspace=self.samples[0],
                               ResolutionWorkspace=self.resolution,
                               EnergyMin=self.e_min,
                               EnergyMax=self.e_max,
                               BinReductionFactor=self.num_bins,
                               DryRun=False,
                               NumberOfIterations=200)

    # Test IqtFit Sequential
    iqtfitSeq_ws, params, fit_group = IqtFitSequential(InputWorkspace=iqt_ws,
                                                       Function=self.func,
                                                       StartX=self.startx,
                                                       EndX=self.endx,
                                                       SpecMin=0,
                                                       SpecMax=self.spec_max)

    self.result_names = [iqt_ws.name(), iqtfitSeq_ws[0].name()]

    # Remove workspaces from Mantid
    for sample in self.samples:
        DeleteWorkspace(sample)
    DeleteWorkspace(params)
    DeleteWorkspace(fit_group)
    DeleteWorkspace(self.resolution)
def cleanup(self):
    """Delete the .hkl files produced by the test run.

    :return: True so the framework records a successful cleanup.
    """
    # Renamed loop variable: 'file' shadows the Python 2 builtin
    files = ["TOPAZ_3132.hkl", "TOPAZ_3132FFT.hkl"]
    for filename in files:
        absfile = FileFinder.getFullPath(filename)
        if os.path.exists(absfile):
            os.remove(absfile)
    return True
def find_full_file_path(file_name):
    """Resolve *file_name* against the Mantid search paths.

    :param file_name: the name of the file.
    :return: the full file path reported by the FileFinder.
    """
    full_path = FileFinder.getFullPath(file_name)
    return full_path
def find_data(file, instrument='', allow_multiple=False):
    """
    Finds a file path for the specified data set, which can either be:
        - a run number
        - an absolute path
        - a file name
    @param file: file name or part of a file name
    @param instrument: if supplied, used as a hint for the run-number search
    @param allow_multiple: if True, a comma/semicolon separated list may be
        supplied and a list of paths is returned when all are found
    @raise RuntimeError: when no matching file can be located
    """
    # First, assume a file name
    file = str(file).strip()

    # If we allow multiple files, users may use ; as a separator,
    # which is incompatible with the FileFinder
    n_files = 1
    if allow_multiple:
        file = file.replace(';', ',')
        n_files = len(file.split(','))

    instrument = str(instrument)
    file_path = FileFinder.getFullPath(file)
    if os.path.isfile(file_path):
        return file_path

    def _match_runs(hint):
        # Ask FileFinder for runs matching the hint; return a single path,
        # a list of paths (allow_multiple) or None when nothing usable.
        try:
            found = FileFinder.findRuns(hint)
            if os.path.isfile(found[0]):
                if allow_multiple:
                    # Mantid returns its own list object type, so make a real list out of it
                    if len(found) == n_files:
                        return list(found)
                else:
                    return found[0]
        except Exception:
            # FileFinder couldn't make sense of the supplied information
            pass
        return None

    # Second, assume a run number and pass the instrument name as a hint
    # FileFinder doesn't like dashes...
    instrument = instrument.replace('-', '')
    result = _match_runs(instrument + file)
    if result is not None:
        return result

    # Third, assume a run number, without instrument name to take care of list of full paths
    result = _match_runs(file)
    if result is not None:
        return result

    # If we didn't find anything, raise an exception
    Logger('find_data').error("\n\nCould not find a file for %s: check your reduction parameters\n\n" % str(file))
    raise RuntimeError("Could not find a file for %s" % str(file))
def do_cleanup():
    """Delete the PG3 output files produced by the test run.

    :return: True so the framework records a successful cleanup.
    """
    # Renamed loop variable: 'file' shadows the Python 2 builtin
    files = ["PG3_9829.gsa", "PG3_9829.py", "PG3_9830.gsa", "PG3_9830.py"]
    for filename in files:
        absfile = FileFinder.getFullPath(filename)
        if os.path.exists(absfile):
            os.remove(absfile)
    return True
def do_cleanup():
    """Delete the BioSANS reduction outputs left behind by the test."""
    outputs = ("BioSANS_test_data_reduction.log",
               "BioSANS_test_data_Iq.xml",
               "BioSANS_test_data_Iq.txt",
               "BioSANS_test_data_Iqxy.dat")
    for name in outputs:
        located = FileFinder.getFullPath(name)
        if os.path.exists(located):
            os.remove(located)
    return True
def runTest(self):
    """Run a SANS2D multi-period batch reduction in compatibility mode."""
    UseCompatibilityMode()
    SANS2D()
    Set1D()
    Detector("rear-detector")
    MaskFile('MASKSANS2Doptions.091A')
    Gravity(True)

    csv_file = FileFinder.getFullPath('SANS2D_multiPeriodTests.csv')
    BatchReduce(csv_file, 'nxs', saveAlgs={})
    # Workspace name checked by the validation step
    self.reduced = '5512_SANS2DBatch'
def runTest(self):
    """Batch-reduce SANS2D period data and remove the saved CanSAS file."""
    UseCompatibilityMode()
    SANS2D()
    Set1D()
    Detector("rear-detector")
    MaskFile('MASKSANS2Doptions.091A')
    Gravity(True)

    csv_file = FileFinder.getFullPath('SANS2D_periodTests.csv')
    BatchReduce(csv_file, 'nxs', plotresults=False,
                saveAlgs={'SaveCanSAS1D': 'xml', 'SaveNexus': 'nxs'})
    # Only the workspace output is validated; drop the file written to disk
    os.remove(os.path.join(config['defaultsave.directory'], '5512p7_SANS2DBatch.xml'))
def __init__(self):
    ISISIndirectInelasticConvFit.__init__(self)
    self.sample = 'osi97935_graphite002_red.nxs'
    self.resolution = FileFinder.getFullPath('osi97935_graphite002_res.nxs')
    # ConvFit fit function: linear background plus a Lorentzian convolved
    # with the measured resolution workspace
    self.func = 'name=LinearBackground,A0=0,A1=0;(composite=Convolution,FixResolution=true,NumDeriv=true;'\
                'name=Resolution,Workspace=\"%s\";name=Lorentzian,Amplitude=2,PeakCentre=0,FWHM=0.05)' % self.resolution
    self.startx = -0.2
    self.endx = 0.2
    self.bg = 'Fit Linear'
    self.spectra_min = 0
    self.spectra_max = 41
    self.ties = False
    self.result_names = ['osi97935_graphite002_conv_1LFitL_s0_to_41_Result']
def __init__(self):
    ISISIndirectInelasticConvFit.__init__(self)
    self.sample = 'irs53664_graphite002_red.nxs'
    self.resolution = FileFinder.getFullPath('irs53664_graphite002_res.nxs')
    # ConvFit fit function: linear background plus a Lorentzian convolved
    # with the measured resolution workspace
    self.func = 'name=LinearBackground,A0=0.060623,A1=0.001343;(composite=Convolution,FixResolution=true,NumDeriv=true;'\
                'name=Resolution,Workspace=\"%s\";name=Lorentzian,Amplitude=1.033150,PeakCentre=-0.000841,FWHM=0.001576)'\
                % (self.resolution)
    self.startx = -0.2
    self.endx = 0.2
    self.bg = 'Fit Linear'
    self.spectra_min = 0
    self.spectra_max = 50
    self.ties = False
    self.result_names = ['irs53664_graphite002_conv_1LFitL_s0_to_50_Result']
def _run(self):
    '''Defines the workflow for the test'''
    self.tolerance = 1e-4
    LoadNexus(self.sample, OutputWorkspace=self.sample)
    LoadNexus(FileFinder.getFullPath(self.resolution), OutputWorkspace=self.resolution)

    # Run the sequential convolution fit over the configured spectrum range
    convfitSeq_ws, params, fit_group = ConvolutionFitSequential(InputWorkspace=self.sample,
                                                                Function=self.func,
                                                                PassWSIndexToFunction=self.passWSIndexToFunction,
                                                                StartX=self.startx,
                                                                EndX=self.endx,
                                                                SpecMin=self.spectra_min,
                                                                SpecMax=self.spectra_max,
                                                                PeakRadius=5)

    self.result_names = [convfitSeq_ws[0].name()]
def __verifyRequiredFile(self, filename):
    '''Return True if the specified file name is findable by Mantid.'''
    from mantid.api import FileFinder

    # simple way is just getFullPath which never uses archive search
    if os.path.exists(FileFinder.getFullPath(filename)):
        return True

    # try full findRuns which will use archive search if it is turned on
    try:
        candidates = FileFinder.findRuns(filename)
        for item in candidates:
            if os.path.exists(item):
                return True
    except RuntimeError:
        # Python 3 'except ... as' syntax; the exception object was unused
        return False

    # file was not found
    return False
def do_cleanup():
    """Delete every PG3 output file the test run may have generated."""
    generated = ("PG3_9829.getn", "PG3_9829.gsa", "PG3_9829.py",
                 "sum_PG3_9829.gsa", "sum_PG3_9829.py",
                 "PG3_9830.gsa", "PG3_9830.py",
                 "PG3_4844-1.dat", "PG3_4844.getn", "PG3_4844.gsa",
                 "PG3_4844.py", "PG3_4866.gsa")
    for name in generated:
        located = FileFinder.getFullPath(name)
        if os.path.exists(located):
            os.remove(located)
    return True
def __init__(self):
    ISISIndirectInelasticConvFit.__init__(self)
    self.sample = 'osi97935_graphite002_red.nxs'
    self.resolution = FileFinder.getFullPath('osi97935_graphite002_res.nxs')
    # ConvFit fit function: linear background plus a Lorentzian convolved
    # with the measured resolution workspace
    self.func = 'name=LinearBackground,A0=0,A1=0;(composite=Convolution,FixResolution=true,NumDeriv=true;'\
                'name=Resolution,Workspace=\"%s\";name=Lorentzian,Amplitude=2,PeakCentre=0,FWHM=0.05)' % self.resolution
    self.startx = -0.2
    self.endx = 0.2
    self.bg = 'Fit Linear'
    self.spectra_min = 0
    self.spectra_max = 41
    self.ties = False
    self.result_names = ['osi97935_graphite002_conv_1LFitL_s0_to_41_Result']
def _run(self):
    '''Defines the workflow for the test'''
    self.tolerance = 1e-4
    LoadNexus(self.sample, OutputWorkspace=self.sample)
    LoadNexus(FileFinder.getFullPath(self.resolution), OutputWorkspace=self.resolution)

    # Run the sequential convolution fit over the configured spectrum range
    convfitSeq_ws, params, fit_group = ConvolutionFitSequential(
        InputWorkspace=self.sample,
        Function=self.func,
        PassWSIndexToFunction=self.passWSIndexToFunction,
        StartX=self.startx,
        EndX=self.endx,
        SpecMin=self.spectra_min,
        SpecMax=self.spectra_max,
        PeakRadius=5)

    self.result_names = [convfitSeq_ws[0].name()]
def __init__(self):
    ISISIndirectInelasticConvFit.__init__(self)
    self.sample = 'irs53664_graphite002_red.nxs'
    self.resolution = FileFinder.getFullPath('irs53664_graphite002_res.nxs')
    # ConvFit fit function: linear background plus a Lorentzian convolved
    # with the measured resolution workspace
    self.func = 'name=LinearBackground,A0=0.060623,A1=0.001343;(composite=Convolution,FixResolution=true,NumDeriv=true;'\
                'name=Resolution,Workspace=\"%s\";name=Lorentzian,Amplitude=1.033150,PeakCentre=-0.000841,FWHM=0.001576)'\
                % (self.resolution)
    self.startx = -0.2
    self.endx = 0.2
    self.bg = 'Fit Linear'
    self.spectra_min = 0
    self.spectra_max = 50
    self.ties = False
    self.result_names = ['irs53664_graphite002_conv_1LFitL_s0_to_50_Result']
def __verifyRequiredFile(self, filename):
    '''Return True if the specified file name is findable by Mantid.'''
    from mantid.api import FileFinder

    # simple way is just getFullPath which never uses archive search
    if os.path.exists(FileFinder.getFullPath(filename)):
        return True

    # try full findRuns which will use archive search if it is turned on
    try:
        candidates = FileFinder.findRuns(filename)
        for item in candidates:
            if os.path.exists(item):
                return True
    except RuntimeError:
        # exception object was never used, so drop the 'as e' binding
        return False

    # file was not found
    return False
def runTest(self):
    """LOQ batch reduction followed by summing two of the saved results."""
    UseCompatibilityMode()
    LOQ()
    Detector("main-detector-bank")
    csv_file = FileFinder.getFullPath('batch_input.csv')
    Set1D()
    MaskFile('MASK.094AA')
    Gravity(True)
    BatchReduce(csv_file, 'raw', plotresults=False,
                saveAlgs={'SaveCanSAS1D': 'xml', 'SaveNexus': 'nxs'})
    LoadNexus(Filename='54433sans.nxs', OutputWorkspace='result')
    Plus(LHSWorkspace='result', RHSWorkspace='99630sanotrans', OutputWorkspace= 'result')
    # Remove the files written out by the batch reduction
    os.remove(os.path.join(config['defaultsave.directory'],'54433sans.nxs'))
    os.remove(os.path.join(config['defaultsave.directory'],'99630sanotrans.nxs'))
    os.remove(os.path.join(config['defaultsave.directory'],'54433sans.xml'))
    os.remove(os.path.join(config['defaultsave.directory'],'99630sanotrans.xml'))
def on_user_file_load(self):
    """
    Loads the user file. Populates the models and the view.
    """
    try:
        # 1. Get the user file path from the view
        user_file_path = self._view.get_user_file_path()

        if not user_file_path:
            return

        # 2. Get the full file path
        user_file_path = FileFinder.getFullPath(user_file_path)
        if not os.path.exists(user_file_path):
            raise RuntimeError(
                "The user path {} does not exist. Make sure a valid user file path"
                " has been specified.".format(user_file_path))

        self._table_model.user_file = user_file_path
        # Clear out the current view
        self._view.reset_all_fields_to_default()

        # 3. Read and parse the user file
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()

        # 4. Populate the model
        self._state_model = StateGuiModel(user_file_items)
        # 5. Update the views.
        self._update_view_from_state_model()
        self._beam_centre_presenter.update_centre_positions(self._state_model)

        # 6. Propagate the new state to the child presenters
        self._beam_centre_presenter.on_update_rows()
        self._masking_table_presenter.on_update_rows()
        self._workspace_diagnostic_presenter.on_user_file_load(user_file_path)
    except Exception as e:
        # Any failure is reported to the user rather than propagated
        self.sans_logger.error("Loading of the user file failed. {}".format(str(e)))
        self.display_warning_box('Warning', 'Loading of the user file failed.', str(e))
def on_mask_file_add(self):
    """
    We get the added mask file name and add it to the list of masks
    """
    new_mask_file = self._view.get_mask_file()
    if not new_mask_file:
        return
    # Only accept the file if the FileFinder can resolve it
    new_mask_file_full_path = FileFinder.getFullPath(new_mask_file)
    if not new_mask_file_full_path:
        return

    # Add the new mask file to state model
    # NOTE(review): the model stores the name as entered (new_mask_file),
    # not the resolved full path -- confirm this is intentional
    mask_files = self._state_model.mask_files
    mask_files.append(new_mask_file)
    self._state_model.mask_files = mask_files

    # Make sure that the sub-presenters are up to date with this change
    self._masking_table_presenter.on_update_rows()
    self._settings_diagnostic_tab_presenter.on_update_rows()
def runTest(self):
    """Batch-reduce SANS2D period data and delete the saved CanSAS file."""
    UseCompatibilityMode()
    SANS2D()
    Set1D()
    Detector("rear-detector")
    MaskFile('MASKSANS2Doptions.091A')
    Gravity(True)
    csv_file = FileFinder.getFullPath('SANS2D_periodTests.csv')

    BatchReduce(csv_file, 'nxs', plotresults=False,
                saveAlgs={'SaveCanSAS1D': 'xml', 'SaveNexus': 'nxs'})
    # Only the workspace output is validated; drop the file written to disk
    os.remove(os.path.join(config['defaultsave.directory'],
                           '5512p7_SANS2DBatch_p7rear_1D_2.0_14.0Phi-45.0_45.0.xml'))
def runTest(self):
    """Batch-reduce SANS2D TUBES data with zero-error-free saving enabled."""
    UseCompatibilityMode()
    config['default.instrument'] = 'SANS2D'
    SANS2DTUBES()
    Set1D()
    Detector("rear-detector")
    # This contains two MASKFILE commands, each resulting in a separate call to MaskDetectors.
    MaskFile('SANS2DTube_ZerroErrorFreeTest.txt')

    # Saves a file which produces an output file which does not contain any zero errors
    csv_file = FileFinder.getFullPath("SANS2DTUBES_ZeroErrorFree_batch.csv")
    save_alg = {"SaveNexus": "nxs"}
    BatchReduce(csv_file, 'nxs', saveAlgs=save_alg, plotresults=False,
                save_as_zero_error_free=True)
    DeleteWorkspace('zero_free_out')

    # The zero correction only occurs for the saved files. Stephen King mentioned that the
    # original workspaces should not be tampered with
    self._final_output = os.path.join(config['defaultsave.directory'], 'zero_free_out.nxs')
    self._final_workspace = 'ws'
    Load(Filename=self._final_output, OutputWorkspace=self._final_workspace)
def runTest(self):
    """Run the same SANS2D reduction twice via a batch file and tidy the outputs."""
    UseCompatibilityMode()
    config['default.instrument'] = 'SANS2D'
    SANS2D()
    Set1D()
    Detector("rear-detector")
    # This contains two MASKFILE commands, each resulting in a separate call to MaskDetectors.
    MaskFile('MaskSANS2DReductionGUI_MaskFiles.txt')
    Gravity(True)

    # This does 2 separate reductions of the same data, but saving the result of each to a different workspace.
    csv_file = FileFinder.getFullPath("SANS2D_mask_batch.csv")
    BatchReduce(csv_file, 'nxs', plotresults=False)

    # Remove the per-iteration files if the reduction wrote them
    path1 = os.path.join(config['defaultsave.directory'], 'iteration_1.xml')
    path2 = os.path.join(config['defaultsave.directory'], 'iteration_2.xml')
    if os.path.exists(path1):
        os.remove(path1)
    if os.path.exists(path2):
        os.remove(path2)
def __init__(self):
    ISISIndirectInelasticConvFit.__init__(self)
    self.sample = 'irs53664_graphite002_red.nxs'
    self.resolution = FileFinder.getFullPath('irs53664_graphite002_res.nxs')
    # ConvFit fit function: linear background plus a Lorentzian with the
    # peak centre tied to zero and a lower bound on the FWHM
    self.func = 'name=LinearBackground,A0=0.060623,A1=0.001343;' \
                '(composite=Convolution,FixResolution=true,NumDeriv=true;' \
                'name=Resolution,Workspace="%s";name=Lorentzian,Amplitude=1.033150,FWHM=0.001576,'\
                'ties=(PeakCentre=0.0),constraints=(FWHM>0.001))' % self.resolution
    self.passWSIndexToFunction = False  # irs53664_graphite002_res is single histogram
    self.startx = -0.2
    self.endx = 0.2
    self.bg = 'Fit Linear'
    self.spectra_min = 0
    self.spectra_max = 50
    self.ties = False
    self.result_names = ['irs53664_graphite002_conv_1LFitL_s0_to_50_Result']
def _create_states(self, state_model, table_model, row_index=None):
    """
    Here we create the states based on the settings in the models
    :param state_model: the state model object
    :param table_model: the table model object
    :param row_index: the selected row, if None then all rows are generated
    :return: a dict mapping row number to generated state, or None when the
        requested row index is out of range
    """
    number_of_rows = self._view.get_number_of_rows()
    if row_index is not None:
        # Check if the selected index is valid
        if row_index >= number_of_rows:
            return None
        rows = [row_index]
    else:
        rows = range(number_of_rows)
    states = {}
    # Default director built from the shared state model; rows without their
    # own user file fall back to it
    gui_state_director = GuiStateDirector(table_model, state_model, self._facility)
    for row in rows:
        self.sans_logger.information("Generating state for row {}".format(row))
        if not self.is_empty_row(row):
            row_user_file = table_model.get_row_user_file(row)
            if row_user_file:
                # A row-specific user file overrides the shared state model
                user_file_path = FileFinder.getFullPath(row_user_file)
                if not os.path.exists(user_file_path):
                    raise RuntimeError(
                        "The user path {} does not exist. Make sure a valid user file path"
                        " has been specified.".format(user_file_path))

                user_file_reader = UserFileReader(user_file_path)
                user_file_items = user_file_reader.read_user_file()

                row_state_model = StateGuiModel(user_file_items)
                row_gui_state_director = GuiStateDirector(
                    table_model, row_state_model, self._facility)
                self._create_row_state(row_gui_state_director, states, row)
            else:
                self._create_row_state(gui_state_director, states, row)
    return states
def on_user_file_load(self):
    """
    Loads the user file. Populates the models and the view.
    """
    try:
        # 1. Get the user file path from the view
        user_file_path = self._view.get_user_file_path()

        if not user_file_path:
            return

        # 2. Get the full file path
        user_file_path = FileFinder.getFullPath(user_file_path)
        if not os.path.exists(user_file_path):
            raise RuntimeError(
                "The user path {} does not exist. Make sure a valid user file path"
                " has been specified.".format(user_file_path))

        # Clear out the current view
        self._view.reset_all_fields_to_default()

        # 3. Read and parse the user file
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()

        # 4. Populate the model
        self._state_model = StateGuiModel(user_file_items)
        # 5. Update the views.
        self._update_view_from_state_model()

        # 6. Perform calls on child presenters
        self._masking_table_presenter.on_update_rows()
        self._settings_diagnostic_tab_presenter.on_update_rows()
    except Exception as e:
        # Any failure is logged rather than propagated to the GUI
        self.sans_logger.error(
            "Loading of the user file failed. Ensure that the path to your files has been added "
            "to the Mantid search directories! See here for more details: {}".format(str(e)))
def do_cleanup():
    """Delete every PG3 output file the test run may have generated."""
    generated = ('PG3_9829.getn', 'PG3_9829.gsa', 'PG3_9829.py',
                 'sum_PG3_9829.gsa', 'sum_PG3_9829.py',
                 'PG3_9830.gsa', 'PG3_9830.py',
                 'PG3_4844-1.dat', 'PG3_4844.getn', 'PG3_4844.gsa',
                 'PG3_4844.py', 'PG3_4866.gsa',
                 'PG3_46577.nxs', 'PG3_46577.py',
                 'PP_absorption_PG3_46577.nxs', 'PP_absorption_PG3_46577.py')
    for name in generated:
        located = FileFinder.getFullPath(name)
        if os.path.exists(located):
            os.remove(located)
    return True
def runTest(self):
    """LOQ batch reduction; sum two reduced workspaces and delete saved files."""
    UseCompatibilityMode()
    LOQ()
    Detector("main-detector-bank")
    csv_file = FileFinder.getFullPath('batch_input.csv')
    Set1D()
    MaskFile('MASK.094AA')
    Gravity(True)
    BatchReduce(csv_file, 'raw', plotresults=False,
                saveAlgs={'SaveCanSAS1D': 'xml', 'SaveNexus': 'nxs'})
    LoadNexus(Filename='54433sans_main_1D_2.2_10.0.nxs', OutputWorkspace='result')
    Plus(LHSWorkspace='result', RHSWorkspace='99630sanotrans_main_1D_2.2_10.0',
         OutputWorkspace='result')
    # Remove the files written out by the batch reduction
    os.remove(os.path.join(config['defaultsave.directory'], '54433sans_main_1D_2.2_10.0.nxs'))
    os.remove(os.path.join(config['defaultsave.directory'], '99630sanotrans_main_1D_2.2_10.0.nxs'))
    os.remove(os.path.join(config['defaultsave.directory'], '54433sans_main_1D_2.2_10.0.xml'))
    os.remove(os.path.join(config['defaultsave.directory'], '99630sanotrans_main_1D_2.2_10.0.xml'))
def _run(self):
    '''Defines the workflow for the test'''
    self.tolerance = 1e-3
    # Strip the '.nxs' extension so the sample names double as workspace names
    self.samples = [sample[:-4] for sample in self.samples]

    # Load files into Mantid
    for sample in self.samples:
        LoadNexus(sample, OutputWorkspace=sample)
    LoadNexus(FileFinder.getFullPath(self.resolution), OutputWorkspace=self.resolution)

    # TransformToIqt returns (parameter table, I(Q,t) workspace); only the
    # workspace is needed here
    _, iqt_ws = TransformToIqt(SampleWorkspace=self.samples[0],
                               ResolutionWorkspace=self.resolution,
                               EnergyMin=self.e_min,
                               EnergyMax=self.e_max,
                               BinReductionFactor=self.num_bins,
                               DryRun=False,
                               NumberOfIterations=200)

    # Test IqtFit Sequential
    iqtfitSeq_ws, params, fit_group = IqtFitSequential(
        InputWorkspace=iqt_ws,
        Function=self.func,
        StartX=self.startx,
        EndX=self.endx,
        SpecMin=0,
        SpecMax=self.spec_max)

    self.result_names = [iqt_ws.name(), iqtfitSeq_ws[0].name()]

    # Remove workspaces from Mantid
    for sample in self.samples:
        DeleteWorkspace(sample)
    DeleteWorkspace(params)
    DeleteWorkspace(fit_group)
    DeleteWorkspace(self.resolution)
def validateGSS(self):
    '''Validate the created gss file'''
    from mantid.api import FileFinder
    # Pair of (generated file, reference file) for the framework to compare
    return self.gss_file, FileFinder.getFullPath(self.ref_gss_file)
def cleanup(self):
    """Delete the reduction log file if it exists on the search paths."""
    log_path = FileFinder.getFullPath("EQSANS_1466_event_reduction.log")
    if os.path.exists(log_path):
        os.remove(log_path)
    return True
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin # SPDX - License - Identifier: GPL - 3.0 + #pylint: disable=attribute-defined-outside-init from __future__ import (absolute_import, division, print_function) import systemtesting from mantid.kernel import config from mantid.api import FileFinder from sans.command_interface.ISISCommandInterface import (UseCompatibilityMode, LOQ, MaskFile, BatchReduce) MASKFILE = FileFinder.getFullPath('MaskLOQData.txt') BATCHFILE = FileFinder.getFullPath('loq_batch_mode_reduction.csv') class LOQMinimalBatchReductionTest_V2(systemtesting.MantidSystemTest): def __init__(self): super(LOQMinimalBatchReductionTest_V2, self).__init__() config['default.instrument'] = 'LOQ' def runTest(self): UseCompatibilityMode() LOQ() MaskFile(MASKFILE) BatchReduce(BATCHFILE, '.nxs', combineDet='merged', saveAlgs={}) def validate(self): # note increased tolerance to something which quite high # this is partly a temperary measure, but also justified by
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin # SPDX - License - Identifier: GPL - 3.0 + #pylint: disable=attribute-defined-outside-init from __future__ import (absolute_import, division, print_function) import stresstesting from mantid.kernel import config from mantid.api import FileFinder from sans.command_interface.ISISCommandInterface import (UseCompatibilityMode, LOQ, MaskFile, BatchReduce) MASKFILE = FileFinder.getFullPath('MaskLOQData.txt') BATCHFILE = FileFinder.getFullPath('loq_batch_mode_reduction.csv') class LOQMinimalBatchReductionTest_V2(stresstesting.MantidStressTest): def __init__(self): super(LOQMinimalBatchReductionTest_V2, self).__init__() config['default.instrument'] = 'LOQ' def runTest(self): UseCompatibilityMode() LOQ() MaskFile(MASKFILE) BatchReduce(BATCHFILE, '.nxs', combineDet='merged', saveAlgs={}) def validate(self):
#pylint: disable=invalid-name,attribute-defined-outside-init """ The tests here are ports from the original SANS2DReductionGUI.py test suite. Not all tests can be ported since they include details about the ReductionSingleton """ from __future__ import (absolute_import, division, print_function) import stresstesting from mantid.kernel import (config) from mantid.api import (FileFinder) from mantid.simpleapi import RenameWorkspace from sans.command_interface.ISISCommandInterface import ( BatchReduce, SANS2D, MaskFile, AssignSample, AssignCan, TransmissionSample, TransmissionCan, WavRangeReduction, UseCompatibilityMode, FindBeamCentre) MASKFILE = FileFinder.getFullPath('MaskSANS2DReductionGUI.txt') BATCHFILE = FileFinder.getFullPath('sans2d_reduction_gui_batch.csv') class SANS2DMinimalBatchReductionTest_V2(stresstesting.MantidStressTest): """Minimal script to perform full reduction in batch mode """ def __init__(self): super(SANS2DMinimalBatchReductionTest_V2, self).__init__() config['default.instrument'] = 'SANS2D' self.tolerance_is_reller = True self.tolerance = 1.0e-2 def runTest(self): UseCompatibilityMode() SANS2D()
def test_full_path_returns_an_absolute_path_and_the_files_exists(self):
    """getFullPath should return a non-empty path to an existing file."""
    path = FileFinder.getFullPath("CNCS_7860_event.nxs")
    self.assertTrue(len(path) > 0)
    # We can't be sure what the full path is in general but it should certainly exist!
    self.assertTrue(os.path.exists(path))
def cleanup(self):
    """Delete the generated input.csv, if it can be located."""
    print("Cleanup")
    csv_path = FileFinder.getFullPath("input.csv")
    if os.path.exists(csv_path):
        os.remove(csv_path)
    return True
def _find_file_or_die(self, name):
    """Return the full path of *name*, failing the test when it cannot be found."""
    located = FileFinder.getFullPath(name)
    if located:
        return located
    # self.fail raises, so execution never continues past this point
    self.fail("Could not find file \"{}\"".format(name))
def _get_reference_result(self, name):
    """Resolve a reference data file via the Mantid search paths.

    :param name: file name of the reference result.
    :return: the full path to the file.
    :raises RuntimeError: if the file cannot be found.
    """
    path = FileFinder.getFullPath(name)
    # getFullPath returns a string (empty when missing); 'not path' covers
    # both the empty-string and any None case
    if not path:
        raise RuntimeError(
            "Could not find unit test data: {}".format(name))
    return path
#pylint: disable=invalid-name,attribute-defined-outside-init """ The tests here are ports from the original SANS2DReductionGUI.py test suite. Not all tests can be ported since they include details about the ReductionSingleton """ from __future__ import (absolute_import, division, print_function) import systemtesting from mantid.kernel import (config) from mantid.api import (FileFinder) from mantid.simpleapi import RenameWorkspace from sans.command_interface.ISISCommandInterface import (BatchReduce, SANS2D, MaskFile, AssignSample, AssignCan, TransmissionSample, TransmissionCan, WavRangeReduction, UseCompatibilityMode, FindBeamCentre) MASKFILE = FileFinder.getFullPath('MaskSANS2DReductionGUI.txt') BATCHFILE = FileFinder.getFullPath('sans2d_reduction_gui_batch.csv') class SANS2DMinimalBatchReductionTest_V2(systemtesting.MantidSystemTest): """Minimal script to perform full reduction in batch mode """ def __init__(self): super(SANS2DMinimalBatchReductionTest_V2, self).__init__() config['default.instrument'] = 'SANS2D' self.tolerance_is_rel_err = True self.tolerance = 1.0e-2 def runTest(self): UseCompatibilityMode() SANS2D()
def cleanup(self):
    """Delete the generated input.csv, if present.

    :return: True so the framework records a successful cleanup.
    """
    # print as a function for Python 3 compatibility
    print("Cleanup")
    absfile = FileFinder.getFullPath("input.csv")
    if os.path.exists(absfile):
        os.remove(absfile)
    return True
def _get_reference_result(self, name):
    """Resolve a reference data file via the Mantid search paths.

    :param name: file name of the reference result.
    :return: the full path to the file.
    :raises RuntimeError: if the file cannot be found.
    """
    path = FileFinder.getFullPath(name)
    # getFullPath returns a string (empty when missing); 'not path' covers
    # both the empty-string and any None case
    if not path:
        raise RuntimeError("Could not find unit test data: {}".format(name))
    return path
def validateDXYE(self):
    '''Validate the next created xye (d-spacing) file against its reference'''
    from mantid.api import FileFinder
    # The index advances on every call so successive calls step through
    # xye_d_files / ref_xye_d_files in matching pairs
    i = self.file_index
    self.file_index += 1
    return self.xye_d_files[i], FileFinder.getFullPath(self.ref_xye_d_files[i])
def setUp(self):
    """Prepare a Voigt fit in FoilOut mode using the IP0005 calibration."""
    self._function = 'Voigt'
    self._parameter_file = FileFinder.getFullPath("IP0005.par")
    self._calibrated_params = self.load_ip_file()
    self._mode = 'FoilOut'
    self._energy_estimates = np.array([ENERGY_ESTIMATE])
help='ISIS SANS2D user file to configure the reduction.') parser.add_argument( '-e', '--event-mode', action='store_true', help= 'Use event mode reduction instead of legacy reduction which converts to histograms early.' ) args = parser.parse_args() # Adapted from SANS2DSlicingTest_V2.SANS2DMinimalBatchReductionSlicedTest_V2 from mantid.api import (AnalysisDataService, FileFinder) from sans.command_interface.ISISCommandInterface import ( SANS2D, MaskFile, BatchReduce, SetEventSlices, UseCompatibilityMode, AssignSample, AssignCan, TransmissionSample, TransmissionCan, WavRangeReduction) MASKFILE = args.user_file BATCHFILE = FileFinder.getFullPath('sans2d_reduction_gui_batch.csv') # Compatibility mode converts to histograms earlier. # If enabled use something like the following line in MASKFILE to define binning: # L/EVENTSTIME 7000.0,500.0,60000.0 if not args.event_mode: UseCompatibilityMode() SANS2D() MaskFile(MASKFILE) SetEventSlices("0.0-451, 5-10") BatchReduce(BATCHFILE, '.nxs', saveAlgs={}, combineDet='rear')
def test_filename_accepts_full_filepath(self):
    """Loading should accept an absolute file path, not just a run number."""
    diff_mode = "FoilOut"
    rawfile = FileFinder.getFullPath("EVS14188.raw")
    self._run_load(rawfile, "3", diff_mode)
    self.assertTrue(mtd.doesExist('evs_raw'))
    self.assertEqual(mtd['evs_raw'].getNumberHistograms(), 1)
def test_filename_accepts_full_filepath(self):
    """Loading should accept an absolute file path, not just a run number."""
    diff_mode = "FoilOut"
    rawfile = FileFinder.getFullPath("EVS14188.raw")
    self._run_load(rawfile, "3", diff_mode)
    self.assertTrue(mtd.doesExist('evs_raw'))
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(mtd['evs_raw'].getNumberHistograms(), 1)
# pylint: disable=no-init from mantid.api import FileFinder from mantid.simpleapi import * from mantid import config import os.path import stresstesting import sys F_DIR = FileFinder.getFullPath("PowderISIS") sys.path.append(F_DIR) import cry_ini import cry_focus class ISISPowderDiffraction(stresstesting.MantidStressTest): def requiredFiles(self): return set(["hrp39191.raw", "hrp39187.raw", "hrp43022.raw", "hrpd/test/GrpOff/hrpd_new_072_01.cal", "hrpd/test/GrpOff/hrpd_new_072_01_corr.cal", "hrpd/test/cycle_09_2/Calibration/van_s1_old-0.nxs", "hrpd/test/cycle_09_2/Calibration/van_s1_old-1.nxs", "hrpd/test/cycle_09_2/Calibration/van_s1_old-2.nxs", "hrpd/test/cycle_09_2/tester/mtd.pref"]) def _clean_up_files(self, filenames, directories): try: for file in filenames: path = os.path.join(directories[0], file) os.remove(path) except OSError, ose: print 'could not delete generated file : ', ose.filename def runTest(self): dirs = config['datasearch.directories'].split(';')
def setUp(self):
    """Load the IP0005 calibration; L0 is held fixed across the 2 iterations."""
    self._calc_L0 = False
    self._parameter_file = FileFinder.getFullPath("IP0005.par")
    self._calibrated_params = self.load_ip_file()
    self._iterations = 2