def load_data(self, file_path): r""" # type: (unicode) -> WorkspaceGroup @brief Load one or more data sets according to the needs ot the instrument. @details This function assumes that when loading more than one data file, the files are congruent and their events will be added together. @param file_path: absolute path to one or more data files. If more than one, paths should be concatenated with the plus symbol '+'. @returns WorkspaceGroup with any number of cross-sections """ fp_instance = FilePath(file_path) xs_list = list() temp_workspace_root_name = ''.join( random.sample(string.ascii_letters, 12)) # random string of 12 characters workspace_root_name = fp_instance.run_numbers( string_representation='short') for path in fp_instance.single_paths: is_legacy = path.endswith(".nxs") if is_legacy or not USE_SLOW_FLIPPER_LOG: _path_xs_list = api.MRFilterCrossSections( Filename=path, PolState=self.pol_state, AnaState=self.ana_state, PolVeto=self.pol_veto, AnaVeto=self.ana_veto, CrossSectionWorkspaces="%s_entry" % temp_workspace_root_name) # Only keep good workspaces, and get rid of the rejected events path_xs_list = [ ws for ws in _path_xs_list if not ws.getRun()['cross_section_id'].value == 'unfiltered' ] else: ws = api.LoadEventNexus(Filename=path, OutputWorkspace="raw_events") path_xs_list = self.dummy_filter_cross_sections( ws, name_prefix=temp_workspace_root_name) if len( xs_list ) == 0: # initialize xs_list with the cross sections of the first data file xs_list = path_xs_list for ws in xs_list: # replace the temporary names with the run number(s) name_new = str(ws).replace(temp_workspace_root_name, workspace_root_name) api.RenameWorkspace(str(ws), name_new) else: for i, ws in enumerate(xs_list): api.Plus(LHSWorkspace=str(ws), RHSWorkspace=str(path_xs_list[i]), OutputWorkspace=str(ws)) # Insert a log indicating which run numbers contributed to this cross-section for ws in xs_list: api.AddSampleLog( Workspace=str(ws), LogName='run_numbers', LogText=fp_instance.run_numbers(string_representation='short'), LogType='String') return xs_list
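# Hedged usage sketch, not part of the original module: it illustrates the
# composite-path convention that load_data() relies on. FilePath splits a
# '+'-joined string into single paths sorted by run number, and load_data()
# then sums the events of the resulting files per cross-section. The file
# names below are hypothetical; the FilePath behavior is taken from the
# tests further below.
def example_composite_path_expansion():
    fp = FilePath(u'/SNS/REF_M_2.nxs+/SNS/REF_M_1.nxs')
    assert list(fp.single_paths) == [u'/SNS/REF_M_1.nxs', u'/SNS/REF_M_2.nxs']
    assert list(fp.run_numbers()) == [1, 2]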
def test_init(self):
    assert FilePath(u'/SNS/REF_M_1.nxs').path == u'/SNS/REF_M_1.nxs'
    assert FilePath([u'/SNS/REF_M_2.nxs', u'/SNS/REF_M_1.nxs']).path == \
        u'/SNS/REF_M_1.nxs+/SNS/REF_M_2.nxs'
    assert FilePath([u'/SNS/REF_M_2.nxs', u'/SNS/REF_M_1.nxs'], sort=False).path == \
        u'/SNS/REF_M_2.nxs+/SNS/REF_M_1.nxs'
    assert FilePath(u'/SNS/REF_M_2.nxs+/SNS/REF_M_1.nxs').path == \
        u'/SNS/REF_M_1.nxs+/SNS/REF_M_2.nxs'
def test_run_numbers(self):
    assert_equal_arrays(FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').run_numbers(), [1, 3])
    file_path = FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs+/SNS/REF_M_6.nxs+/SNS/REF_M_2.nxs')
    assert file_path.run_numbers(string_representation='long') == '1+2+3+6'
    assert file_path.run_numbers(string_representation='short') == '1:3+6'
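# Hedged sketch, not part of the original module: a stand-alone re-implementation,
# for illustration only, of the kind of compaction run_numbers(string_representation='short')
# performs, where consecutive runs collapse to 'first:last' and disjoint groups are
# joined with '+'. The actual FilePath logic may differ in its details.
def example_compact_run_numbers():
    def compact(runs):
        runs = sorted(runs)
        groups = []
        start = prev = runs[0]
        for r in runs[1:]:
            if r == prev + 1:  # extend the current consecutive group
                prev = r
                continue
            groups.append((start, prev))  # close the group and start a new one
            start = prev = r
        groups.append((start, prev))
        return '+'.join('%d' % a if a == b else '%d:%d' % (a, b) for a, b in groups)

    assert compact([3, 1, 6, 2]) == '1:3+6'  # matches the 'short' representation above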
def file_open_from_list(self): r"""Called when a new file is selected from the file list. This is an event call.""" if self.auto_change_active: return QtWidgets.QApplication.instance().processEvents() item = self.ui.file_list.currentItem() # type: QListWidgetItem name = unicode( item.text() ) # e.g 'REF_M_38199.nxs.h5' or 'REF_M_38198.nxs.h5+REF_M_38199.nxs.h5' filepath = FilePath.join(self.data_manager.current_directory, name) self.file_handler.open_file(filepath)
def __init__(self, file_path, configuration):
    # type: (unicode, Configuration) -> None
    """
    @brief Structure to read in one or more Nexus data files
    @param file_path: absolute path to one or more files. If more than one, paths are
        concatenated with the plus symbol '+'
    @param configuration: reduction configurations
    """
    self.file_path = FilePath(file_path).path  # sort the paths if more than one
    # Can be a single number (e.g. '1234') or a composite (e.g. '1234:1239+1245')
    self.number = ''
    self.configuration = configuration
    self.cross_sections = {}
    self.main_cross_section = None
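# Hedged sketch, not part of the original module: constructing the structure above
# from a composite path. The class name NexusData is inferred from its use in
# load() below; the Configuration instance is assumed to be built elsewhere.
def example_nexus_data_init(configuration):
    data = NexusData(u'/SNS/REF_M_2.nxs+/SNS/REF_M_1.nxs', configuration)
    assert data.file_path == u'/SNS/REF_M_1.nxs+/SNS/REF_M_2.nxs'  # paths are sorted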
def load(self, file_path, configuration, force=False, update_parameters=True, progress=None):
    # type: (str, Configuration, Optional[bool], Optional[bool], Optional[ProgressReporter]) -> bool
    r"""
    @brief Load one or more Nexus data files
    @param file_path: absolute path to one or more files. If more than one, files are
        concatenated with the merge symbol '+'.
    @param configuration: configuration to use to load the data
    @param force: if True, existing data in the cache will be replaced by reading from file.
    @param update_parameters: if True, we will find peak ranges
    @param progress: aggregator to estimate percent of time allotted to this function
    @returns True if the data is retrieved from the cache of past loading events
    """
    # Actions taken in this function:
    # 1. Find if the file has been loaded in the past. Retrieve the cache when force==False
    # 2. If the file is not in the cache, or if force==True: invoke NexusData.load()
    # 3. Update attributes _nexus_data, current_directory, and current_file_name
    # 4. If we're overwriting cached data that was allocated in reduction_list or direct_beam_list,
    #    then assign the new data to the proper indexes in those lists
    # 5. Compute reflectivity if the data is loaded from file
    nexus_data = None  # type: NexusData
    is_from_cache = False  # if True, the file has been loaded before
    reduction_list_id = None
    direct_beam_list_id = None
    file_path = FilePath(file_path, sort=True).path  # force sorting by increasing run number

    if progress is not None:
        progress(10, "Loading data...")

    # Check whether the file has already been loaded (in cache)
    for i in range(len(self._cache)):
        if self._cache[i].file_path == file_path:
            if force:
                # Check whether the data is in the reduction list before removing it
                reduction_list_id = self.find_data_in_reduction_list(self._cache[i])
                direct_beam_list_id = self.find_data_in_direct_beam_list(self._cache[i])
                self._cache.pop(i)
            else:
                nexus_data = self._cache[i]
                is_from_cache = True
            break

    # If we don't have the data, load it
    if nexus_data is None:
        configuration.normalization = None
        nexus_data = NexusData(file_path, configuration)
        sub_task = progress.create_sub_task(max_value=70) if progress else None
        nexus_data.load(progress=sub_task, update_parameters=update_parameters)

    if progress is not None:
        progress(80, "Calculating...")

    if nexus_data is not None:
        self._nexus_data = nexus_data
        # Example: '/SNS/REF_M/IPTS-25531/nexus/REF_M_38198.nxs.h5+/SNS/REF_M/IPTS-25531/nexus/REF_M_38199.nxs.h5'
        # will be split into directory='/SNS/REF_M/IPTS-25531/nexus' and
        # file_name='REF_M_38198.nxs.h5+REF_M_38199.nxs.h5'
        directory, file_name = FilePath(file_path).split()
        self.current_directory = directory
        self.current_file_name = file_name
        self.set_channel(0)

        # If we didn't get this data set from our cache, add it and compute its reflectivity
        if not is_from_cache:
            # Find a suitable direct beam
            logging.info("Direct beam from loader: %s", configuration.normalization)
            if configuration.normalization is None and configuration.match_direct_beam:
                self.find_best_direct_beam()
            # Replace reduction and normalization entries as needed
            if reduction_list_id is not None:
                self.reduction_list[reduction_list_id] = nexus_data
            if direct_beam_list_id is not None:
                self.direct_beam_list[direct_beam_list_id] = nexus_data
            # Compute reflectivity
            try:
                self.calculate_reflectivity()
            except Exception:
                logging.error("Reflectivity calculation failed for %s", file_name)
            # If cached reduced data exceeds the maximum cache size, remove the oldest entries
            while len(self._cache) >= self.MAX_CACHE:
                self._cache.pop(0)
            self._cache.append(nexus_data)

    if progress is not None:
        progress(100)

    return is_from_cache
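# Hedged usage sketch, not part of the original module: a plausible call sequence
# for load(). The manager and configuration arguments are hypothetical stand-ins
# for the enclosing data-manager class and a reduction Configuration; the
# return-value semantics (True when served from the cache) come from the
# docstring above, and the first assertion assumes the file was not cached yet.
def example_load_twice(manager, configuration):
    path = u'/SNS/REF_M/IPTS-25531/nexus/REF_M_38198.nxs.h5'
    first = manager.load(path, configuration, force=False)  # reads from file
    second = manager.load(path, configuration)              # served from the cache
    assert first is False and second is True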
def test_split(self):
    assert_equal_arrays(FilePath(u'/SNS/REF_M_3.nxs').split(), (u'/SNS', u'REF_M_3.nxs'))
    assert_equal_arrays(FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').split(),
                        (u'/SNS', u'REF_M_1.nxs+REF_M_3.nxs'))
def test_first_path(self):
    assert FilePath(u'/SNS/REF_M_3.nxs').first_path == u'/SNS/REF_M_3.nxs'
    assert FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').first_path == u'/SNS/REF_M_1.nxs'
def test_basename(self):
    assert FilePath(u'/SNS/REF_M_3.nxs').basename == u'REF_M_3.nxs'
    assert FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').basename == u'REF_M_1.nxs+REF_M_3.nxs'
def test_dirname(self):
    assert FilePath(u'/SNS/REF_M_3.nxs').dirname == u'/SNS'
    assert FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').dirname == u'/SNS'
def test_is_composite(self):
    assert FilePath(u'/SNS/REF_M_3.nxs').is_composite is False
    assert FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').is_composite
def test_single_paths(self):
    assert_equal_arrays(FilePath(u'/SNS/REF_M_3.nxs+/SNS/REF_M_1.nxs').single_paths,
                        [u'/SNS/REF_M_1.nxs', u'/SNS/REF_M_3.nxs'])
def test_unique_dirname(self):
    assert FilePath.unique_dirname(u'/SNS/REF_M_1.nxs+/SNS/REF_M_2.nxs')
    assert FilePath.unique_dirname(u'/NSN/REF_M_1.nxs+/SNS/REF_M_2.nxs') is False
def test_join(self):
    assert FilePath.join(u'/SNS', u'REF_M_1.nxs') == u'/SNS/REF_M_1.nxs'
    assert FilePath.join(u'/SNS', u'REF_M_2.nxs+REF_M_1.nxs') == \
        u'/SNS/REF_M_1.nxs+/SNS/REF_M_2.nxs'