def load_legacy_cross_Sections(self, file_path):
    """
        For legacy MR data, we need to load each cross-section independently.
        :param str file_path: data file path
    """
    ws_base_name = os.path.basename(file_path)
    cross_sections = list()

    for entry in ['Off_Off', 'On_Off', 'Off_On', 'On_On']:
        try:
            # Each cross section sits in its own NXentry of the legacy file
            ws_name = "%s_%s" % (ws_base_name, entry)
            ws = api.LoadEventNexus(Filename=file_path,
                                    NXentryName='entry-%s' % entry,
                                    OutputWorkspace=ws_name)
            api.AddSampleLog(Workspace=ws, LogName='cross_section_id',
                             LogText=entry)
            cross_sections.append(ws_name)
        except Exception:
            api.logger.information("Could not load %s from legacy data file" % entry)

    # Prepare output workspace group
    output_wsg = self.getPropertyValue("CrossSectionWorkspaces")
    api.GroupWorkspaces(InputWorkspaces=cross_sections, OutputWorkspace=output_wsg)
    self.setProperty("CrossSectionWorkspaces", output_wsg)
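
# Note on the legacy format (inferred from the loader above, not from separate
# documentation): each cross section is stored in its own NXentry, named
# 'entry-Off_Off', 'entry-On_Off', 'entry-Off_On' and 'entry-On_On'. A hypothetical
# file 'REF_M_12345.nxs' would therefore produce workspaces such as
# 'REF_M_12345.nxs_Off_Off', each tagged with a 'cross_section_id' sample log and
# collected into the CrossSectionWorkspaces group; entries missing from the file
# are simply skipped.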

def filter_cross_sections(self, file_path):
    """
        Filter events according to the polarization states
        :param str file_path: data file path
    """
    output_wsg = self.getPropertyValue("CrossSectionWorkspaces")
    pol_state = self.getProperty("PolState").value
    pol_veto = self.getProperty("PolVeto").value
    ana_state = self.getProperty("AnaState").value
    ana_veto = self.getProperty("AnaVeto").value
    ws_event_data = self.getProperty("InputWorkspace").value

    # Use the workspace that was passed in, otherwise load the data file
    if ws_event_data is not None:
        ws_raw_name = str(ws_event_data)
        ws_raw = ws_event_data
    else:
        ws_raw_name = os.path.basename(file_path)
        ws_raw = api.LoadEventNexus(Filename=file_path, OutputWorkspace=ws_raw_name)

    if self.getProperty("CheckDevices").value:
        # Check whether we have a polarizer
        polarizer = ws_raw.getRun().getProperty("Polarizer").value[0]
        # Check whether we have an analyzer
        analyzer = ws_raw.getRun().getProperty("Analyzer").value[0]
    else:
        polarizer = 1
        analyzer = 1

    # Gather the times at which the polarizer/analyzer states change
    change_list = []
    if polarizer > 0:
        # SF1 ON
        splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=pol_state,
                                              MinimumLogValue=0.99, MaximumLogValue=1.01,
                                              TimeTolerance=0, OutputWorkspace='filter',
                                              InformationWorkspace='filter_info',
                                              LogBoundary='Left', UnitOfTime='Seconds')
        time_dict = splitws.toDict()
        change_list.extend(extract_times(time_dict['start'], True, is_sf1=True))
        change_list.extend(extract_times(time_dict['stop'], False, is_sf1=True))

        # SF1 OFF
        splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=pol_state,
                                              MinimumLogValue=-0.01, MaximumLogValue=0.01,
                                              TimeTolerance=0, OutputWorkspace='filter',
                                              InformationWorkspace='filter_info',
                                              LogBoundary='Left', UnitOfTime='Seconds')
        time_dict = splitws.toDict()
        change_list.extend(extract_times(time_dict['start'], False, is_sf1=True))
        change_list.extend(extract_times(time_dict['stop'], True, is_sf1=True))

        # SF1 VETO
        if not pol_veto == '':
            splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=pol_veto,
                                                  MinimumLogValue=0.99, MaximumLogValue=1.01,
                                                  TimeTolerance=0, OutputWorkspace='filter',
                                                  InformationWorkspace='filter_info',
                                                  LogBoundary='Left', UnitOfTime='Seconds')
            time_dict = splitws.toDict()
            change_list.extend(extract_times(time_dict['start'], True, is_veto1=True))
            change_list.extend(extract_times(time_dict['stop'], False, is_veto1=True))

    if analyzer > 0:
        # SF2 ON
        splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=ana_state,
                                              MinimumLogValue=0.99, MaximumLogValue=1.01,
                                              TimeTolerance=0, OutputWorkspace='filter',
                                              InformationWorkspace='filter_info',
                                              LogBoundary='Left', UnitOfTime='Seconds')
        time_dict = splitws.toDict()
        change_list.extend(extract_times(time_dict['start'], True, is_sf2=True))
        change_list.extend(extract_times(time_dict['stop'], False, is_sf2=True))

        # SF2 OFF
        splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=ana_state,
                                              MinimumLogValue=-0.01, MaximumLogValue=0.01,
                                              TimeTolerance=0, OutputWorkspace='filter',
                                              InformationWorkspace='filter_info',
                                              LogBoundary='Left', UnitOfTime='Seconds')
        time_dict = splitws.toDict()
        change_list.extend(extract_times(time_dict['start'], False, is_sf2=True))
        change_list.extend(extract_times(time_dict['stop'], True, is_sf2=True))

        # SF2 VETO
        if not ana_veto == '':
            splitws, _ = api.GenerateEventsFilter(InputWorkspace=ws_raw_name, LogName=ana_veto,
                                                  MinimumLogValue=0.99, MaximumLogValue=1.01,
                                                  TimeTolerance=0, OutputWorkspace='filter',
                                                  InformationWorkspace='filter_info',
                                                  LogBoundary='Left', UnitOfTime='Seconds')
            time_dict = splitws.toDict()
            change_list.extend(extract_times(time_dict['start'], True, is_veto2=True))
            change_list.extend(extract_times(time_dict['stop'], False, is_veto2=True))
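
    # change_list now holds one entry per recorded state change: flipper on/off and
    # veto on/off for the polarizer (SF1) and the analyzer (SF2). create_table(), which
    # is defined elsewhere in this algorithm, is assumed to turn the time-sorted list
    # into a splitter TableWorkspace mapping (start, stop) intervals to cross-section
    # names (presumably the same Off_Off / On_Off / Off_On / On_On labels used for
    # legacy data), which FilterEvents then uses to split the raw events.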

    start_time = ws_raw.run().startTime().totalNanoseconds()
    change_list = sorted(change_list, key=itemgetter(0))
    split_table_ws = self.create_table(change_list, start_time,
                                       has_polarizer=polarizer > 0,
                                       has_analyzer=analyzer > 0)

    # Filter events if we found enough information to do so
    if split_table_ws.rowCount() > 0:
        outputs = api.FilterEvents(InputWorkspace=ws_raw,
                                   SplitterWorkspace=split_table_ws,
                                   OutputWorkspaceBaseName=output_wsg,
                                   GroupWorkspaces=True,
                                   FilterByPulseTime=False,
                                   OutputWorkspaceIndexedFrom1=False,
                                   CorrectionToSample="None",
                                   SpectrumWithoutDetector="Skip",
                                   SplitSampleLogs=True,
                                   RelativeTime=True,
                                   ExcludeSpecifiedLogs=True,
                                   OutputTOFCorrectionWorkspace='_tmp')
        AnalysisDataService.remove('_tmp')

        # Tag each filtered workspace with the cross section it corresponds to
        for ws in outputs[-1]:
            pol_state = str(ws).replace(output_wsg + '_', '')
            api.AddSampleLog(Workspace=ws, LogName='cross_section_id',
                             LogText=pol_state)

        if ws_event_data is None:
            AnalysisDataService.remove(ws_raw_name)
        self.setProperty("CrossSectionWorkspaces", output_wsg)

    # If we don't have a splitter table, it might be because we don't have analyzer/polarizer
    # information. In this case don't filter and return the raw workspace.
    elif polarizer <= 0 and analyzer <= 0:
        api.logger.warning("No polarizer/analyzer information available")
        self.setProperty("CrossSectionWorkspaces", api.GroupWorkspaces([ws_raw]))
    else:
        api.logger.error("No events remained after filtering")
        if ws_event_data is None:
            AnalysisDataService.remove(ws_raw_name)
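
# Sketch (assumption, not part of this file): extract_times() is defined elsewhere in
# this module. Judging from how it is called above and how its output is consumed
# (sorted with itemgetter(0), then handed to create_table), each returned item
# presumably starts with the change time followed by flags describing the state
# change, along the lines of:
#
#     def extract_times(times, is_start, is_sf1=False, is_sf2=False,
#                       is_veto1=False, is_veto2=False):
#         """Tag each time with the state change it represents."""
#         return [(t, is_start, is_sf1, is_sf2, is_veto1, is_veto2) for t in times]
#
# The exact layout must match what create_table() expects; treat this as an
# illustration of the data flow only.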