def __init__(self):
    """Initialize the live-data driver: archive manager, event-counter
    workspace, and containers for peak parameters and vanadium workspaces.
    """
    super(LiveDataDriver, self).__init__()

    # archive manager for VULCAN data
    self._archiveManager = archivemanager.DataArchiveManager('VULCAN')

    # remove a stale counter workspace left over from a previous session
    counter_ws = LiveDataDriver.COUNTER_WORKSPACE_NAME
    if mantid_helper.workspace_does_exist(counter_ws):
        mantid_helper.delete_workspace(counter_ws)
    # create the counter workspace; workspace index 1 records the number of events
    mantidsimple.CreateWorkspace(OutputWorkspace=counter_ws,
                                 DataX=[0, 0], DataY=[0, 0], NSpec=2)

    # live reduction script and thread-control flag
    self._live_reduction_script = LiveDataDriver.LIVE_PROCESS_SCRIPTS
    self._thread_continue = True

    # peak-parameter containers
    self._peakMinD = None
    self._peakMaxD = None
    self._peakNormByVan = False
    # _peakParamDict: key = '%.5f %.5f %d' % (min-d, max-d, norm-by-van); value: dict
    #   level-2 dict: key = workspace name, value = dict for bank 1, bank 2, bank 3, time
    #   level-3 dict: key = bank ID, value = 3-tuple (peak intensity, peak center, variance)
    self._peakParamDict = {}
    self._currPeakParamKey = None

    # vanadium workspaces: key = bank ID, value = workspace name
    self._vanadiumWorkspaceDict = {}

    return
def chop_data(self, split_ws_name=None, info_ws_name=None, do_tof_correction=False):
    """Chop an event run according to a splitters workspace and save the pieces.

    :param split_ws_name: name of the splitters workspace; if None, both splitter
                          names are taken from the reduction setup object
    :param info_ws_name: name of the splitters information workspace; must be
                         given whenever split_ws_name is given
    :param do_tof_correction: flag for TOF correction (not implemented yet)
    :return: 2-tuple (status, returned object) from the chopping call
    """
    # raw event NeXus file comes from the reduction setup object
    raw_file_name = self._reductionSetup.locate_event_nexus()

    # resolve the splitter workspaces
    if split_ws_name is None:
        split_ws_name, info_ws_name = self._reductionSetup.get_splitters(throw_not_set=True)
    elif info_ws_name is None:
        raise RuntimeError(
            'Splitters workspace name must be given with information workspace name.')

    _, output_directory = self._reductionSetup.get_chopped_directory(True, nexus_only=True)

    if do_tof_correction:
        raise RuntimeError('Not implemented for TOF correction yet.')

    # the number of target workspaces decides the chopping strategy
    num_targets, is_epoch_time = chop_utility.get_number_chopped_ws(split_ws_name)

    # load the raw events; workspace is named after the file base name
    event_ws_name = os.path.split(raw_file_name)[1].split('.')[0]
    mantid_helper.load_nexus(data_file_name=raw_file_name,
                             output_ws_name=event_ws_name,
                             meta_data_only=False)

    if num_targets < MAX_CHOPPED_WORKSPACE_IN_MEM:
        # few enough targets: chop with the regular in-memory method
        # TODO/DEBUG - Split workspace won't be deleted at this stage
        status, ret_obj = mantid_helper.split_event_data(raw_ws_name=event_ws_name,
                                                         split_ws_name=split_ws_name,
                                                         info_table_name=info_ws_name,
                                                         target_ws_name=None,
                                                         tof_correction=do_tof_correction,
                                                         output_directory=output_directory,
                                                         delete_split_ws=False)
    else:
        # too many target workspaces to hold in memory at once: chop in batches
        status, ret_obj = self.chop_data_large_number_targets(event_ws_name,
                                                              tof_correction=do_tof_correction,
                                                              output_dir=output_directory,
                                                              is_epoch_time=is_epoch_time,
                                                              num_target_ws=num_targets,
                                                              delete_split_ws=True)

    # TODO - NIGHT (Nice) - save the split workspace for future reference
    # TODO/ISSUE/NOWNOW - deleting the raw workspace should be a user option
    print('[INFO] Deleting raw event workspace {0} which {1} exists.'
          ''.format(event_ws_name, AnalysisDataService.doesExist(event_ws_name)))
    if AnalysisDataService.doesExist(event_ws_name):
        mantid_helper.delete_workspace(event_ws_name)

    return status, ret_obj
def delete_splitter_workspace(self, slicer_tag):
    """Delete the splitter workspace and its information workspace for a tag.

    :param slicer_tag: tag under which the splitter workspaces are registered
    :return: 2-tuple (bool success flag, str error message; empty on success)
    """
    # look up the pair of workspaces registered under this tag
    try:
        split_ws, info_ws = self.get_split_workspace(slicer_tag)
    except RuntimeError as run_err:
        message = 'Unable to delete slicer with tag {0} of run {1} due to {2}.' \
                  ''.format(slicer_tag, self._myRunNumber, run_err)
        return False, message

    # remove both workspaces from the analysis data service
    for ws_name in (split_ws, info_ws):
        mantid_helper.delete_workspace(ws_name)

    return True, ''
def delete_workspace(workspace_name, no_throw=False):
    """Delete a workspace from Mantid's AnalysisDataService.

    Args:
        workspace_name: name of the workspace, as a string
        no_throw: if True, silently return when the workspace does not exist
                  instead of raising

    Returns:
        None

    Raises:
        RuntimeError: workspace does not exist and no_throw is False
    """
    assert isinstance(workspace_name, str), \
        'Input workspace name must be a string, but not %s.' % str(type(workspace_name))

    # guard clause: nothing to delete if the workspace is not registered
    if not ADS.doesExist(workspace_name):
        if no_throw:
            return
        raise RuntimeError('Workspace %s does not exist.' % workspace_name)

    mantid_helper.delete_workspace(workspace=workspace_name)
    return
def get_proton_charge(ipts_number, run_number, chop_sequence):
    """Retrieve the (single-valued) proton charge of a run.

    For a regular run the value is read from the run's NeXus meta data;
    for a chopped run it is looked up in the chopped-data record file.
    :param ipts_number: IPTS number (integer)
    :param run_number: run number (integer)
    :param chop_sequence: None for a regular run, otherwise the chop-sequence index
    :return: proton charge as a float
    """
    # validate input types
    assert isinstance(ipts_number, int), 'IPTS number {0} must be an integer but not a {1}' \
                                         ''.format(ipts_number, type(ipts_number))
    assert isinstance(run_number, int), 'Run number {0} must be an integer but not a {1}.' \
                                        ''.format(run_number, type(run_number))

    if chop_sequence is None:
        # regular run: locate the NeXus file, preferring the .nxs.h5 layout
        nexus_file = '/SNS/VULCAN/IPTS-{0}/nexus/VULCAN_{1}.nxs.h5'.format(
            ipts_number, run_number)
        if not os.path.exists(nexus_file):
            fallback = '/SNS/VULCAN/IPTS-{0}/data/VULCAN_{1}_event.nxs'.format(
                ipts_number, run_number)
            if not os.path.exists(fallback):
                raise RuntimeError('Unable to locate NeXus file for IPTS-{0} Run {1} with name '
                                   '{2} or {3}'.format(ipts_number, run_number,
                                                       nexus_file, fallback))
            nexus_file = fallback

        # load meta data only, read the proton-charge log, then clean up
        meta_ws_name = '{0}_Meta'.format(run_number)
        mantid_helper.load_nexus(data_file_name=nexus_file, output_ws_name=meta_ws_name,
                                 meta_data_only=True)
        proton_charge = mantid_helper.get_sample_log_value_single(meta_ws_name, 'gd_prtn_chrg')
        # scale factor from the original implementation (labelled as
        # picoCoulomb -> uA.hour; TODO confirm the direction of this conversion)
        proton_charge *= 1E6 * 3600.
        mantid_helper.delete_workspace(meta_ws_name)
    else:
        # chopped run: read the value from the sample-environment record file
        record_file_name = '/SNS/VULCAN/IPTS-{0}/shared/ChoppedData/{1}/{1}sampleenv_chopped_mean.txt' \
                           ''.format(ipts_number, run_number)
        if not os.path.exists(record_file_name):
            raise RuntimeError('Unable to locate chopped data record file {0}'.format(
                record_file_name))

        # whitespace-delimited table indexed (column 0) by chop-sequence number
        data_set = pandas.read_csv(record_file_name, header=None,
                                   delim_whitespace=True, index_col=0)
        try:
            proton_charge = float(data_set.loc[chop_sequence][1])
        except KeyError as key_err:
            raise RuntimeError('Unable to find chop sequence {0} in {1} due to {2}'
                               ''.format(chop_sequence, record_file_name, key_err))

    return proton_charge
def load_smoothed_vanadium(self, van_gsas_file):
    """ Load smoothed vanadium spectra from a GSAS file.

    The file is loaded, the instrument geometry is set for 3 banks, units are
    converted to dSpacing, and each bank is extracted, rebinned and
    FFT-smoothed into its own workspace, registered in
    self._vanadiumWorkspaceDict (key: bank ID, value: workspace name).
    Bank 3 gets special treatment: its X values may be out of order, so the
    (x, y) points are re-sorted before rebinning.  Finally every smoothed
    Y value below 1 is clipped up to 1.
    :param van_gsas_file: path to the vanadium GSAS file (string)
    :return: None
    :raises RuntimeError: if the GSAS file does not exist
    """
    # check input
    assert isinstance(
        van_gsas_file, str), 'Vanadium GSAS file name {0} must be a string.'.format(van_gsas_file)
    if os.path.exists(van_gsas_file) is False:
        raise RuntimeError('Vanadium GSAS file {0} cannot be found.'.format(van_gsas_file))

    # load file and edit instrument geometry for conversion to dSpacing
    mantidsimple.LoadGSS(Filename=van_gsas_file, OutputWorkspace='vanadium')
    # 3 banks
    mantidsimple.EditInstrumentGeometry(Workspace='vanadium', PrimaryFlightPath=43.753999999999998,
                                        SpectrumIDs='1, 2, 3', L2='2,2,2',
                                        Polar='-90,90,{}'.format(mantid_helper.HIGH_ANGLE_BANK_2THETA))
    mantidsimple.ConvertUnits(InputWorkspace='vanadium', OutputWorkspace='vanadium',
                              Target='dSpacing')

    # bank 1 and 2: extract, rebin and smooth
    for bank in [1, 2]:
        ws_name = 'van_bank_{0}'.format(bank)
        mantidsimple.ExtractSpectra(InputWorkspace='vanadium', OutputWorkspace='van2banks',
                                    WorkspaceIndexList=bank-1)
        mantidsimple.Rebin(InputWorkspace='van2banks', OutputWorkspace='van2banks',
                           Params='0.3,-0.001, 3.5')
        mantidsimple.FFTSmooth(InputWorkspace='van2banks', OutputWorkspace=ws_name,
                               Filter='Butterworth', Params='20,2',
                               IgnoreXBins=True, AllSpectra=True)
        self._vanadiumWorkspaceDict[bank] = ws_name
    # END-FOR
    # the shared intermediate workspace is no longer needed
    mantid_helper.delete_workspace('van2banks')

    # bank 3: different algorithm because it has more bins than banks 1 and 2
    # but has some issue with Mantid
    for bank in [3]:
        # special processing for bank 3
        mantidsimple.ExtractSpectra(InputWorkspace='vanadium', OutputWorkspace='vanhighbank',
                                    WorkspaceIndexList=bank-1)
        # sort the bins: FIXME might be better to use numpy array
        bank3ws = ADS.retrieve('vanhighbank')
        vecx = bank3ws.readX(0)
        vecy = bank3ws.readY(0)
        # pair up (x, y) and sort, since X might be out of order;
        # only len(vecy) points are paired (histogram X has one extra bin edge)
        xy_list = list()
        for i in range(len(vecy)):
            xy_list.append((vecx[i], vecy[i]))
        xy_list.sort()
        # rebuild sorted X and Y vectors
        vec_x = numpy.ndarray(shape=(len(vecx),), dtype='float')
        vec_y = numpy.ndarray(shape=(len(vecy),), dtype='float')
        for i, xy in enumerate(xy_list):
            vec_x[i] = xy[0]
            vec_y[i] = xy[1]
        # restore the last bin edge, which was not part of the sorted pairs
        vec_x[-1] = vecx[-1]

        # re-create workspace, then rebin and smooth like the other banks
        mantidsimple.CreateWorkspace(DataX=vec_x, DataY=vec_y, NSpec=1, UnitX='dSpacing',
                                     OutputWorkspace='vanbank3')
        mantidsimple.Rebin(InputWorkspace='vanbank3', OutputWorkspace='vanbank3',
                           Params='0.3,-0.001, 3.5')
        ws_name = 'van_bank_{0}'.format(bank)
        mantidsimple.FFTSmooth(InputWorkspace='vanbank3', OutputWorkspace=ws_name,
                               WorkspaceIndex=0, Filter='Butterworth', Params='20,2',
                               IgnoreXBins=True, AllSpectra=True)
        self._vanadiumWorkspaceDict[bank] = ws_name

        # clean up the bank-3 intermediates
        mantid_helper.delete_workspace('vanbank3')
        mantid_helper.delete_workspace('vanhighbank')
    # END-FOR

    # make sure there won't be any value less than 1 in the smoothed spectra
    # (note: ws_name here iterates the bank IDs used as dictionary keys)
    for ws_name in self._vanadiumWorkspaceDict.keys():
        van_bank_i_ws = mantid_helper.retrieve_workspace(
            self._vanadiumWorkspaceDict[ws_name], True)
        for i in range(len(van_bank_i_ws.readY(0))):
            if van_bank_i_ws.readY(0)[i] < 1.:
                van_bank_i_ws.dataY(0)[i] = 1.
    # END-FOR

    return