def execute_calculate_2theta_intensity(self, ipts_number, run_number):
        """
        sum events' counts along tube, convert tube center to 2theta
        :return:
        """
        # locate original nexus file
        if ipts_number is None:
            ipts_number = mantid_helper.get_ipts_number(run_number)
        event_file_name = '/SNS/VULCAN/IPTS-{}/nexus/VULCAN_{}.nxs.h5'.format(
            ipts_number, run_number)

        # load data from file
        ws_name_i = 'VULCAN_{}_events'.format(run_number)
        mantid_helper.load_nexus(data_file_name=event_file_name,
                                 output_ws_name=ws_name_i,
                                 meta_data_only=False)

        # now count the events per column on the high-angle detector
        counts_vec = self._count_events_by_det_column(ws_name_i)
        self._data_set = counts_vec

        # get proton charges
        event_ws = mantid_helper.retrieve_workspace(ws_name_i)
        plog = event_ws.run().getProperty('proton_charge')
        pcharges = plog.value.sum()
        self._proton_charges = [pcharges]

        return counts_vec
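
A minimal standalone sketch of the proton-charge summation above, using the Mantid
API directly (the IPTS/run numbers in the path are placeholders, not from the source):

    from mantid.simpleapi import LoadEventNexus

    # load only the logs; 'proton_charge' is a per-pulse time series log whose
    # .value attribute is a numpy array of charges
    meta_ws = LoadEventNexus(Filename='/SNS/VULCAN/IPTS-1234/nexus/VULCAN_5678.nxs.h5',
                             MetaDataOnly=True)
    total_proton_charge = meta_ws.run().getProperty('proton_charge').value.sum()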
Example #2
    def chop_data(self, split_ws_name=None, info_ws_name=None, do_tof_correction=False):
        """
        chop data and save to GSAS file
        :param split_ws_name:
        :param info_ws_name:
        :param TOF correction
        :return:
        """
        # get data file names, splitters workspace and output directory from reduction setup object
        raw_file_name = self._reductionSetup.locate_event_nexus()
        if split_ws_name is None:
            split_ws_name, info_ws_name = self._reductionSetup.get_splitters(throw_not_set=True)
        elif info_ws_name is None:
            raise RuntimeError(
                'Information workspace name must be given along with the splitters workspace name.')
        _, output_directory = self._reductionSetup.get_chopped_directory(
            True, nexus_only=True)

        if do_tof_correction:
            raise RuntimeError('Not implemented for TOF correction yet.')

        # get number of target workspace
        number_target_ws, is_epoch_time = chop_utility.get_number_chopped_ws(split_ws_name)

        # load data from file to workspace
        event_ws_name = os.path.split(raw_file_name)[1].split('.')[0]
        mantid_helper.load_nexus(data_file_name=raw_file_name,
                                 output_ws_name=event_ws_name, meta_data_only=False)

        if number_target_ws < MAX_CHOPPED_WORKSPACE_IN_MEM:
            # chop event workspace with regular method
            # TODO/DEBUG - Split workspace won't be deleted at this stage
            status, ret_obj = mantid_helper.split_event_data(raw_ws_name=event_ws_name,
                                                             split_ws_name=split_ws_name,
                                                             info_table_name=info_ws_name,
                                                             target_ws_name=None,
                                                             tof_correction=do_tof_correction,
                                                             output_directory=output_directory,
                                                             delete_split_ws=False)
        else:
            # chop the event workspace into many target workspaces that cannot all be
            # held in memory simultaneously
            status, ret_obj = self.chop_data_large_number_targets(event_ws_name,
                                                                  tof_correction=do_tof_correction,
                                                                  output_dir=output_directory,
                                                                  is_epoch_time=is_epoch_time,
                                                                  num_target_ws=number_target_ws,
                                                                  delete_split_ws=True)

        # TODO - NIGHT (Nice) - save the split workspace for future reference
        # delete raw workspace
        # TODO/ISSUE/NOWNOW - Requiring a user option for this!
        print('[INFO] Deleting raw event workspace {0} (exists: {1}).'
              ''.format(event_ws_name, AnalysisDataService.doesExist(event_ws_name)))
        if AnalysisDataService.doesExist(event_ws_name):
            mantid_helper.delete_workspace(event_ws_name)

        return status, ret_obj
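
For reference, a minimal sketch of the event chopping that split_event_data presumably
wraps, calling Mantid's GenerateEventsFilter and FilterEvents directly (workspace names
and the 60 s slice interval are illustrative):

    from mantid.simpleapi import GenerateEventsFilter, FilterEvents

    # build the splitters and information workspaces: one slice per 60 seconds
    GenerateEventsFilter(InputWorkspace='VULCAN_5678_events',
                         OutputWorkspace='splitter_ws',
                         InformationWorkspace='info_ws',
                         TimeInterval=60.)
    # split the events according to the splitters; output workspaces are grouped
    FilterEvents(InputWorkspace='VULCAN_5678_events',
                 SplitterWorkspace='splitter_ws',
                 InformationWorkspace='info_ws',
                 OutputWorkspaceBaseName='VULCAN_5678_sliced',
                 GroupWorkspaces=True)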
Example #3
    def load_data_file(self):
        """ Load NeXus file
        :return:
        """
        # use base name for output workspace
        base_name = os.path.basename(self._myNeXusFileName)
        out_ws_name = base_name.split('.')[0] + '_MetaData'
        if not mantid_helper.workspace_does_exist(out_ws_name):
            # load sample logs only; skip loading if the workspace already exists
            status, ret_obj = mantid_helper.load_nexus(
                data_file_name=self._myNeXusFileName,
                output_ws_name=out_ws_name,
                meta_data_only=True)

            if status is False:
                err_msg = str(ret_obj)
                raise RuntimeError(err_msg)

        # register
        self._meta_ws_name = out_ws_name

        # Set up log names list
        try:
            self._logNameList = mantid_helper.get_sample_log_names(
                self._meta_ws_name)
            assert isinstance(self._logNameList, list)
        except RuntimeError as err:
            raise RuntimeError('Unable to retrieve sample log names due to {}.'.format(err))

        # Set up run start time
        self._runStartTime = mantid_helper.get_run_start(
            self._meta_ws_name, time_unit='nanosecond')

        return out_ws_name
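
A sketch of reading the run start time with the Mantid API, which is presumably what
get_run_start wraps (the workspace name is a placeholder):

    from mantid.simpleapi import mtd

    meta_ws = mtd['VULCAN_5678_MetaData']
    # Run.startTime() returns a DateAndTime; convert it to total nanoseconds
    run_start_ns = meta_ws.run().startTime().totalNanoseconds()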
Example #4
    def load_binned_data(self,
                         data_file_name,
                         data_file_type,
                         max_int,
                         prefix='',
                         data_key=None,
                         target_unit=None):
        """ Load binned data
        :param data_file_name:
        :param data_file_type:
        :param prefix: prefix of the GSAS workspace name. It can be None, an integer, or a string
        :param max_int: maximum integer for sequence such as 999 for 001, 002, ... 999
        :param data_key: data key or None (to use workspace name as data key)
        :param target_unit: target unit or None
        :return: string as data key (aka. workspace name)
        """
        # check inputs
        datatypeutility.check_file_name(data_file_name, True, False, False,
                                        'Binned/reduced data file to load')
        if data_file_type is not None:
            datatypeutility.check_string_variable('Data file type',
                                                  data_file_type,
                                                  ['gsas', 'processed nexus'])
        if data_key is not None:
            datatypeutility.check_string_variable('Data key', data_key)
        datatypeutility.check_string_variable('Workspace prefix', prefix)

        # find out the type of the data file
        file_name, file_extension = os.path.splitext(data_file_name)

        if data_file_type is None:
            if file_extension.lower() in ['.gda', '.gsa', '.gss']:
                data_file_type = 'gsas'
            elif file_extension.lower() == '.nxs':
                data_file_type = 'processed nexus'
            else:
                raise RuntimeError(
                    'Data file extension {0} is not recognized.'.format(
                        file_extension))
        else:
            data_file_type = data_file_type.lower()
        # END-IF-ELSE

        # Load data
        data_ws_name = self.construct_workspace_name(data_file_name,
                                                     data_file_type, prefix,
                                                     max_int)

        if data_file_type == 'gsas':
            # load as GSAS
            mantid_helper.load_gsas_file(data_file_name,
                                         data_ws_name,
                                         standard_bin_workspace=None)
        elif data_file_type == 'processed nexus':
            # load processed nexus: pass the full file name (file_name had its extension stripped)
            mantid_helper.load_nexus(data_file_name=data_file_name,
                                     output_ws_name=data_ws_name,
                                     meta_data_only=False)
        else:
            raise RuntimeError('Unable to support %s file.' % data_file_type)

        # convert unit
        if target_unit:
            mantid_helper.mtd_convert_units(data_ws_name, target_unit)

        if data_key is None:
            data_key = data_ws_name

        # register by adding to data management dictionary
        self._workspaceDict[data_key] = data_ws_name
        # TODO - TONIGHT 0 - Add an option to the method such that single run data will go to singleGSASDict
        # TODO - ... ...   - chopped run will NOT to be recorded .. self._loadedGSSDict[] = ...maybe
        self._singleGSASDict[data_key] = data_file_name

        return data_key
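
A hypothetical call, assuming an instance named loader and a reduced GSAS file (the
path and argument values are placeholders):

    # the file type is inferred from the '.gda' extension; the returned key
    # identifies the loaded workspace in the internal dictionaries
    data_key = loader.load_binned_data(data_file_name='/tmp/VULCAN_5678.gda',
                                       data_file_type=None,
                                       max_int=999,
                                       prefix='',
                                       target_unit='dSpacing')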
Example #5
    def get_proton_charge(ipts_number, run_number, chop_sequence):
        """ get proton charge (single value) from a run
        :param ipts_number:
        :param run_number:
        :param chop_sequence:
        :return:
        """
        # check inputs' types
        assert isinstance(ipts_number, int), 'IPTS number {0} must be an integer but not a {1}' \
                                             ''.format(ipts_number, type(ipts_number))
        assert isinstance(run_number, int), 'Run number {0} must be an integer but not a {1}.' \
                                            ''.format(run_number, type(run_number))

        # file
        if chop_sequence is None:
            # regular run: load the NeXus file and find out
            nexus_file = '/SNS/VULCAN/IPTS-{0}/nexus/VULCAN_{1}.nxs.h5'.format(
                ipts_number, run_number)
            if not os.path.exists(nexus_file):
                nexus_file2 = '/SNS/VULCAN/IPTS-{0}/data/VULCAN_{1}_event.nxs'.format(
                    ipts_number, run_number)
                if os.path.exists(nexus_file2) is False:
                    raise RuntimeError(
                        'Unable to locate NeXus file for IPTS-{0} Run {1} with name '
                        '{2} or {3}'.format(ipts_number, run_number,
                                            nexus_file, nexus_file2))
                else:
                    nexus_file = nexus_file2
            # END-IF

            # load data, get proton charge and delete
            out_name = '{0}_Meta'.format(run_number)
            mantid_helper.load_nexus(data_file_name=nexus_file,
                                     output_ws_name=out_name,
                                     meta_data_only=True)
            proton_charge = mantid_helper.get_sample_log_value_single(
                out_name, 'gd_prtn_chrg')
            # convert unit from picoCoulomb to uA.hour
            proton_charge *= 1E6 * 3600.
            mantid_helper.delete_workspace(out_name)

        else:
            # chopped run: get the proton charge value from the sample environment record file
            record_file_name = '/SNS/VULCAN/IPTS-{0}/shared/ChoppedData/{1}/{1}sampleenv_chopped_mean.txt' \
                               ''.format(ipts_number, run_number)
            if os.path.exists(record_file_name) is False:
                raise RuntimeError(
                    'Unable to locate chopped data record file {0}'.format(
                        record_file_name))

            # read the whitespace-delimited record file with pandas
            data_set = pandas.read_csv(record_file_name,
                                       header=None,
                                       delim_whitespace=True,
                                       index_col=0)
            try:
                proton_charge = data_set.loc[chop_sequence][1]
                proton_charge = float(proton_charge)
            except KeyError as key_err:
                raise RuntimeError(
                    'Unable to find chop sequence {0} in {1} due to {2}'
                    ''.format(chop_sequence, record_file_name, key_err))
        # END-IF

        return proton_charge
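
Hypothetical calls (IPTS and run numbers are placeholders): a regular run reads the
proton charge from the NeXus logs, while a chopped run reads the shared record file:

    pc_full = get_proton_charge(ipts_number=1234, run_number=5678, chop_sequence=None)
    pc_slice = get_proton_charge(ipts_number=1234, run_number=5678, chop_sequence=3)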
Example #6
def export_vanadium_intensity_to_file(van_nexus_file, gsas_van_int_file):
    """
    export a vanadium to intensity file, whic is of GSAS format
    NOTE: THIS IS VERY INSTRUMENT GEOMETRY SENSITIVE!
    :param van_nexus_file:
    :param gsas_van_int_file:
    :return:
    """
    # check
    assert isinstance(van_nexus_file, str), 'Vanadium NeXus file {0} must be a string but not a {1}.' \
                                            ''.format(van_nexus_file, type(van_nexus_file))
    if os.path.exists(van_nexus_file) is False:
        raise RuntimeError('Given vanadium NeXus path {0} is incorrect.'.format(van_nexus_file))

    assert isinstance(gsas_van_int_file, str), 'Target GSAS vanadium intensity file {0} must be a string but not a ' \
                                               '{1}.'.format(gsas_van_int_file,
                                                             type(gsas_van_int_file))

    # write to file
    try:
        int_file = open(gsas_van_int_file, 'w')
    except (IOError, OSError) as io_err:
        raise RuntimeError('Unable to write to file {0} due to {1}'.format(
            gsas_van_int_file, io_err))

    # load data file
    out_file_name = os.path.basename(van_nexus_file).split('.')[0]
    mantid_helper.load_nexus(data_file_name=van_nexus_file,
                             output_ws_name=out_file_name, meta_data_only=False)
    event_ws = mantid_helper.retrieve_workspace(out_file_name)

    # Parse to intensity file
    int_buf = ''
    # num_spec = event_ws.getNumberHistograms()
    det_count = 0

    # pack boundaries: three 1232-pixel packs on the west bank and three on the east bank
    pack_index_west = range(0, 2464 + 1, 1232)
    pack_index_east = range(3696, 6160 + 1, 1232)
    pack_index_both = list(pack_index_west) + list(pack_index_east)

    for row_index in range(0, 1224 + 1, 8):
        for pack_index in pack_index_both:
            for i_ws in range(8):
                ws_index = row_index + pack_index + i_ws

                num_events = event_ws.getEventList(ws_index).getNumberEvents()
                # format to float with 8 significant digit
                format_event_str = format_float_number(num_events, 8)

                int_buf += '{0:>16}'.format(format_event_str)
                # start a new line after every 48 values (8 pixels x 6 packs)
                if det_count == 8 * 6 - 1:
                    int_buf += '\n'
                    det_count = 0
                else:
                    det_count += 1
            # END-FOR
    # END-FOR

    int_file.write(int_buf)
    int_file.close()

    return
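
A small self-contained sketch (pure Python, no Mantid needed) to sanity-check the
workspace-index traversal assumed above:

    # enumerate the indices the export loop visits: 8-pixel columns within
    # 1232-pixel packs, three packs each on the west and east banks
    packs = list(range(0, 2464 + 1, 1232)) + list(range(3696, 6160 + 1, 1232))
    indices = [row + pack + i
               for row in range(0, 1224 + 1, 8)
               for pack in packs
               for i in range(8)]
    print(len(indices), min(indices), max(indices))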
Example #7
    def execute_scan_rotating_collimator(self, ipts_number, run_number_list,
                                         pixels, to_focus_spectra):
        """
        :param run_number_list:
        :param pixels:
        :param to_focus_spectra:
        :return:
        """
        datatypeutility.check_list('Run numbers', run_number_list)
        datatypeutility.check_list('Pixel IDs', pixels)

        calib_manager = reductionmanager.CalibrationManager()

        data_set = dict()

        self._run_numbers = run_number_list[:]

        # load run numbers
        for run_number in run_number_list:
            # locate original nexus file
            if ipts_number is None:
                ipts_number = mantid_helper.get_ipts_number(run_number)
            event_file_name = '/SNS/VULCAN/IPTS-{}/nexus/VULCAN_{}.nxs.h5'.format(
                ipts_number, run_number)

            # load data from file
            ws_name_i = 'VULCAN_{}_events'.format(run_number)
            mantid_helper.load_nexus(data_file_name=event_file_name,
                                     output_ws_name=ws_name_i,
                                     meta_data_only=False)

            # align
            run_start_date = file_utilities.check_file_creation_date(
                event_file_name)
            has_loaded_cal, calib_ws_collection = calib_manager.has_loaded(
                run_start_date, 3)
            if not has_loaded_cal:
                calib_manager.search_load_calibration_file(
                    run_start_date, 3, ws_name_i)
                # re-query so calib_ws_collection refers to the newly loaded calibration
                has_loaded_cal, calib_ws_collection = calib_manager.has_loaded(
                    run_start_date, 3)
            # workspaces = calib_manager.get_loaded_calibration_workspaces(run_start_date, 3)
            calib_ws_name = calib_ws_collection.calibration
            # group_ws_name = workspaces.grouping
            # mask_ws_name = workspaces.mask

            # align and output to dSpacing
            mantid_reduction.align_instrument(ws_name_i, calib_ws_name)

            # focus or not
            out_name_i = ws_name_i + '_partial'
            workspace_index_vec = vulcan_util.convert_pixels_to_workspace_indexes_v1(
                pixel_id_list=pixels)
            if to_focus_spectra:
                # focus: rebin in dSpacing, sum the selected spectra, then convert to TOF
                mantid_helper.rebin(ws_name_i, '-0.1', preserve=True)
                mantid_helper.sum_spectra(
                    ws_name_i,
                    output_workspace=out_name_i,
                    workspace_index_list=workspace_index_vec)
                mantid_helper.mtd_convert_units(out_name_i, target_unit='TOF')
                mantid_helper.rebin(out_name_i,
                                    '3000, -0.0003, 70000',
                                    preserve=True)
            else:
                # no focusing: convert to TOF and rebin first, then sum the selected spectra
                mantid_helper.mtd_convert_units(ws_name_i, target_unit='TOF')
                mantid_helper.rebin(ws_name_i,
                                    '3000, -0.0003, 70000',
                                    preserve=True)
                mantid_helper.sum_spectra(
                    ws_name_i,
                    output_workspace=out_name_i,
                    workspace_index_list=workspace_index_vec)
            # END-IF

            # convert to point data
            mantid_helper.convert_to_point_data(out_name_i)

            # get workspace
            out_ws = mantid_helper.retrieve_workspace(out_name_i, True)
            data_set[run_number] = out_ws.readX(0), out_ws.readY(0)
        # END-FOR

        self._data_set = data_set

        return data_set
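
A hypothetical follow-up, plotting the per-run summed spectra returned above
(matplotlib is assumed to be available):

    import matplotlib.pyplot as plt

    for run_number, (vec_x, vec_y) in data_set.items():
        plt.plot(vec_x, vec_y, label='Run {}'.format(run_number))
    plt.xlabel('TOF')
    plt.ylabel('Counts')
    plt.legend()
    plt.show()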