Example 1
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)
            # Reflection 311 is restricted to bank with name "bank2"
            api.LoadEventNexus(Filename=run_file,
                               BankName="bank2",
                               OutputWorkspace=ws_name)

            if not self._noMonNorm:
                api.LoadNexusMonitors(Filename=run_file,
                                      OutputWorkspace=mon_ws_name)
            if sam_ws != ws_name:
                api.Plus(LHSWorkspace=sam_ws,
                         RHSWorkspace=ws_name,
                         OutputWorkspace=sam_ws)
                api.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and not self._noMonNorm:
                api.Plus(LHSWorkspace=mon_ws,
                         RHSWorkspace=mon_ws_name,
                         OutputWorkspace=mon_ws)
                api.DeleteWorkspace(mon_ws_name)
Example 2
 def _load(self, run_numbers, data_name):
     """
     Load data and monitors for the given run numbers.
     Algorithm 'Load' can aggregate many runs into a single workspace, but it is not able to do so
     with the monitor workspaces.
     :param run_numbers: run numbers for data event files
     :param data_name: output name for data workspace. The name for the workspace holding the
     monitor data will be data_name+'_monitors'
     :return: None
     """
     # Find out the files for each run
     load_algorithm = api.AlgorithmManager.createUnmanaged("Load")
     load_algorithm.initialize()
     load_algorithm.setPropertyValue('Filename', str(run_numbers))
     files = (load_algorithm.getProperty('Filename').value)[0]
     if not isinstance(files, list):
         # run_numbers represents one file only
         sapi.Load(Filename=files, LoadMonitors=True, OutputWorkspace=data_name)
     else:
         sapi.Load(Filename=files[0], LoadMonitors=True, OutputWorkspace=data_name)
         monitor_name = data_name + '_monitors'
         for file in files[1:]:
             sapi.Load(Filename=file, LoadMonitors=True, OutputWorkspace=data_name+'_tmp')
             sapi.Plus(LHSWorkspace=data_name, RHSWorkspace=data_name+'_tmp', OutputWorkspace=data_name)
             sapi.Plus(LHSWorkspace=monitor_name, RHSWorkspace=data_name + '_tmp_monitors', OutputWorkspace=monitor_name)
         sapi.DeleteWorkspace(data_name+'_tmp')
     if sapi.mtd[data_name].getInstrument().getName() != 'ARCS':
         raise NotImplementedError("This algorithm works only for the ARCS instrument")
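A minimal sketch of the pattern Example 2 works around: Mantid's Load can aggregate several runs into one data workspace, but the matching monitor workspaces have to be summed by hand with Plus. File and workspace names below are placeholders.

from mantid.simpleapi import Load, Plus, DeleteWorkspace

# Placeholder file names; Load with LoadMonitors=True also creates '<name>_monitors'.
Load(Filename='run1.nxs', LoadMonitors=True, OutputWorkspace='data')
Load(Filename='run2.nxs', LoadMonitors=True, OutputWorkspace='data_tmp')
# Sum the data, then sum the monitors separately, since Load will not do the latter.
Plus(LHSWorkspace='data', RHSWorkspace='data_tmp', OutputWorkspace='data')
Plus(LHSWorkspace='data_monitors', RHSWorkspace='data_tmp_monitors',
     OutputWorkspace='data_monitors')
DeleteWorkspace('data_tmp')
DeleteWorkspace('data_tmp_monitors')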
Example 3
def _load_monitor_sum_range(files, input_dir, instrument):
    loop = 0
    num = files.split("_")
    frange = list(range(int(num[0]), int(num[1]) + 1))
    mspectra = instrument._get_monitor_spectra(int(num[0]))
    for i in frange:
        file_path = instrument._generate_input_full_path(i, input_dir)
        outwork = "mon" + str(i)
        mantid.LoadRaw(Filename=file_path,
                       OutputWorkspace=outwork,
                       SpectrumMin=mspectra,
                       SpectrumMax=mspectra,
                       LoadLogFiles="0")
        loop += 1
        if loop == 2:
            firstwk = "mon" + str(i - 1)
            secondwk = "mon" + str(i)
            load_monitor_summed = mantid.Plus(LHSWorkspace=firstwk,
                                              RHSWorkspace=secondwk)
            mantid.mtd.remove(firstwk)
            mantid.mtd.remove(secondwk)
        elif loop > 2:
            secondwk = "mon" + str(i)
            load_monitor_summed = mantid.Plus(LHSWorkspace=load_monitor_summed,
                                              RHSWorkspace=secondwk)
            mantid.mtd.remove(secondwk)

    return load_monitor_summed
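The "first_last" range expansion in Example 3 is plain string handling; a standalone check with a hypothetical input:

# Standalone check of the "first_last" range expansion (hypothetical input).
files = "100_103"
num = files.split("_")
frange = list(range(int(num[0]), int(num[1]) + 1))
print(frange)  # [100, 101, 102, 103]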
Example 4
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        """
        Aggregate the set of runs
        @param run_set: list of run numbers
        @param sam_ws:  name of aggregate workspace for the sample
        @param mon_ws:  name of aggregate workspace for the monitors
        @param extra_ext: string to be added to the temporary workspaces
        """
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)

            api.Load(Filename=run_file, OutputWorkspace=ws_name)
            if not self._noMonNorm:
                api.LoadNexusMonitors(Filename=run_file,
                                      OutputWorkspace=mon_ws_name)
            if sam_ws != ws_name:
                api.Plus(LHSWorkspace=sam_ws,
                         RHSWorkspace=ws_name,
                         OutputWorkspace=sam_ws)
                api.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and not self._noMonNorm:
                api.Plus(LHSWorkspace=mon_ws,
                         RHSWorkspace=mon_ws_name,
                         OutputWorkspace=mon_ws)
                api.DeleteWorkspace(mon_ws_name)
Example 5
    def _sum_runs(self, run_set, sam_ws, mon_ws, extra_ext=None):
        """
        Aggregate the set of runs
        @param run_set: list of run numbers
        @param sam_ws:  name of aggregate workspace for the sample
        @param mon_ws:  name of aggregate workspace for the monitors
        @param extra_ext: string to be added to the temporary workspaces
        """
        for run in run_set:
            ws_name = self._make_run_name(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + '_monitors'
            run_file = self._make_run_file(run)

            sapi.LoadEventNexus(Filename=run_file,
                                OutputWorkspace=ws_name,
                                BankName=self._reflection['banks'])
            if str(run)+':' in self.getProperty('ExcludeTimeSegment').value:
                self._filterEvents(str(run), ws_name)

            if self._MonNorm:
                sapi.LoadNexusMonitors(Filename=run_file,
                                       OutputWorkspace=mon_ws_name)

            if sam_ws != ws_name:
                sapi.Plus(LHSWorkspace=sam_ws,
                          RHSWorkspace=ws_name,
                          OutputWorkspace=sam_ws)
                sapi.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and self._MonNorm:
                sapi.Plus(LHSWorkspace=mon_ws,
                          RHSWorkspace=mon_ws_name,
                          OutputWorkspace=mon_ws)
                sapi.DeleteWorkspace(mon_ws_name)
Example 6
 def _multiple_load(self, data_file, workspace, property_manager, property_manager_name):
     instrument = ''
     if property_manager.existsProperty('InstrumentName'):
         instrument = property_manager.getProperty('InstrumentName').value
     output_str = ''
     if isinstance(data_file, str):
         if AnalysisDataService.doesExist(data_file):
             data_file = [data_file]
         else:
             data_file = find_data(data_file, instrument=instrument, allow_multiple=True)
     if isinstance(data_file, list):
         for i in range(len(data_file)):
             if i == 0:
                 output_str += self._load_data(data_file[i], workspace, property_manager, property_manager_name)
                 continue
             output_str += self._load_data(data_file[i], '__tmp_wksp', property_manager, property_manager_name)
             api.RebinToWorkspace(WorkspaceToRebin='__tmp_wksp', WorkspaceToMatch=workspace,
                                  OutputWorkspace='__tmp_wksp')
             api.Plus(LHSWorkspace=workspace, RHSWorkspace='__tmp_wksp', OutputWorkspace=workspace)
         if AnalysisDataService.doesExist('__tmp_wksp'):
             AnalysisDataService.remove('__tmp_wksp')
     else:
         output_str += 'Loaded %s\n' % data_file
         output_str += self._load_data(data_file, workspace, property_manager, property_manager_name)
     return output_str
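Example 6 rebins each later file onto the first workspace before adding; a minimal sketch of that align-then-add step, assuming placeholder workspace names already in the ADS:

from mantid.simpleapi import RebinToWorkspace, Plus, DeleteWorkspace

# Align binning first so Plus operates on workspaces with matching X axes.
RebinToWorkspace(WorkspaceToRebin='__tmp_wksp', WorkspaceToMatch='accumulated',
                 OutputWorkspace='__tmp_wksp')
Plus(LHSWorkspace='accumulated', RHSWorkspace='__tmp_wksp',
     OutputWorkspace='accumulated')
DeleteWorkspace('__tmp_wksp')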
Example 7
    def calculate_scaled_hab_output(self, shift, scale, sample_count_secondary,
                                    sample_norm_secondary, can_count_secondary,
                                    can_norm_secondary):
        scaled_norm_front = mantid_api.Scale(
            InputWorkspace=sample_norm_secondary,
            Factor=1.0 / scale,
            Operation='Multiply',
            StoreInADS=False)
        shifted_norm_front = mantid_api.Scale(
            InputWorkspace=sample_norm_secondary,
            Factor=shift,
            Operation='Multiply',
            StoreInADS=False)
        numerator = mantid_api.Plus(LHSWorkspace=sample_count_secondary,
                                    RHSWorkspace=shifted_norm_front,
                                    StoreInADS=False)
        hab_sample = mantid_api.Divide(LHSWorkspace=numerator,
                                       RHSWorkspace=scaled_norm_front,
                                       StoreInADS=False)

        if can_count_secondary is not None and can_norm_secondary is not None:
            scaled_norm_front_can = mantid_api.Scale(
                InputWorkspace=can_norm_secondary,
                Factor=1.0 / scale,
                Operation='Multiply',
                StoreInADS=False)
            hab_can = mantid_api.Divide(LHSWorkspace=can_count_secondary,
                                        RHSWorkspace=scaled_norm_front_can,
                                        StoreInADS=False)
            hab_sample = mantid_api.Minus(LHSWorkspace=hab_sample,
                                          RHSWorkspace=hab_can,
                                          StoreInADS=False)

        return hab_sample
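Written out, Example 7 computes hab = (counts + shift * norm) / (norm / scale), minus counts_can / (norm_can / scale) when a can is present. A numpy sketch of the same algebra, assuming plain arrays in place of workspaces:

import numpy as np

def scaled_hab(shift, scale, sample_count, sample_norm, can_count=None, can_norm=None):
    # (counts + shift * norm) / (norm / scale), as in Example 7
    hab_sample = (sample_count + shift * sample_norm) / (sample_norm / scale)
    if can_count is not None and can_norm is not None:
        hab_sample = hab_sample - can_count / (can_norm / scale)
    return hab_sample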
Example 8
def _sum_groups_of_three_ws(calibrated_spectra, output_file_names):
    workspace_list = []
    output_list = []
    for outer_loop_count in range(0, 3):
        # First clone workspaces 1/4/7
        pass_multiplier = (outer_loop_count * 3)
        workspace_names = "focus_mode_groups-" + str(pass_multiplier + 1)
        workspace_list.append(
            mantid.CloneWorkspace(
                InputWorkspace=calibrated_spectra[pass_multiplier],
                OutputWorkspace=workspace_names))
        # Then add workspaces 1+2+3 / 4+5+6 / 7+8+9
        for i in range(1, 3):
            input_ws_index = i + pass_multiplier  # Workspaces 2/3 * n
            inner_workspace_names = "focus_mode_groups-" + str(input_ws_index)
            workspace_list[outer_loop_count] = mantid.Plus(
                LHSWorkspace=workspace_list[outer_loop_count],
                RHSWorkspace=calibrated_spectra[input_ws_index],
                OutputWorkspace=inner_workspace_names)

        # Finally scale the output workspaces
        mod_first_number = str((outer_loop_count * 3) + 1)  # Generates 1/4/7
        mod_last_number = str((outer_loop_count + 1) * 3)  # Generates 3/6/9
        workspace_names = output_file_names[
            "output_name"] + "_mod" + mod_first_number + '-' + mod_last_number
        output_list.append(
            mantid.Scale(InputWorkspace=workspace_list[outer_loop_count],
                         OutputWorkspace=workspace_names,
                         Factor=0.333333333333))
    for ws in workspace_list:
        remove_intermediate_workspace(ws)
    return output_list
Example 9
def _adjust_cal_file(original_cal, generated_cal):
    origin_ws = "origin{}"
    gen_ws = "newCal{}"
    out_ws = "adjusted_cal"
    mantid.LoadCalFile(InstrumentName="Gem",
                       MakeGroupingWorkspace=False,
                       MakeMaskWorkspace=False,
                       MakeOffsetsWorkspace=True,
                       WorkspaceName=origin_ws.format(''),
                       CalFilename=original_cal)
    mantid.LoadCalFile(InstrumentName="Gem",
                       MakeGroupingWorkspace=False,
                       MakeMaskWorkspace=False,
                       MakeOffsetsWorkspace=True,
                       WorkspaceName=gen_ws.format(''),
                       CalFilename=generated_cal)
    mantid.Plus(LHSWorkspace=origin_ws.format("_offsets"),
                RHSWorkspace=gen_ws.format("_offsets"),
                OutputWorkspace=out_ws)
    mantid.SaveCalFile(OffsetsWorkspace=out_ws, Filename=generated_cal)
    common.remove_intermediate_workspace([
        origin_ws.format("_offsets"),
        gen_ws.format("_offsets"),
        origin_ws.format("_cal"),
        gen_ws.format("_cal")
    ])
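The origin_ws/gen_ws strings in Example 9 are format templates: LoadCalFile takes WorkspaceName as a prefix, and the '{}' slot is later filled with the suffix of a specific output workspace (as the cleanup list implies). A standalone trace:

# How the workspace-name templates expand (plain Python, no Mantid needed).
origin_ws = "origin{}"
print(origin_ws.format(''))          # "origin"         -> prefix given to LoadCalFile
print(origin_ws.format('_offsets'))  # "origin_offsets" -> offsets workspace name
print(origin_ws.format('_cal'))      # "origin_cal"     -> cal workspace name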
Example 10
def _focus_mode_groups(cycle_information, output_file_paths, save_range,
                       calibrated_spectra):
    output_list = []
    to_save = _sum_groups_of_three_ws(calibrated_spectra, output_file_paths)

    workspaces_4_to_9_name = output_file_paths["output_name"] + "_mods4-9"
    workspaces_4_to_9 = mantid.Plus(LHSWorkspace=to_save[1],
                                    RHSWorkspace=to_save[2])
    workspaces_4_to_9 = mantid.Scale(InputWorkspace=workspaces_4_to_9,
                                     Factor=0.5,
                                     OutputWorkspace=workspaces_4_to_9_name)
    to_save.append(workspaces_4_to_9)
    append = False
    index = 1
    for ws in to_save:
        if cycle_information["instrument_version"] == "new":
            mantid.SaveGSS(InputWorkspace=ws,
                           Filename=output_file_paths["gss_filename"],
                           Append=append,
                           Bank=index)
        elif cycle_information["instrument_version"] == "new2":
            mantid.SaveGSS(InputWorkspace=ws,
                           Filename=output_file_paths["gss_filename"],
                           Append=False,
                           Bank=index)

        workspace_names = ws.name()
        dspacing_ws = mantid.ConvertUnits(InputWorkspace=ws,
                                          OutputWorkspace=workspace_names,
                                          Target="dSpacing")
        remove_intermediate_workspace(ws)
        output_list.append(dspacing_ws)
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=dspacing_ws,
                         Append=append)
        append = True
        index += 1

    for i in range(0, save_range):
        monitor_ws_name = output_file_paths["output_name"] + "_mod" + str(i + 10)

        monitor_ws = calibrated_spectra[i + 9]
        to_save = mantid.CloneWorkspace(InputWorkspace=monitor_ws,
                                        OutputWorkspace=monitor_ws_name)

        mantid.SaveGSS(InputWorkspace=to_save,
                       Filename=output_file_paths["gss_filename"],
                       Append=True,
                       Bank=i + 5)
        to_save = mantid.ConvertUnits(InputWorkspace=to_save,
                                      OutputWorkspace=monitor_ws_name,
                                      Target="dSpacing")
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=to_save,
                         Append=True)

        output_list.append(to_save)

    return output_list
Example 11
    def save_combined_panel(self, run, panel):
        panel_combination = {5: 6, 4: 7, 3: 8, 2: 9, 1: 10}
        input_ws1 = "w{0}-{1}foc".format(run, panel)
        input_ws2 = "w{0}-{1}foc".format(run, panel_combination.get(panel))
        combined = "{0}{1}-{2}_{3}foc{4}".format("{0}", run, panel,
                                                 panel_combination.get(panel),
                                                 "{1}")
        combined_save = combined.format("", "{}")
        combined_ws = combined.format("w", "")

        simple.RebinToWorkspace(WorkspaceToRebin=input_ws2,
                                WorkspaceToMatch=input_ws1,
                                OutputWorkspace=input_ws2,
                                PreserveEvents='0')
        simple.Plus(LHSWorkspace=input_ws1,
                    RHSWorkspace=input_ws2,
                    OutputWorkspace=combined_ws)
        simple.ConvertUnits(InputWorkspace=combined_ws,
                            OutputWorkspace=combined_ws + "-d",
                            Target="dSpacing",
                            EMode="Elastic")

        simple.SaveGSS(combined_ws,
                       os.path.join(self.user_directory,
                                    combined_save.format("raw.gss")),
                       Append=False,
                       Bank=1)
        simple.SaveFocusedXYE(
            combined_ws,
            os.path.join(self.user_directory, combined_save.format("raw.dat")))
        simple.SaveNexusProcessed(
            combined_ws,
            os.path.join(self.user_directory, combined_save.format("raw.nxs")))
Example 12
 def load_data(self, file_path):
     r"""
     # type: (unicode) -> WorkspaceGroup
     @brief Load one or more data sets according to the needs of the instrument.
     @details This function assumes that when loading more than one data file, the files are congruent and their
     events will be added together.
     @param file_path: absolute path to one or more data files. If more than one, paths should be concatenated
     with the plus symbol '+'.
     @returns WorkspaceGroup with any number of cross-sections
     """
     fp_instance = FilePath(file_path)
     xs_list = list()
     # random string of 12 characters
     temp_workspace_root_name = ''.join(random.sample(string.ascii_letters, 12))
     workspace_root_name = fp_instance.run_numbers(string_representation='short')
     for path in fp_instance.single_paths:
         is_legacy = path.endswith(".nxs")
         if is_legacy or not USE_SLOW_FLIPPER_LOG:
             _path_xs_list = api.MRFilterCrossSections(
                 Filename=path,
                 PolState=self.pol_state,
                 AnaState=self.ana_state,
                 PolVeto=self.pol_veto,
                 AnaVeto=self.ana_veto,
                 CrossSectionWorkspaces="%s_entry" %
                 temp_workspace_root_name)
             # Only keep good workspaces, and get rid of the rejected events
             path_xs_list = [
                 ws for ws in _path_xs_list if
                 not ws.getRun()['cross_section_id'].value == 'unfiltered'
             ]
         else:
             ws = api.LoadEventNexus(Filename=path,
                                     OutputWorkspace="raw_events")
             path_xs_list = self.dummy_filter_cross_sections(
                 ws, name_prefix=temp_workspace_root_name)
         if not xs_list:  # initialize xs_list with the cross sections of the first data file
             xs_list = path_xs_list
             for ws in xs_list:  # replace the temporary names with the run number(s)
                 name_new = str(ws).replace(temp_workspace_root_name,
                                            workspace_root_name)
                 api.RenameWorkspace(str(ws), name_new)
         else:
             for i, ws in enumerate(xs_list):
                 api.Plus(LHSWorkspace=str(ws),
                          RHSWorkspace=str(path_xs_list[i]),
                          OutputWorkspace=str(ws))
     # Insert a log indicating which run numbers contributed to this cross-section
     for ws in xs_list:
         api.AddSampleLog(
             Workspace=str(ws),
             LogName='run_numbers',
             LogText=fp_instance.run_numbers(string_representation='short'),
             LogType='String')
     return xs_list
Example 13
 def _sum_signal(self, datasf, datansf, deterota):
     result = dict.fromkeys(deterota)
     for angle in deterota:
         wsname = 'sum' + str(angle)
         api.Plus(datasf[angle], datansf[angle], OutputWorkspace=wsname)
         self.toremove.append(wsname)
         result[angle] = wsname
     return result
Example 14
def _focus_mode_trans(output_file_paths, atten, instrument,
                      calibrated_spectra):
    summed_ws = mantid.CloneWorkspace(InputWorkspace=calibrated_spectra[0])
    for i in range(1, 9):  # Add workspaces 2-9 to workspace 1
        summed_ws = mantid.Plus(LHSWorkspace=summed_ws,
                                RHSWorkspace=calibrated_spectra[i])

    summed_ws = mantid.Scale(InputWorkspace=summed_ws,
                             Factor=0.111111111111111)

    if atten:
        # Clone a workspace which is not attenuated
        no_att = output_file_paths["output_name"] + "_noatten"
        mantid.CloneWorkspace(InputWorkspace=summed_ws, OutputWorkspace=no_att)

        summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws,
                                        Target="dSpacing")
        summed_ws = instrument._attenuate_workspace(summed_ws)
        summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws, Target="TOF")

    mantid.SaveGSS(InputWorkspace=summed_ws,
                   Filename=output_file_paths["gss_filename"],
                   Append=False,
                   Bank=1)
    mantid.SaveFocusedXYE(InputWorkspace=summed_ws,
                          Filename=output_file_paths["tof_xye_filename"],
                          Append=False,
                          IncludeHeader=False)

    summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws,
                                    Target="dSpacing")

    # Rename to user friendly name:
    summed_ws_name = output_file_paths["output_name"] + "_mods1-9"
    summed_ws = mantid.RenameWorkspace(InputWorkspace=summed_ws,
                                       OutputWorkspace=summed_ws_name)

    mantid.SaveFocusedXYE(InputWorkspace=summed_ws,
                          Filename=output_file_paths["dspacing_xye_filename"],
                          Append=False,
                          IncludeHeader=False)
    mantid.SaveNexus(InputWorkspace=summed_ws,
                     Filename=output_file_paths["nxs_filename"],
                     Append=False)

    output_list = [summed_ws]

    for i in range(0, 9):
        workspace_name = output_file_paths["output_name"] + "_mod" + str(i + 1)
        to_save = mantid.ConvertUnits(InputWorkspace=calibrated_spectra[i],
                                      Target="dSpacing",
                                      OutputWorkspace=workspace_name)
        output_list.append(to_save)
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=to_save,
                         Append=True)

    return output_list
Example 15
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        """
        Aggregate the set of runs
        @param run_set: list of run numbers
        @param sam_ws:  name of aggregate workspace for the sample
        @param mon_ws:  name of aggregate workspace for the monitors
        @param extra_ext: string to be added to the temporary workspaces
        """
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)

            # Faster loading for the 311 reflection
            if self._reflection["name"] == "silicon311":
                kwargs = {"BankName": "bank2"}  # 311 analyzers only in bank2
            else:
                kwargs = {}

            sapi.LoadEventNexus(Filename=run_file,
                                OutputWorkspace=ws_name,
                                **kwargs)
            if str(run) + ':' in self.getProperty("ExcludeTimeSegment").value:
                self._filterEvents(str(run), ws_name)

            if self._MonNorm:
                sapi.LoadNexusMonitors(Filename=run_file,
                                       OutputWorkspace=mon_ws_name)

            if sam_ws != ws_name:
                sapi.Plus(LHSWorkspace=sam_ws,
                          RHSWorkspace=ws_name,
                          OutputWorkspace=sam_ws)
                sapi.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and self._MonNorm:
                sapi.Plus(LHSWorkspace=mon_ws,
                          RHSWorkspace=mon_ws_name,
                          OutputWorkspace=mon_ws)
                sapi.DeleteWorkspace(mon_ws_name)
Example 16
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)

            api.Load(Filename=run_file, OutputWorkspace=ws_name)
            if not self._noMonNorm:
                api.LoadNexusMonitors(Filename=run_file,
                                      OutputWorkspace=mon_ws_name)
            if sam_ws != ws_name:
                api.Plus(LHSWorkspace=sam_ws, RHSWorkspace=ws_name,
                         OutputWorkspace=sam_ws)
                api.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and not self._noMonNorm:
                api.Plus(LHSWorkspace=mon_ws,
                         RHSWorkspace=mon_ws_name,
                         OutputWorkspace=mon_ws)
                api.DeleteWorkspace(mon_ws_name)
Example 17
def _load_raw_file_range(files, input_dir, instrument):
    loop = 0
    num = files.split("_")
    frange = list(range(int(num[0]), int(num[1]) + 1))
    out_ws = None
    for i in frange:
        file_path = instrument._generate_input_full_path(i, input_dir)
        outwork = "run" + str(i)
        mantid.LoadRaw(Filename=file_path,
                       OutputWorkspace=outwork,
                       LoadLogFiles="0")
        loop += 1
        if loop == 2:
            firstwk = "run" + str(i - 1)
            secondwk = "run" + str(i)
            out_ws = mantid.Plus(LHSWorkspace=firstwk, RHSWorkspace=secondwk)
            mantid.mtd.remove(firstwk)
            mantid.mtd.remove(secondwk)
        elif loop > 2:
            secondwk = "run" + str(i)
            out_ws = mantid.Plus(LHSWorkspace=out_ws, RHSWorkspace=secondwk)
            mantid.mtd.remove(secondwk)
    return out_ws
Example 18
    def _fr_correction(self):
        """
        Applies the flipping-ratio correction
        according to J. Appl. Cryst. 42, 69-84 (2009)
        and creates the corrected workspaces.
        """
        wslist = []
        # 1. retrieve NiCr and Background
        sf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_NiCr'])
        nsf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_NiCr'])
        sf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Background'])
        nsf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Background'])

        # 2. subtract background from NiCr
        _sf_nicr_bg_ = sf_nicr - sf_bkgr
        wslist.append(_sf_nicr_bg_.name())
        _nsf_nicr_bg_ = nsf_nicr - nsf_bkgr
        wslist.append(_nsf_nicr_bg_.name())
        # check negative values, throw exception
        sf_arr = np.array(_sf_nicr_bg_.extractY()).flatten()
        nsf_arr = np.array(_nsf_nicr_bg_.extractY()).flatten()
        sf_neg_values = np.where(sf_arr < 0)[0]
        nsf_neg_values = np.where(nsf_arr < 0)[0]
        if len(sf_neg_values) or len(nsf_neg_values):
            self.cleanup(wslist)
            message = "Background is higher than NiCr signal!"
            self.log().error(message)
            raise RuntimeError(message)

        # 3. calculate flipping ratio F - 1 = (NiCr - Bkg)NSF/(NiCr - Bkg)SF - 1
        _coef_ws_ = api.Divide(LHSWorkspace=_nsf_nicr_bg_, RHSWorkspace=_sf_nicr_bg_, WarnOnZeroDivide=True) - 1.0
        wslist.append(_coef_ws_.name())
        # 4. apply correction raw data
        sf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Data'])
        nsf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Data'])
        # NSF_corr[i] = NSF[i] + (NSF[i] - SF[i])/(F[i] - 1)
        _diff_ws_ = nsf_data_ws - sf_data_ws
        wslist.append(_diff_ws_.name())
        _tmp_ws_ = api.Divide(LHSWorkspace=_diff_ws_, RHSWorkspace=_coef_ws_, WarnOnZeroDivide=True)
        _tmp_ws_.setYUnit(nsf_data_ws.YUnit())
        api.Plus(LHSWorkspace=nsf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.nsf_outws_name)
        # SF_corr[i] = SF[i] - (NSF[i] - SF[i])/(F[i] - 1)
        api.Minus(LHSWorkspace=sf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.sf_outws_name)
        api.DeleteWorkspace(_tmp_ws_)

        # cleanup
        self.cleanup(wslist)
        return
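The arithmetic in Example 18 matches its comments: F - 1 = (NiCr - Bkg)_NSF / (NiCr - Bkg)_SF - 1, then NSF_corr = NSF + (NSF - SF)/(F - 1) and SF_corr = SF - (NSF - SF)/(F - 1). A numpy sketch of the same correction on plain arrays:

import numpy as np

def fr_correction(sf, nsf, sf_nicr, nsf_nicr, sf_bkg, nsf_bkg):
    # F - 1 from background-subtracted NiCr, then the symmetric correction term.
    coef = (nsf_nicr - nsf_bkg) / (sf_nicr - sf_bkg) - 1.0
    diff = (nsf - sf) / coef
    return sf - diff, nsf + diff  # (SF_corr, NSF_corr)

sf, nsf = np.array([10.0]), np.array([30.0])
print(fr_correction(sf, nsf,
                    sf_nicr=np.array([3.0]), nsf_nicr=np.array([7.0]),
                    sf_bkg=np.array([1.0]), nsf_bkg=np.array([1.0])))
# (array([0.]), array([40.]))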
Example 19
def _focus_mode_all(output_file_paths, calibrated_spectra):
    first_spectrum = calibrated_spectra[0]
    summed_spectra = mantid.CloneWorkspace(InputWorkspace=first_spectrum)

    for i in range(1, 9):  # TODO why is this 1-8
        summed_spectra = mantid.Plus(LHSWorkspace=summed_spectra,
                                     RHSWorkspace=calibrated_spectra[i])

    summed_spectra_name = output_file_paths["output_name"] + "_mods1-9"

    summed_spectra = mantid.Scale(InputWorkspace=summed_spectra,
                                  Factor=0.111111111111111,
                                  OutputWorkspace=summed_spectra_name)
    mantid.SaveGSS(InputWorkspace=summed_spectra,
                   Filename=output_file_paths["gss_filename"],
                   Append=False,
                   Bank=1)

    summed_spectra = mantid.ConvertUnits(InputWorkspace=summed_spectra,
                                         Target="dSpacing",
                                         OutputWorkspace=summed_spectra_name)
    mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                     InputWorkspace=summed_spectra,
                     Append=False)

    output_list = [summed_spectra]
    for i in range(0, 3):
        spectra_index = i + 9  # We want workspaces 10/11/12 so compensate for 0 based index
        ws_to_save = calibrated_spectra[spectra_index]  # Save out workspaces 10/11/12
        output_name = output_file_paths["output_name"] + "_mod" + str(spectra_index + 1)
        mantid.SaveGSS(InputWorkspace=ws_to_save,
                       Filename=output_file_paths["gss_filename"],
                       Append=True,
                       Bank=i + 2)
        ws_to_save = mantid.ConvertUnits(InputWorkspace=ws_to_save,
                                         OutputWorkspace=output_name,
                                         Target="dSpacing")
        output_list.append(ws_to_save)
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=ws_to_save,
                         Append=True)

    return output_list
Example 20
    def sum_workspaces(workspace_name_list, target_workspace_name):
        """
        sum 2 workspaces together
        example: ([self._inAccumulationWorkspaceName, workspace_i], self._inAccumulationWorkspaceName)
        :param workspace_name_list:
        :param target_workspace_name:
        :return: None or warning message
        """
        # check whether inputs are valid
        assert isinstance(workspace_name_list, list), 'Workspace names {0} must be given in list but not {1}.' \
                                                      ''.format(workspace_name_list,
                                                                type(workspace_name_list))
        assert isinstance(target_workspace_name, str), 'Target workspace name {0} for summed workspaces must be of ' \
                                                       'type {1}'.format(target_workspace_name,
                                                                         type(target_workspace_name))

        if len(workspace_name_list) != 2:
            raise RuntimeError('Sum workspaces must have 2 inputs')

        # plus
        mantidsimple.Plus(LHSWorkspace=workspace_name_list[0], RHSWorkspace=workspace_name_list[1],
                          OutputWorkspace=target_workspace_name)

        # set the run number correctly
        left_ws = ADS.retrieve(workspace_name_list[0])
        right_ws = ADS.retrieve(workspace_name_list[1])
        left_run_number = left_ws.getRunNumber()
        right_run_number = right_ws.getRunNumber()
        target_ws = ADS.retrieve(target_workspace_name)

        return_value = None
        if left_run_number == right_run_number:
            # same so do nothing
            pass
        elif left_run_number == 0:
            # one with run 0 and one is different
            target_ws.getRun().addProperty('run_number', right_run_number, replace=True)
        else:
            # they are different... warning
            return_value = 'Workspaces to sum have 2 different run numbers {0} and {1}.' \
                           ''.format(left_run_number, right_run_number)
            print('[WARNING] {0}'.format(return_value))
        # END-IF

        return return_value
Example 21
    def _sum_runs(self, run_set, sam_ws):
        r"""
        Aggregate the set of runs

        Parameters
        ----------
        run_set: list
            Run numbers
        sam_ws:  str
            Name of aggregate workspace for the sample
        """
        self.load_single_run(run_set[0], sam_ws)
        for run in run_set[1:]:
            ws_name = tws('sum_runs_' + run)
            self.load_single_run(run, ws_name)
            sapi.Plus(LHSWorkspace=sam_ws,
                      RHSWorkspace=ws_name,
                      OutputWorkspace=sam_ws)
Example 22
    def _sum_monitors(self, run_set, mon_ws):
        r"""
        Generate aggregate monitor workspace from a list of run numbers

        Parameters
        ----------
        run_set: list
            List of run numbers
        mon_ws: str
            Name of output workspace
        """
        sapi.LoadNexusMonitors(Filename=self._make_run_file(run_set[0]),
                               OutputWorkspace=mon_ws)
        for run in run_set[1:]:
            ws_name = tws('sum_monitors_' + run)
            sapi.LoadNexusMonitors(Filename=self._make_run_file(run),
                                   OutputWorkspace=ws_name)
            sapi.Plus(LHSWorkspace=mon_ws,
                      RHSWorkspace=ws_name,
                      OutputWorkspace=mon_ws)
Example 23
 def add_runs(self, run1, run2, suffix):
     # prevent new suffix being appended to old one
     out = suffix + ";" + run1.split(";")[1]
     mantid.Plus(run1, run2, OutputWorkspace=out)
     return out
Example 24
    def _multiple_load(self, data_file, workspace, property_manager,
                       property_manager_name):
        # Check whether we have a list of files that need merging
        #   Make sure we process a list of files written as a string
        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                raise RuntimeError("SANS reduction not set up properly: missing load algorithm")
            p = property_manager.getProperty("LoadAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = "Loaded %s\n" % filename
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            return msg

        # Get instrument to use with FileFinder
        instrument = ''
        if property_manager.existsProperty("InstrumentName"):
            instrument = property_manager.getProperty("InstrumentName").value

        output_str = ''
        if type(data_file) == str:
            data_file = find_data(data_file,
                                  instrument=instrument,
                                  allow_multiple=True)
        if type(data_file) == list:
            monitor = 0.0
            timer = 0.0
            for i in range(len(data_file)):
                if i == 0:
                    output_str += _load_data(data_file[i], workspace)
                    # Use the first file location as the default output directory
                    head, tail = os.path.split(data_file[0])
                    if os.path.isdir(head):
                        self.default_output_dir = head
                else:
                    output_str += _load_data(data_file[i], '__tmp_wksp')
                    api.Plus(LHSWorkspace=workspace,
                             RHSWorkspace='__tmp_wksp',
                             OutputWorkspace=workspace)
                    # Get the monitor and timer values
                    ws = AnalysisDataService.retrieve('__tmp_wksp')
                    monitor += ws.getRun().getProperty("monitor").value
                    timer += ws.getRun().getProperty("timer").value

            # Get the monitor and timer of the first file, which haven't yet
            # been added to the total
            ws = AnalysisDataService.retrieve(workspace)
            monitor += ws.getRun().getProperty("monitor").value
            timer += ws.getRun().getProperty("timer").value

            # Update the timer and monitor
            ws.getRun().addProperty("monitor", monitor, True)
            ws.getRun().addProperty("timer", timer, True)

            if AnalysisDataService.doesExist('__tmp_wksp'):
                AnalysisDataService.remove('__tmp_wksp')
        else:
            output_str += "Loaded %s\n" % data_file
            output_str += _load_data(data_file, workspace)
            head, tail = os.path.split(data_file)
            if os.path.isdir(head):
                self.default_output_dir = head
        return output_str
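Example 24 also keeps the 'monitor' and 'timer' sample logs additive across merged files; a minimal sketch of that log update, with placeholder workspace names:

from mantid.api import AnalysisDataService

# Accumulate a numeric sample log over the merged workspaces, then store the
# total back on the output (placeholder names 'merged', 'ws1', 'ws2').
total = 0.0
for name in ('ws1', 'ws2'):
    total += AnalysisDataService.retrieve(name).getRun().getProperty("monitor").value
AnalysisDataService.retrieve('merged').getRun().addProperty("monitor", total, True)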
Example 25
    def _load_and_sum_runs(self, spectra):
        """Load the input set of runs & sum them if there
        is more than one.
            @param spectra :: The list of spectra to load
            @returns a tuple of length 2 containing (main_detector_ws, monitor_ws)
        """
        isis = config.getFacility("ISIS")
        inst_prefix = isis.instrument("VESUVIO").shortName()

        runs = self._get_runs()

        self.summed_ws, self.summed_mon = "__loadraw_evs", "__loadraw_evs_monitors"
        for index, run in enumerate(runs):
            run = inst_prefix + str(run)
            self._raise_error_period_scatter(run, self._back_scattering)
            if index == 0:
                out_name, out_mon = SUMMED_WS, SUMMED_MON
            else:
                out_name, out_mon = SUMMED_WS + 'tmp', SUMMED_MON + 'tmp'

            # Load data
            raw_filepath = FileFinder.findRuns(run)[0]
            ms.LoadRaw(Filename=raw_filepath,
                       SpectrumList=spectra,
                       OutputWorkspace=out_name,
                       LoadMonitors='Exclude',
                       EnableLogging=_LOGGING_)
            ms.LoadRaw(Filename=raw_filepath,
                       SpectrumList=self._mon_spectra,
                       OutputWorkspace=out_mon,
                       EnableLogging=_LOGGING_)

            # Sum
            if index > 0:
                ms.Plus(LHSWorkspace=SUMMED_WS,
                        RHSWorkspace=out_name,
                        OutputWorkspace=SUMMED_WS,
                        EnableLogging=_LOGGING_)
                ms.Plus(LHSWorkspace=SUMMED_MON,
                        RHSWorkspace=out_mon,
                        OutputWorkspace=SUMMED_MON,
                        EnableLogging=_LOGGING_)

                ms.DeleteWorkspace(out_name, EnableLogging=_LOGGING_)
                ms.DeleteWorkspace(out_mon, EnableLogging=_LOGGING_)

        # Check to see if extra data needs to be loaded to normalise in data
        x_max = self._tof_max
        if self._foil_out_norm_end > self._tof_max:
            x_max = self._foil_out_norm_end
            self._crop_required = True

        ms.CropWorkspace(InputWorkspace=SUMMED_WS,
                         OutputWorkspace=SUMMED_WS,
                         XMax=x_max,
                         EnableLogging=_LOGGING_)
        ms.CropWorkspace(InputWorkspace=SUMMED_MON,
                         OutputWorkspace=SUMMED_MON,
                         XMax=self._mon_tof_max,
                         EnableLogging=_LOGGING_)

        summed_data, summed_mon = mtd[SUMMED_WS], mtd[SUMMED_MON]
        self._load_diff_mode_parameters(summed_data)
        return summed_data, summed_mon
Example 26
 def add_runs(self, l, r, suffix):
     # prevent new suffix being appended to old one
     out = lutils.replace_workspace_name_suffix(l, suffix)
     mantid.Plus(l, r, OutputWorkspace=out)
     return out